@typeberry/lib 0.4.1-0a3acb2 → 0.4.1-9e565b9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +31 -470
- package/index.d.ts +41 -559
- package/index.js +31 -470
- package/package.json +1 -1
package/index.js CHANGED
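The hunks below remove the trie-based `BlobDictionary` (together with its `Node`, `ListChildren`, `MapChildren`, and `bytesAsU48` helpers), rename the string-keyed `StringHashDictionary` to `HashDictionary`, and rewrite `TruncatedHashDictionary` so that keys are copied and truncated to `TRUNCATED_HASH_SIZE` bytes before every store and lookup. As a minimal, non-authoritative sketch of that truncation rule (the constant values and the `truncateKey` helper are assumptions for illustration, not part of the package API):

```ts
// Illustrative only: HASH_SIZE / TRUNCATED_HASH_SIZE values are assumed,
// and truncateKey is a hypothetical stand-in for the copy-and-zero logic
// visible in the added lines of TruncatedHashDictionary below.
const HASH_SIZE = 32; // assumed full hash length
const TRUNCATED_HASH_SIZE = 31; // assumed: everything but the last byte

function truncateKey(fullKey: Uint8Array): Uint8Array {
  // Copy into a zeroed buffer so the caller's bytes are never mutated and
  // anything past TRUNCATED_HASH_SIZE ends up as 0.
  const key = new Uint8Array(HASH_SIZE);
  key.set(fullKey.subarray(0, TRUNCATED_HASH_SIZE));
  return key;
}

// Two hashes differing only in the last byte resolve to the same entry.
const a = new Uint8Array(HASH_SIZE).fill(1);
const b = new Uint8Array(HASH_SIZE).fill(1);
b[HASH_SIZE - 1] = 0xff;

const store = new Map<string, string>();
store.set(truncateKey(a).join(","), "value");
console.log(store.get(truncateKey(b).join(","))); // "value"
```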
@@ -4424,434 +4424,8 @@ class ArrayView {
     }
 }
 
-/** A map which uses
-class
-    mapNodeThreshold;
-    /**
-     * The root node of the dictionary.
-     *
-     * This is the main internal data structure that organizes entries
-     * in a tree-like fashion (array-based nodes up to `mapNodeThreshold`,
-     * map-based nodes beyond it). All insertions, updates, and deletions
-     * operate through this structure.
-     */
-    root = Node.withList();
-    /**
-     * Auxiliary map that stores references to the original keys and their values.
-     *
-     * - Overriding a value in the main structure does not replace the original key reference.
-     * - Used for efficient iteration over `keys()`, `values()`, `entries()`, and computing `size`.
-     */
-    keyvals = new Map();
-    /**
-     * Protected constructor used internally by `BlobDictionary.new`
-     * and `BlobDictionary.fromEntries`.
-     *
-     * This enforces controlled instantiation — users should create instances
-     * through the provided static factory methods instead of calling the
-     * constructor directly.
-     *
-     * @param mapNodeThreshold - The threshold that determines when the dictionary
-     * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
-     */
-    constructor(mapNodeThreshold) {
-        super();
-        this.mapNodeThreshold = mapNodeThreshold;
-    }
-    /**
-     * Returns the number of entries in the dictionary.
-     *
-     * The count is derived from the auxiliary `keyvals` map, which stores
-     * all original key references and their associated values. This ensures
-     * that the `size` reflects the actual number of entries, independent of
-     * internal overrides in the main `root` structure.
-     *
-     * @returns The total number of entries in the dictionary.
-     */
-    get size() {
-        return this.keyvals.size;
-    }
-    [TEST_COMPARE_USING]() {
-        const vals = Array.from(this);
-        vals.sort((a, b) => a[0].compare(b[0]).value);
-        return vals;
-    }
-    /**
-     * Creates an empty `BlobDictionary`.
-     *
-     * @param mapNodeThreshold - The threshold that determines when the dictionary
-     * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
-     * Defaults to `0`.
-     *
-     * @returns A new, empty `BlobDictionary` instance.
-     */
-    static new(mapNodeThreshold = 0) {
-        return new BlobDictionary(mapNodeThreshold);
-    }
-    /**
-     * Creates a new `BlobDictionary` initialized with the given entries.
-     *
-     * @param entries - An array of `[key, value]` pairs used to populate the dictionary.
-     * @param mapNodeThreshold - The threshold that determines when the dictionary
-     * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
-     * Defaults to `0`.
-     *
-     * @returns A new `BlobDictionary` containing the provided entries.
-     */
-    static fromEntries(entries, mapNodeThreshold) {
-        const dict = BlobDictionary.new(mapNodeThreshold);
-        for (const [key, value] of entries) {
-            dict.set(key, value);
-        }
-        return dict;
-    }
-    /**
-     * Internal helper that inserts, updates or deletes an entry in the dictionary.
-     *
-     * Behaviour details:
-     * - Passing `undefined` as `value` indicates a deletion. (E.g. `delete` uses `internalSet(key, undefined)`.)
-     * - When an add (new entry) or a delete actually changes the structure, the method returns the affected leaf node.
-     * - When the call only overrides an existing value (no structural add/delete), the method returns `null`.
-     *
-     * This method is intended for internal use by the dictionary implementation and allows `undefined` as a
-     * sentinel value to signal removals.
-     *
-     * @param key - The key to insert, update or remove.
-     * @param value - The value to associate with the key, or `undefined` to remove the key.
-     * @returns The leaf node created or removed on add/delete, or `null` if the operation only overwrote an existing value.
-     */
-    internalSet(key, value) {
-        let node = this.root;
-        const keyChunkGenerator = key.chunks(CHUNK_SIZE);
-        let depth = 0;
-        for (;;) {
-            const maybeKeyChunk = keyChunkGenerator.next().value;
-            if (maybeKeyChunk === undefined) {
-                if (value === undefined) {
-                    return node.remove(key);
-                }
-                return node.set(key, value);
-            }
-            const keyChunk = asOpaqueType(maybeKeyChunk);
-            if (node.children instanceof ListChildren) {
-                const subkey = BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE * depth));
-                const leaf = value !== undefined ? node.children.insert(subkey, { key, value }) : node.children.remove(subkey);
-                if (subkey.length > CHUNK_SIZE && node.children.children.length > this.mapNodeThreshold) {
-                    node.convertListChildrenToMap();
-                }
-                return leaf;
-            }
-            depth += 1;
-            const children = node.children;
-            if (children instanceof ListChildren) {
-                throw new Error("We handle list node earlier. If we fall through, we know it's for the `Map` case.");
-            }
-            if (children instanceof MapChildren) {
-                const maybeNode = children.getChild(keyChunk);
-                if (maybeNode !== undefined) {
-                    // simply go one level deeper
-                    node = maybeNode;
-                }
-                else {
-                    // we are trying to remove an item, but it does not exist
-                    if (value === undefined) {
-                        return null;
-                    }
-                    // no more child nodes, we insert a new one.
-                    const newNode = Node.withList();
-                    children.setChild(keyChunk, newNode);
-                    node = newNode;
-                }
-                continue;
-            }
-            assertNever(children);
-        }
-    }
-    /**
-     * Adds a new entry to the dictionary or updates the value of an existing key.
-     *
-     * If an entry with the given key already exists, its value is replaced
-     * with the new one.
-     *
-     * @param key - The key to add or update in the dictionary.
-     * @param value - The value to associate with the specified key.
-     * @returns Nothing (`void`).
-     */
-    set(key, value) {
-        const leaf = this.internalSet(key, value);
-        if (leaf !== null) {
-            this.keyvals.set(leaf.key, leaf);
-        }
-    }
-    /**
-     * Retrieves the value associated with the given key from the dictionary.
-     *
-     * If the key does not exist, this method returns `undefined`.
-     *
-     * @param key - The key whose associated value should be retrieved.
-     * @returns The value associated with the specified key, or `undefined` if the key is not present.
-     */
-    get(key) {
-        let node = this.root;
-        const pathChunksGenerator = key.chunks(CHUNK_SIZE);
-        let depth = 0;
-        while (node !== undefined) {
-            const maybePathChunk = pathChunksGenerator.next().value;
-            if (node.children instanceof ListChildren) {
-                const subkey = BytesBlob.blobFrom(key.raw.subarray(depth * CHUNK_SIZE));
-                const child = node.children.find(subkey);
-                if (child !== null) {
-                    return child.value;
-                }
-            }
-            if (maybePathChunk === undefined) {
-                return node.getLeaf()?.value;
-            }
-            if (node.children instanceof MapChildren) {
-                const pathChunk = asOpaqueType(maybePathChunk);
-                node = node.children.getChild(pathChunk);
-                depth += 1;
-            }
-        }
-        return undefined;
-    }
-    /**
-     * Checks whether the dictionary contains an entry for the given key.
-     *
-     * ⚠️ **Note:** Avoid using `has(...)` together with `get(...)` in a pattern like this:
-     *
-     * ```ts
-     * if (dict.has(key)) {
-     *   const value = dict.get(key);
-     *   ...
-     * }
-     * ```
-     *
-     * This approach performs two lookups for the same key.
-     *
-     * Instead, prefer the following pattern, which retrieves the value once:
-     *
-     * ```ts
-     * const value = dict.get(key);
-     * if (value !== undefined) {
-     *   ...
-     * }
-     * ```
-     *
-     * @param key - The key to check for.
-     * @returns `true` if the dictionary contains an entry for the given key, otherwise `false`.
-     */
-    has(key) {
-        return this.get(key) !== undefined;
-    }
-    /**
-     * Removes an entry with the specified key from the dictionary.
-     *
-     * Internally, this calls {@link internalSet} with `undefined` to mark the entry as deleted.
-     *
-     * @param key - The key of the entry to remove.
-     * @returns `true` if an entry was removed (i.e. the key existed), otherwise `false`.
-     */
-    delete(key) {
-        const leaf = this.internalSet(key, undefined);
-        if (leaf !== null) {
-            this.keyvals.delete(leaf.key);
-            return true;
-        }
-        return false;
-    }
-    /**
-     * Returns an iterator over the keys in the dictionary.
-     *
-     * The iterator yields each key in insertion order.
-     *
-     * @returns An iterator over all keys in the dictionary.
-     */
-    keys() {
-        return this.keyvals.keys();
-    }
-    /**
-     * Returns an iterator over the values in the dictionary.
-     *
-     * The iterator yields each value in insertion order.
-     *
-     * @returns An iterator over all values in the dictionary.
-     */
-    *values() {
-        for (const leaf of this.keyvals.values()) {
-            yield leaf.value;
-        }
-    }
-    /**
-     * Returns an iterator over the `[key, value]` pairs in the dictionary.
-     *
-     * The iterator yields entries in insertion order.
-     *
-     * @returns An iterator over `[key, value]` tuples for each entry in the dictionary.
-     */
-    *entries() {
-        for (const leaf of this.keyvals.values()) {
-            yield [leaf.key, leaf.value];
-        }
-    }
-    /**
-     * Default iterator for the dictionary.
-     *
-     * Equivalent to calling {@link entries}.
-     * Enables iteration with `for...of`:
-     *
-     * ```ts
-     * for (const [key, value] of dict) {
-     *   ...
-     * }
-     * ```
-     *
-     * @returns An iterator over `[key, value]` pairs.
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * Creates a new sorted array of values, ordered by their corresponding keys.
-     *
-     * Iterates over all entries in the dictionary and sorts them according
-     * to the provided comparator function applied to the keys.
-     *
-     * @param comparator - A comparator function that can compare two keys.
-     *
-     * @returns A new array containing all values from the dictionary,
-     * sorted according to their keys.
-     */
-    toSortedArray(comparator) {
-        const vals = Array.from(this);
-        vals.sort((a, b) => comparator(a[0], b[0]).value);
-        return vals.map((x) => x[1]);
-    }
-}
-const CHUNK_SIZE = 6;
-/**
- * A function to transform a bytes chunk (up to 6 bytes into U48 number)
- *
- * Note that it uses 3 additional bits to store length(`value * 8 + len;`),
- * It is needed to distinguish shorter chunks that have 0s at the end, for example: [1, 2] and [1, 2, 0]
- * */
-function bytesAsU48(bytes) {
-    const len = bytes.length;
-    check `${len <= CHUNK_SIZE} Length has to be <= ${CHUNK_SIZE}, got: ${len}`;
-    let value = bytes[3] | (bytes[2] << 8) | (bytes[1] << 16) | (bytes[0] << 24);
-    for (let i = 4; i < bytes.length; i++) {
-        value = value * 256 + bytes[i];
-    }
-    return value * 8 + len;
-}
-class Node {
-    leaf;
-    children;
-    convertListChildrenToMap() {
-        if (!(this.children instanceof ListChildren)) {
-            return;
-        }
-        this.children = MapChildren.fromListNode(this.children);
-    }
-    static withList() {
-        return new Node(undefined, ListChildren.new());
-    }
-    static withMap() {
-        return new Node(undefined, MapChildren.new());
-    }
-    constructor(leaf, children) {
-        this.leaf = leaf;
-        this.children = children;
-    }
-    getLeaf() {
-        return this.leaf;
-    }
-    remove(_key) {
-        if (this.leaf === undefined) {
-            return null;
-        }
-        const removedLeaf = this.leaf;
-        this.leaf = undefined;
-        return removedLeaf;
-    }
-    set(key, value) {
-        if (this.leaf === undefined) {
-            this.leaf = { key, value };
-            return this.leaf;
-        }
-        this.leaf.value = value;
-        return null;
-    }
-}
-class ListChildren {
-    children = [];
-    constructor() { }
-    find(key) {
-        const result = this.children.find((item) => item[0].isEqualTo(key));
-        if (result !== undefined) {
-            return result[1];
-        }
-        return null;
-    }
-    remove(key) {
-        const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
-        if (existingIndex >= 0) {
-            const ret = this.children.splice(existingIndex, 1);
-            return ret[0][1];
-        }
-        return null;
-    }
-    insert(key, leaf) {
-        const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
-        if (existingIndex >= 0) {
-            const existing = this.children[existingIndex];
-            existing[1].value = leaf.value;
-            return null;
-        }
-        this.children.push([key, leaf]);
-        return leaf;
-    }
-    static new() {
-        return new ListChildren();
-    }
-}
-class MapChildren {
-    children = new Map();
-    constructor() { }
-    static new() {
-        return new MapChildren();
-    }
-    static fromListNode(node) {
-        const mapNode = new MapChildren();
-        for (const [key, leaf] of node.children) {
-            const currentKeyChunk = asOpaqueType(BytesBlob.blobFrom(key.raw.subarray(0, CHUNK_SIZE)));
-            const subKey = BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE));
-            let child = mapNode.getChild(currentKeyChunk);
-            if (child === undefined) {
-                child = Node.withList();
-                mapNode.setChild(currentKeyChunk, child);
-            }
-            const children = child.children;
-            children.insert(subKey, leaf);
-        }
-        return mapNode;
-    }
-    getChild(keyChunk) {
-        const chunkAsNumber = bytesAsU48(keyChunk.raw);
-        return this.children.get(chunkAsNumber);
-    }
-    setChild(keyChunk, node) {
-        const chunkAsNumber = bytesAsU48(keyChunk.raw);
-        this.children.set(chunkAsNumber, node);
-    }
-}
-
-/**
- * A map which uses hashes as keys.
- *
- * @deprecated
- * */
-class StringHashDictionary {
+/** A map which uses hashes as keys. */
+class HashDictionary {
     // TODO [ToDr] [crit] We can't use `TrieHash` directly in the map,
     // because of the way it's being compared. Hence having `string` here.
     // This has to be benchmarked and re-written to a custom map most likely.
@@ -4916,16 +4490,6 @@ class StringHashDictionary {
         return this.map.delete(key.toString());
     }
 }
-/**
- * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
- * In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
- */
-const BLOB_DICTIONARY_THRESHOLD$1 = 5;
-class HashDictionary extends BlobDictionary {
-    constructor() {
-        super(BLOB_DICTIONARY_THRESHOLD$1);
-    }
-}
 
 /** A set specialized for storing hashes. */
 class HashSet {
@@ -5375,18 +4939,6 @@ class SortedSet extends SortedArray {
     }
 }
 
-function getTruncatedKey(key) {
-    // Always return exactly TRUNCATED_HASH_SIZE bytes.
-    if (key.length === TRUNCATED_HASH_SIZE) {
-        return key;
-    }
-    return Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE);
-}
-/**
- * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
- * In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
- */
-const BLOB_DICTIONARY_THRESHOLD = 5;
 /**
  * A collection of hash-based keys (likely `StateKey`s) which ignores
  * differences on the last byte.
@@ -5399,37 +4951,48 @@ class TruncatedHashDictionary {
      * Each key will be copied and have the last byte replace with a 0.
      */
     static fromEntries(entries) {
-
+        /** Copy key bytes of an entry and replace the last one with 0. */
+        const mapped = Array.from(entries).map(([key, value]) => {
+            const newKey = Bytes.zero(HASH_SIZE).asOpaque();
+            newKey.raw.set(key.raw.subarray(0, TRUNCATED_HASH_SIZE));
+            return [newKey, value];
+        });
+        return new TruncatedHashDictionary(HashDictionary.fromEntries(mapped));
     }
+    /** A truncated key which we re-use to query the dictionary. */
+    truncatedKey = Bytes.zero(HASH_SIZE).asOpaque();
     constructor(dict) {
         this.dict = dict;
     }
     [TEST_COMPARE_USING]() {
-        return
+        return this.dict;
     }
     /** Return number of items in the dictionary. */
     get size() {
         return this.dict.size;
     }
     /** Retrieve a value that matches the key on `TRUNCATED_HASH_SIZE`. */
-    get(
-
-        return this.dict.get(truncatedKey);
+    get(fullKey) {
+        this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
+        return this.dict.get(this.truncatedKey);
    }
     /** Return true if the key is present in the dictionary */
-    has(
-
-        return this.dict.has(truncatedKey);
+    has(fullKey) {
+        this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
+        return this.dict.has(this.truncatedKey);
     }
     /** Set or update a value that matches the key on `TRUNCATED_HASH_SIZE`. */
-    set(
-
-
+    set(fullKey, value) {
+        // NOTE we can't use the the shared key here, since the collection will
+        // store the key for us, hence the copy.
+        const key = Bytes.zero(HASH_SIZE);
+        key.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
+        this.dict.set(key.asOpaque(), value);
     }
     /** Remove a value that matches the key on `TRUNCATED_HASH_SIZE`. */
-    delete(
-
-        this.dict.delete(truncatedKey);
+    delete(fullKey) {
+        this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
+        this.dict.delete(this.truncatedKey);
     }
     /** Iterator over values of the dictionary. */
     values() {
@@ -5437,7 +5000,9 @@ class TruncatedHashDictionary {
     }
     /** Iterator over entries of the dictionary (with truncated keys) */
     *entries() {
-
+        for (const [key, value] of this.dict.entries()) {
+            yield [Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE).asOpaque(), value];
+        }
     }
     [Symbol.iterator]() {
         return this.entries();
@@ -5447,18 +5012,14 @@ class TruncatedHashDictionary {
 var index$n = /*#__PURE__*/Object.freeze({
     __proto__: null,
     ArrayView: ArrayView,
-    BlobDictionary: BlobDictionary,
     FixedSizeArray: FixedSizeArray,
     HashDictionary: HashDictionary,
     HashSet: HashSet,
-    ListChildren: ListChildren,
     MultiMap: MultiMap,
     SortedArray: SortedArray,
    SortedSet: SortedSet,
-    StringHashDictionary: StringHashDictionary,
     TruncatedHashDictionary: TruncatedHashDictionary,
-    asKnownSize: asKnownSize
-    bytesAsU48: bytesAsU48
+    asKnownSize: asKnownSize
 });
 
 /**