@typeberry/lib 0.4.1-9e565b9 → 0.4.1-f776cce

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.cjs +470 -31
  2. package/index.d.ts +559 -41
  3. package/index.js +470 -31
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -1008,7 +1008,7 @@ declare class BytesBlob {
   *
   * Last chunk might be smaller than `size`.
   */
- *chunks(size: number): Generator<BytesBlob> {
+ *chunks(size: number): Generator<BytesBlob, undefined, void> {
  for (let i = 0; i < this.length; i += size) {
    yield BytesBlob.blobFrom(this.raw.subarray(i, i + size));
  }
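
The widened signature spells out that the generator yields `BytesBlob` chunks and then finishes with `undefined`, which is exactly how the new `BlobDictionary.internalSet` below consumes it (reading `next().value` until it sees `undefined`). A minimal sketch of that consumption pattern, using only `BytesBlob.blobFrom` and `chunks` as they appear in this diff (the byte values are made up):

```ts
// Walk a blob in 6-byte chunks the same way internalSet does.
const blob = BytesBlob.blobFrom(new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]));
const gen = blob.chunks(6);

for (;;) {
  // With Generator<BytesBlob, undefined, void> the `.value` below is typed
  // BytesBlob | undefined, so this exhaustion check type-checks cleanly.
  const chunk = gen.next().value;
  if (chunk === undefined) {
    break; // generator finished; the last chunk may be shorter than 6 bytes
  }
  console.log(chunk.length); // 6, then 2
}
```
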
@@ -3570,6 +3570,497 @@ declare class ArrayView<T> implements Iterable<T> {
  }
  }

+ /** A map which uses byte blobs as keys */
+ declare class BlobDictionary<K extends BytesBlob, V> extends WithDebug {
+ /**
+ * The root node of the dictionary.
+ *
+ * This is the main internal data structure that organizes entries
+ * in a tree-like fashion (array-based nodes up to `mapNodeThreshold`,
+ * map-based nodes beyond it). All insertions, updates, and deletions
+ * operate through this structure.
+ */
+ private root: Node<K, V> = Node.withList();
+
+ /**
+ * Auxiliary map that stores references to the original keys and their values.
+ *
+ * - Overriding a value in the main structure does not replace the original key reference.
+ * - Used for efficient iteration over `keys()`, `values()`, `entries()`, and computing `size`.
+ */
+ private keyvals: Map<K, Leaf<K, V>> = new Map();
+
+ /**
+ * Protected constructor used internally by `BlobDictionary.new`
+ * and `BlobDictionary.fromEntries`.
+ *
+ * This enforces controlled instantiation — users should create instances
+ * through the provided static factory methods instead of calling the
+ * constructor directly.
+ *
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
+ */
+ protected constructor(private mapNodeThreshold: number) {
+ super();
+ }
+
+ /**
+ * Returns the number of entries in the dictionary.
+ *
+ * The count is derived from the auxiliary `keyvals` map, which stores
+ * all original key references and their associated values. This ensures
+ * that the `size` reflects the actual number of entries, independent of
+ * internal overrides in the main `root` structure.
+ *
+ * @returns The total number of entries in the dictionary.
+ */
+ get size(): number {
+ return this.keyvals.size;
+ }
+
+ [TEST_COMPARE_USING]() {
+ const vals: [K, V][] = Array.from(this);
+ vals.sort((a, b) => a[0].compare(b[0]).value);
+ return vals;
+ }
+
+ /**
+ * Creates an empty `BlobDictionary`.
+ *
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
+ * Defaults to `0`.
+ *
+ * @returns A new, empty `BlobDictionary` instance.
+ */
+ static new<K extends BytesBlob, V>(mapNodeThreshold = 0) {
+ return new BlobDictionary<K, V>(mapNodeThreshold);
+ }
+
+ /**
+ * Creates a new `BlobDictionary` initialized with the given entries.
+ *
+ * @param entries - An array of `[key, value]` pairs used to populate the dictionary.
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
+ * Defaults to `0`.
+ *
+ * @returns A new `BlobDictionary` containing the provided entries.
+ */
+ static fromEntries<K extends BytesBlob, V>(entries: [K, V][], mapNodeThreshold?: number): BlobDictionary<K, V> {
+ const dict = BlobDictionary.new<K, V>(mapNodeThreshold);
+ for (const [key, value] of entries) {
+ dict.set(key, value);
+ }
+ return dict;
+ }
+ /**
+ * Internal helper that inserts, updates or deletes an entry in the dictionary.
+ *
+ * Behaviour details:
+ * - Passing `undefined` as `value` indicates a deletion. (E.g. `delete` uses `internalSet(key, undefined)`.)
+ * - When an add (new entry) or a delete actually changes the structure, the method returns the affected leaf node.
+ * - When the call only overrides an existing value (no structural add/delete), the method returns `null`.
+ *
+ * This method is intended for internal use by the dictionary implementation and allows `undefined` as a
+ * sentinel value to signal removals.
+ *
+ * @param key - The key to insert, update or remove.
+ * @param value - The value to associate with the key, or `undefined` to remove the key.
+ * @returns The leaf node created or removed on add/delete, or `null` if the operation only overwrote an existing value.
+ */
+ private internalSet(key: K, value: V | undefined): Leaf<K, V> | null {
+ let node: Node<K, V> = this.root;
+ const keyChunkGenerator = key.chunks(CHUNK_SIZE);
+ let depth = 0;
+
+ for (;;) {
+ const maybeKeyChunk = keyChunkGenerator.next().value;
+ if (maybeKeyChunk === undefined) {
+ if (value === undefined) {
+ return node.remove(key);
+ }
+ return node.set(key, value);
+ }
+
+ const keyChunk: KeyChunk = asOpaqueType(maybeKeyChunk);
+
+ if (node.children instanceof ListChildren) {
+ const subkey = BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE * depth));
+ const leaf = value !== undefined ? node.children.insert(subkey, { key, value }) : node.children.remove(subkey);
+
+ if (subkey.length > CHUNK_SIZE && node.children.children.length > this.mapNodeThreshold) {
+ node.convertListChildrenToMap();
+ }
+ return leaf;
+ }
+
+ depth += 1;
+
+ const children = node.children;
+ if (children instanceof ListChildren) {
+ throw new Error("We handle list node earlier. If we fall through, we know it's for the `Map` case.");
+ }
+
+ if (children instanceof MapChildren) {
+ const maybeNode = children.getChild(keyChunk);
+
+ if (maybeNode !== undefined) {
+ // simply go one level deeper
+ node = maybeNode;
+ } else {
+ // we are trying to remove an item, but it does not exist
+ if (value === undefined) {
+ return null;
+ }
+
+ // no more child nodes, we insert a new one.
+ const newNode = Node.withList<K, V>();
+ children.setChild(keyChunk, newNode);
+ node = newNode;
+ }
+ continue;
+ }
+
+ assertNever(children);
+ }
+ }
+
+ /**
+ * Adds a new entry to the dictionary or updates the value of an existing key.
+ *
+ * If an entry with the given key already exists, its value is replaced
+ * with the new one.
+ *
+ * @param key - The key to add or update in the dictionary.
+ * @param value - The value to associate with the specified key.
+ * @returns Nothing (`void`).
+ */
+ set(key: K, value: V): void {
+ const leaf = this.internalSet(key, value);
+ if (leaf !== null) {
+ this.keyvals.set(leaf.key, leaf);
+ }
+ }
+
+ /**
+ * Retrieves the value associated with the given key from the dictionary.
+ *
+ * If the key does not exist, this method returns `undefined`.
+ *
+ * @param key - The key whose associated value should be retrieved.
+ * @returns The value associated with the specified key, or `undefined` if the key is not present.
+ */
+ get(key: K): V | undefined {
+ let node: MaybeNode<K, V> = this.root;
+ const pathChunksGenerator = key.chunks(CHUNK_SIZE);
+ let depth = 0;
+
+ while (node !== undefined) {
+ const maybePathChunk = pathChunksGenerator.next().value;
+
+ if (node.children instanceof ListChildren) {
+ const subkey = BytesBlob.blobFrom(key.raw.subarray(depth * CHUNK_SIZE));
+ const child = node.children.find(subkey);
+ if (child !== null) {
+ return child.value;
+ }
+ }
+
+ if (maybePathChunk === undefined) {
+ return node.getLeaf()?.value;
+ }
+
+ if (node.children instanceof MapChildren) {
+ const pathChunk: KeyChunk = asOpaqueType(maybePathChunk);
+ node = node.children.getChild(pathChunk);
+ depth += 1;
+ }
+ }
+
+ return undefined;
+ }
+
+ /**
+ * Checks whether the dictionary contains an entry for the given key.
+ *
+ * ⚠️ **Note:** Avoid using `has(...)` together with `get(...)` in a pattern like this:
+ *
+ * ```ts
+ * if (dict.has(key)) {
+ * const value = dict.get(key);
+ * ...
+ * }
+ * ```
+ *
+ * This approach performs two lookups for the same key.
+ *
+ * Instead, prefer the following pattern, which retrieves the value once:
+ *
+ * ```ts
+ * const value = dict.get(key);
+ * if (value !== undefined) {
+ * ...
+ * }
+ * ```
+ *
+ * @param key - The key to check for.
+ * @returns `true` if the dictionary contains an entry for the given key, otherwise `false`.
+ */
+ has(key: K): boolean {
+ return this.get(key) !== undefined;
+ }
+
+ /**
+ * Removes an entry with the specified key from the dictionary.
+ *
+ * Internally, this calls {@link internalSet} with `undefined` to mark the entry as deleted.
+ *
+ * @param key - The key of the entry to remove.
+ * @returns `true` if an entry was removed (i.e. the key existed), otherwise `false`.
+ */
+ delete(key: K): boolean {
+ const leaf = this.internalSet(key, undefined);
+ if (leaf !== null) {
+ this.keyvals.delete(leaf.key);
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Returns an iterator over the keys in the dictionary.
+ *
+ * The iterator yields each key in insertion order.
+ *
+ * @returns An iterator over all keys in the dictionary.
+ */
+ keys(): Iterator<K> & Iterable<K> {
+ return this.keyvals.keys();
+ }
+
+ /**
+ * Returns an iterator over the values in the dictionary.
+ *
+ * The iterator yields each value in insertion order.
+ *
+ * @returns An iterator over all values in the dictionary.
+ */
+ *values(): Iterator<V> & Iterable<V> {
+ for (const leaf of this.keyvals.values()) {
+ yield leaf.value;
+ }
+ }
+
+ /**
+ * Returns an iterator over the `[key, value]` pairs in the dictionary.
+ *
+ * The iterator yields entries in insertion order.
+ *
+ * @returns An iterator over `[key, value]` tuples for each entry in the dictionary.
+ */
+ *entries(): Iterator<[K, V]> & Iterable<[K, V]> {
+ for (const leaf of this.keyvals.values()) {
+ yield [leaf.key, leaf.value];
+ }
+ }
+
+ /**
+ * Default iterator for the dictionary.
+ *
+ * Equivalent to calling {@link entries}.
+ * Enables iteration with `for...of`:
+ *
+ * ```ts
+ * for (const [key, value] of dict) {
+ * ...
+ * }
+ * ```
+ *
+ * @returns An iterator over `[key, value]` pairs.
+ */
+ [Symbol.iterator](): Iterator<[K, V]> & Iterable<[K, V]> {
+ return this.entries();
+ }
+
+ /**
+ * Creates a new sorted array of values, ordered by their corresponding keys.
+ *
+ * Iterates over all entries in the dictionary and sorts them according
+ * to the provided comparator function applied to the keys.
+ *
+ * @param comparator - A comparator function that can compare two keys.
+ *
+ * @returns A new array containing all values from the dictionary,
+ * sorted according to their keys.
+ */
+ toSortedArray(comparator: Comparator<K>): V[] {
+ const vals: [K, V][] = Array.from(this);
+ vals.sort((a, b) => comparator(a[0], b[0]).value);
+ return vals.map((x) => x[1]);
+ }
+ }
+
+ declare const CHUNK_SIZE = 6;
+ type CHUNK_SIZE = typeof CHUNK_SIZE;
+
+ /**
+ * A function to transform a bytes chunk (up to 6 bytes into U48 number)
+ *
+ * Note that it uses 3 additional bits to store length(`value * 8 + len;`),
+ * It is needed to distinguish shorter chunks that have 0s at the end, for example: [1, 2] and [1, 2, 0]
+ * */
+ declare function bytesAsU48(bytes: Uint8Array): number {
+ const len = bytes.length;
+
+ check`${len <= CHUNK_SIZE} Length has to be <= ${CHUNK_SIZE}, got: ${len}`;
+
+ let value = bytes[3] | (bytes[2] << 8) | (bytes[1] << 16) | (bytes[0] << 24);
+
+ for (let i = 4; i < bytes.length; i++) {
+ value = value * 256 + bytes[i];
+ }
+
+ return value * 8 + len;
+ }
+
+ type KeyChunk = Opaque<BytesBlob, `up to ${CHUNK_SIZE} bytes`>;
+ type U48 = number;
+ type SubKey<_K extends BytesBlob> = BytesBlob;
+ type OriginalKeyRef<K> = K;
+ type MaybeNode<K extends BytesBlob, V> = Node<K, V> | undefined;
+
+ type Leaf<K extends BytesBlob, V> = {
+ key: OriginalKeyRef<K>;
+ value: V;
+ };
+
+ declare class Node<K extends BytesBlob, V> {
+ convertListChildrenToMap() {
+ if (!(this.children instanceof ListChildren)) {
+ return;
+ }
+ this.children = MapChildren.fromListNode<K, V>(this.children);
+ }
+
+ static withList<K extends BytesBlob, V>(): Node<K, V> {
+ return new Node(undefined, ListChildren.new());
+ }
+
+ static withMap<K extends BytesBlob, V>(): Node<K, V> {
+ return new Node(undefined, MapChildren.new());
+ }
+
+ private constructor(
+ private leaf: Leaf<K, V> | undefined,
+ public children: MapChildren<K, V> | ListChildren<K, V>,
+ ) {}
+
+ getLeaf(): Leaf<K, V> | undefined {
+ return this.leaf;
+ }
+
+ remove(_key: K): Leaf<K, V> | null {
+ if (this.leaf === undefined) {
+ return null;
+ }
+
+ const removedLeaf = this.leaf;
+ this.leaf = undefined;
+ return removedLeaf;
+ }
+
+ set(key: K, value: V): Leaf<K, V> | null {
+ if (this.leaf === undefined) {
+ this.leaf = { key, value };
+ return this.leaf;
+ }
+ this.leaf.value = value;
+ return null;
+ }
+ }
+
+ declare class ListChildren<K extends BytesBlob, V> {
+ children: [SubKey<K>, Leaf<K, V>][] = [];
+
+ private constructor() {}
+
+ find(key: SubKey<K>): Leaf<K, V> | null {
+ const result = this.children.find((item) => item[0].isEqualTo(key));
+ if (result !== undefined) {
+ return result[1];
+ }
+ return null;
+ }
+
+ remove(key: SubKey<K>): Leaf<K, V> | null {
+ const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
+ if (existingIndex >= 0) {
+ const ret = this.children.splice(existingIndex, 1);
+ return ret[0][1];
+ }
+ return null;
+ }
+
+ insert(key: SubKey<K>, leaf: Leaf<K, V>): Leaf<K, V> | null {
+ const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
+ if (existingIndex >= 0) {
+ const existing = this.children[existingIndex];
+ existing[1].value = leaf.value;
+ return null;
+ }
+
+ this.children.push([key, leaf]);
+ return leaf;
+ }
+
+ static new<K extends BytesBlob, V>() {
+ return new ListChildren<K, V>();
+ }
+ }
+
+ declare class MapChildren<K extends BytesBlob, V> {
+ children: Map<U48, Node<K, V>> = new Map();
+
+ private constructor() {}
+
+ static new<K extends BytesBlob, V>(): MapChildren<K, V> {
+ return new MapChildren<K, V>();
+ }
+
+ static fromListNode<K extends BytesBlob, T>(node: ListChildren<K, T>): MapChildren<K, T> {
+ const mapNode = new MapChildren<K, T>();
+
+ for (const [key, leaf] of node.children) {
+ const currentKeyChunk: KeyChunk = asOpaqueType(BytesBlob.blobFrom(key.raw.subarray(0, CHUNK_SIZE)));
+ const subKey = BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE));
+
+ let child = mapNode.getChild(currentKeyChunk);
+
+ if (child === undefined) {
+ child = Node.withList<K, T>();
+ mapNode.setChild(currentKeyChunk, child);
+ }
+
+ const children = child.children as ListChildren<K, T>;
+ children.insert(subKey, leaf);
+ }
+
+ return mapNode;
+ }
+
+ getChild(keyChunk: KeyChunk) {
+ const chunkAsNumber = bytesAsU48(keyChunk.raw);
+ return this.children.get(chunkAsNumber);
+ }
+
+ setChild(keyChunk: KeyChunk, node: Node<K, V>) {
+ const chunkAsNumber = bytesAsU48(keyChunk.raw);
+ this.children.set(chunkAsNumber, node);
+ }
+ }
+
  type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
  type IDataType = string | Buffer | ITypedArray;

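For orientation, here is a small usage sketch of the `BlobDictionary` added above. It only uses members visible in this diff (`new`, `set`, `get`, `size`, iteration); the keys, values, and the `mapNodeThreshold` of 16 are arbitrary illustration choices:

```ts
// Keys are byte blobs; values can be anything.
const dict = BlobDictionary.new<BytesBlob, string>(16);

// [1, 2] and [1, 2, 0] are different keys; the bytesAsU48 doc above explains
// how the extra length bits keep such zero-padded chunks distinct internally.
const k1 = BytesBlob.blobFrom(new Uint8Array([1, 2]));
const k2 = BytesBlob.blobFrom(new Uint8Array([1, 2, 0]));

dict.set(k1, "short");
dict.set(k2, "zero-padded");

// Single lookup instead of has() + get(), as the has() docs recommend.
const value = dict.get(k1);
if (value !== undefined) {
  console.log(value); // "short"
}

console.log(dict.size); // 2
for (const [key, val] of dict) {
  console.log(key.length, val);
}
```
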
@@ -3770,17 +4261,20 @@ interface ImmutableHashDictionary<K extends OpaqueHash, V> extends Iterable<[K,
  get(key: K): V | undefined;

  /** Iterator over keys of the dictionary. */
- keys(): Generator<K>;
+ keys(): Iterator<K>;

  /** Iterator over values of the dictionary. */
- values(): Generator<V>;
+ values(): Iterator<V>;

  /** Returns an array of the map's values, sorted by their corresponding keys */
  toSortedArray(compare: Comparator<K>): V[];
  }
-
- /** A map which uses hashes as keys. */
- declare class HashDictionary<K extends OpaqueHash, V> implements ImmutableHashDictionary<K, V> {
+ /**
+ * A map which uses hashes as keys.
+ *
+ * @deprecated
+ * */
+ declare class StringHashDictionary<K extends OpaqueHash, V> implements ImmutableHashDictionary<K, V> {
  // TODO [ToDr] [crit] We can't use `TrieHash` directly in the map,
  // because of the way it's being compared. Hence having `string` here.
  // This has to be benchmarked and re-written to a custom map most likely.
@@ -3860,6 +4354,15 @@ declare class HashDictionary<K extends OpaqueHash, V> implements ImmutableHashDi
  }
  }

+ declare class HashDictionary<K extends OpaqueHash, V>
+ extends BlobDictionary<K, V>
+ implements ImmutableHashDictionary<K, V>
+ {
+ constructor() {
+ super(BLOB_DICTIONARY_THRESHOLD);
+ }
+ }
+
  /** Immutable version of the HashSet. */
  interface ImmutableHashSet<V extends OpaqueHash> extends Iterable<V> {
  /** Return number of items in the set. */
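
For callers, the visible effect of this hunk is that `HashDictionary` keeps its `ImmutableHashDictionary` surface but is now backed by `BlobDictionary`, while the old string-keyed implementation survives as the deprecated `StringHashDictionary`. A hedged sketch; the zeroed hash is a placeholder key, and `Bytes.zero(HASH_SIZE).asOpaque()` follows the pattern used in the removed TruncatedHashDictionary code further down:

```ts
// Construction and the map-like surface are unchanged for callers.
const dict = new HashDictionary<OpaqueHash, number>();

const hash: OpaqueHash = Bytes.zero(HASH_SIZE).asOpaque(); // placeholder key
dict.set(hash, 42);

// keys()/values() are now typed as plain iterators (see the interface change
// above), but they are still iterable, so for..of keeps working.
for (const key of dict.keys()) {
  console.log(key.length, dict.get(key));
}
```
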
@@ -4451,7 +4954,14 @@ declare class SortedSet<V> extends SortedArray<V> implements ImmutableSortedSet<
  }
  }

- type HashWithZeroedBit<T extends OpaqueHash> = T;
+ declare function getTruncatedKey(key: OpaqueHash | TruncatedHash) {
+ // Always return exactly TRUNCATED_HASH_SIZE bytes.
+ if (key.length === TRUNCATED_HASH_SIZE) {
+ return key;
+ }
+
+ return Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE);
+ }

  /**
  * A collection of hash-based keys (likely `StateKey`s) which ignores
@@ -4466,22 +4976,18 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
  static fromEntries<T extends OpaqueHash, V>(
  entries: Iterable<[T | TruncatedHash, V] | readonly [T | TruncatedHash, V]>,
  ): TruncatedHashDictionary<T, V> {
- /** Copy key bytes of an entry and replace the last one with 0. */
- const mapped = Array.from(entries).map<[T, V]>(([key, value]) => {
- const newKey: T = Bytes.zero(HASH_SIZE).asOpaque();
- newKey.raw.set(key.raw.subarray(0, TRUNCATED_HASH_SIZE));
- return [newKey, value];
- });
- return new TruncatedHashDictionary(HashDictionary.fromEntries(mapped));
+ return new TruncatedHashDictionary(
+ BlobDictionary.fromEntries<TruncatedHash, V>(
+ Array.from(entries).map(([key, value]) => [getTruncatedKey(key), value]),
+ BLOB_DICTIONARY_THRESHOLD,
+ ),
+ );
  }

- /** A truncated key which we re-use to query the dictionary. */
- private readonly truncatedKey: T = Bytes.zero(HASH_SIZE).asOpaque();
-
- private constructor(private readonly dict: HashDictionary<HashWithZeroedBit<T>, V>) {}
+ private constructor(private readonly dict: BlobDictionary<TruncatedHash, V>) {}

  [TEST_COMPARE_USING]() {
- return this.dict;
+ return Array.from(this.dict);
  }

  /** Return number of items in the dictionary. */
@@ -4490,31 +4996,27 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
  }

  /** Retrieve a value that matches the key on `TRUNCATED_HASH_SIZE`. */
- get(fullKey: T | TruncatedHash): V | undefined {
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
- return this.dict.get(this.truncatedKey);
+ get(key: T | TruncatedHash): V | undefined {
+ const truncatedKey = getTruncatedKey(key);
+ return this.dict.get(truncatedKey);
  }

  /** Return true if the key is present in the dictionary */
- has(fullKey: T | TruncatedHash): boolean {
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
-
- return this.dict.has(this.truncatedKey);
+ has(key: T | TruncatedHash): boolean {
+ const truncatedKey = getTruncatedKey(key);
+ return this.dict.has(truncatedKey);
  }

  /** Set or update a value that matches the key on `TRUNCATED_HASH_SIZE`. */
- set(fullKey: T | TruncatedHash, value: V) {
- // NOTE we can't use the the shared key here, since the collection will
- // store the key for us, hence the copy.
- const key = Bytes.zero(HASH_SIZE);
- key.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
- this.dict.set(key.asOpaque(), value);
+ set(key: T | TruncatedHash, value: V) {
+ const truncatedKey = getTruncatedKey(key);
+ this.dict.set(truncatedKey, value);
  }

  /** Remove a value that matches the key on `TRUNCATED_HASH_SIZE`. */
- delete(fullKey: T | TruncatedHash) {
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
- this.dict.delete(this.truncatedKey);
+ delete(key: T | TruncatedHash) {
+ const truncatedKey = getTruncatedKey(key);
+ this.dict.delete(truncatedKey);
  }

  /** Iterator over values of the dictionary. */
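
Because every accessor now funnels through `getTruncatedKey`, a full hash and its truncated form address the same entry. A small sketch, assuming `Bytes.zero`, `Bytes.fromBlob`, and `.asOpaque()` behave as in the code shown in this diff (the key bytes are made up):

```ts
// A full hash and its TRUNCATED_HASH_SIZE-byte prefix hit the same slot.
const full: OpaqueHash = Bytes.zero(HASH_SIZE).asOpaque();
full.raw[0] = 0xab;

const truncated: TruncatedHash = Bytes.fromBlob(
  full.raw.subarray(0, TRUNCATED_HASH_SIZE),
  TRUNCATED_HASH_SIZE,
).asOpaque();

const dict = TruncatedHashDictionary.fromEntries<OpaqueHash, string>([[full, "value"]]);

console.log(dict.get(full));      // "value" (key truncated internally by getTruncatedKey)
console.log(dict.get(truncated)); // "value" (already TRUNCATED_HASH_SIZE bytes, used as-is)
```
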
@@ -4524,9 +5026,7 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {

  /** Iterator over entries of the dictionary (with truncated keys) */
  *entries(): Generator<[TruncatedHash, V]> {
- for (const [key, value] of this.dict.entries()) {
- yield [Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE).asOpaque(), value];
- }
+ yield* this.dict.entries();
  }

  [Symbol.iterator]() {
@@ -4536,35 +5036,53 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {

  type index$o_ArrayView<T> = ArrayView<T>;
  declare const index$o_ArrayView: typeof ArrayView;
+ type index$o_BlobDictionary<K extends BytesBlob, V> = BlobDictionary<K, V>;
+ declare const index$o_BlobDictionary: typeof BlobDictionary;
+ type index$o_CHUNK_SIZE = CHUNK_SIZE;
  type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
  declare const index$o_FixedSizeArray: typeof FixedSizeArray;
  type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
  declare const index$o_HashDictionary: typeof HashDictionary;
  type index$o_HashSet<V extends OpaqueHash> = HashSet<V>;
  declare const index$o_HashSet: typeof HashSet;
- type index$o_HashWithZeroedBit<T extends OpaqueHash> = HashWithZeroedBit<T>;
  type index$o_ImmutableHashDictionary<K extends OpaqueHash, V> = ImmutableHashDictionary<K, V>;
  type index$o_ImmutableHashSet<V extends OpaqueHash> = ImmutableHashSet<V>;
  type index$o_ImmutableSortedArray<V> = ImmutableSortedArray<V>;
  type index$o_ImmutableSortedSet<V> = ImmutableSortedSet<V>;
+ type index$o_KeyChunk = KeyChunk;
  type index$o_KeyMapper<K> = KeyMapper<K>;
  type index$o_KeyMappers<TKeys extends readonly unknown[]> = KeyMappers<TKeys>;
  type index$o_KnownSize<T, F extends string> = KnownSize<T, F>;
  type index$o_KnownSizeArray<T, F extends string> = KnownSizeArray<T, F>;
  type index$o_KnownSizeId<X> = KnownSizeId<X>;
+ type index$o_Leaf<K extends BytesBlob, V> = Leaf<K, V>;
+ type index$o_ListChildren<K extends BytesBlob, V> = ListChildren<K, V>;
+ declare const index$o_ListChildren: typeof ListChildren;
+ type index$o_MapChildren<K extends BytesBlob, V> = MapChildren<K, V>;
+ declare const index$o_MapChildren: typeof MapChildren;
+ type index$o_MaybeNode<K extends BytesBlob, V> = MaybeNode<K, V>;
  type index$o_MultiMap<TKeys extends readonly unknown[], TValue> = MultiMap<TKeys, TValue>;
  declare const index$o_MultiMap: typeof MultiMap;
  type index$o_NestedMaps<TKeys extends readonly unknown[], TValue> = NestedMaps<TKeys, TValue>;
+ type index$o_Node<K extends BytesBlob, V> = Node<K, V>;
+ declare const index$o_Node: typeof Node;
+ type index$o_OriginalKeyRef<K> = OriginalKeyRef<K>;
  type index$o_SortedArray<V> = SortedArray<V>;
  declare const index$o_SortedArray: typeof SortedArray;
  type index$o_SortedSet<V> = SortedSet<V>;
  declare const index$o_SortedSet: typeof SortedSet;
+ type index$o_StringHashDictionary<K extends OpaqueHash, V> = StringHashDictionary<K, V>;
+ declare const index$o_StringHashDictionary: typeof StringHashDictionary;
+ type index$o_SubKey<_K extends BytesBlob> = SubKey<_K>;
  type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDictionary<T, V>;
  declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
+ type index$o_U48 = U48;
  declare const index$o_asKnownSize: typeof asKnownSize;
+ declare const index$o_bytesAsU48: typeof bytesAsU48;
+ declare const index$o_getTruncatedKey: typeof getTruncatedKey;
  declare namespace index$o {
- export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
- export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
+ export { index$o_ArrayView as ArrayView, index$o_BlobDictionary as BlobDictionary, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_ListChildren as ListChildren, index$o_MapChildren as MapChildren, index$o_MultiMap as MultiMap, index$o_Node as Node, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_StringHashDictionary as StringHashDictionary, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize, index$o_bytesAsU48 as bytesAsU48, index$o_getTruncatedKey as getTruncatedKey };
+ export type { index$o_CHUNK_SIZE as CHUNK_SIZE, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyChunk as KeyChunk, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_Leaf as Leaf, index$o_MaybeNode as MaybeNode, index$o_NestedMaps as NestedMaps, index$o_OriginalKeyRef as OriginalKeyRef, index$o_SubKey as SubKey, index$o_U48 as U48 };
  }

  declare namespace bandersnatch_d_exports {