@typeberry/convert 0.4.0-fcdfbb1 → 0.4.1-0a3acb2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -0
- package/index.js +619 -81
- package/index.js.map +1 -1
- package/package.json +1 -1
package/index.js
CHANGED
@@ -4254,7 +4254,11 @@ var TestSuite;
 })(TestSuite || (TestSuite = {}));
 const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
 const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
-
+/**
+* Current version is set to track the jam-conformance testing.
+* Since we are currently at 0.7.1 not 0.7.2, we set our default version accordingly.
+*/
+const DEFAULT_VERSION = GpVersion.V0_7_1;
 const env = typeof process === "undefined" ? {} : process.env;
 let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
 let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
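
The hunk above pins `DEFAULT_VERSION` to Gray Paper 0.7.1 while still letting the `GP_VERSION` environment variable override it. A minimal sketch of that selection behaviour, not the package's actual export: the enum string values and the `parseCurrentVersion` body are assumptions here, and only the env lookup with the `?? DEFAULT_VERSION` fallback mirrors the diff.

```ts
// Hedged sketch of the version selection added above. GpVersion values and
// parseCurrentVersion are stand-ins; the ?? fallback follows the diff.
enum GpVersion { V0_6_7 = "0.6.7", V0_7_0 = "0.7.0", V0_7_1 = "0.7.1", V0_7_2 = "0.7.2" }

const DEFAULT_VERSION = GpVersion.V0_7_1;

function parseCurrentVersion(raw: string | undefined): GpVersion | undefined {
  if (raw !== undefined && (Object.values(GpVersion) as string[]).includes(raw)) {
    return raw as GpVersion;
  }
  return undefined;
}

const env: Record<string, string | undefined> = typeof process === "undefined" ? {} : process.env;
const CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
// GP_VERSION=0.7.2  -> CURRENT_VERSION === GpVersion.V0_7_2
// unset or invalid  -> CURRENT_VERSION === GpVersion.V0_7_1 (the new default)
```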
@@ -4313,8 +4317,8 @@ class Compatibility {
 /**
 * Allows selecting different values for different Gray Paper versions from one record.
 *
-*
-*
+* fallback The default value to return if no value is found for the current.
+* versions A record mapping versions to values, checking if the version is greater or equal to the current version.
 * @returns The value for the current version, or the default value.
 */
 static selectIfGreaterOrEqual({ fallback, versions, }) {
@@ -4477,7 +4481,7 @@ const workspacePathFix = dev_env.NODE_ENV === "development"

 ;// CONCATENATED MODULE: ./packages/core/utils/opaque.ts
 /**
-*
+* `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
 * specified unique token Token. It means that base type cannot be assigned to unique type by accident.
 * Good examples of opaque types include:
 * - JWTs or other tokens - these are special kinds of string used for authorization purposes.
@@ -4840,7 +4844,7 @@ function isResult(x) {
 var minimist = __nccwpck_require__(595);
 var minimist_default = /*#__PURE__*/__nccwpck_require__.n(minimist);
 ;// CONCATENATED MODULE: ./bin/convert/package.json
-const package_namespaceObject = {"rE":"0.4.
+const package_namespaceObject = {"rE":"0.4.1"};
 ;// CONCATENATED MODULE: ./packages/core/bytes/bitvec.ts

 /**
@@ -7763,9 +7767,438 @@ class ArrayView {
 }
 }

+;// CONCATENATED MODULE: ./packages/core/collections/blob-dictionary.ts
+
+
+/** A map which uses byte blobs as keys */
+class BlobDictionary extends WithDebug {
+mapNodeThreshold;
+/**
+* The root node of the dictionary.
+*
+* This is the main internal data structure that organizes entries
+* in a tree-like fashion (array-based nodes up to `mapNodeThreshold`,
+* map-based nodes beyond it). All insertions, updates, and deletions
+* operate through this structure.
+*/
+root = Node.withList();
+/**
+* Auxiliary map that stores references to the original keys and their values.
+*
+* - Overriding a value in the main structure does not replace the original key reference.
+* - Used for efficient iteration over `keys()`, `values()`, `entries()`, and computing `size`.
+*/
+keyvals = new Map();
+/**
+* Protected constructor used internally by `BlobDictionary.new`
+* and `BlobDictionary.fromEntries`.
+*
+* This enforces controlled instantiation — users should create instances
+* through the provided static factory methods instead of calling the
+* constructor directly.
+*
+* @param mapNodeThreshold - The threshold that determines when the dictionary
+* switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
+*/
+constructor(mapNodeThreshold) {
+super();
+this.mapNodeThreshold = mapNodeThreshold;
+}
+/**
+* Returns the number of entries in the dictionary.
+*
+* The count is derived from the auxiliary `keyvals` map, which stores
+* all original key references and their associated values. This ensures
+* that the `size` reflects the actual number of entries, independent of
+* internal overrides in the main `root` structure.
+*
+* @returns The total number of entries in the dictionary.
+*/
+get size() {
+return this.keyvals.size;
+}
+[TEST_COMPARE_USING]() {
+const vals = Array.from(this);
+vals.sort((a, b) => a[0].compare(b[0]).value);
+return vals;
+}
+/**
+* Creates an empty `BlobDictionary`.
+*
+* @param mapNodeThreshold - The threshold that determines when the dictionary
+* switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
+* Defaults to `0`.
+*
+* @returns A new, empty `BlobDictionary` instance.
+*/
+static new(mapNodeThreshold = 0) {
+return new BlobDictionary(mapNodeThreshold);
+}
+/**
+* Creates a new `BlobDictionary` initialized with the given entries.
+*
+* @param entries - An array of `[key, value]` pairs used to populate the dictionary.
+* @param mapNodeThreshold - The threshold that determines when the dictionary
+* switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
+* Defaults to `0`.
+*
+* @returns A new `BlobDictionary` containing the provided entries.
+*/
+static fromEntries(entries, mapNodeThreshold) {
+const dict = BlobDictionary.new(mapNodeThreshold);
+for (const [key, value] of entries) {
+dict.set(key, value);
+}
+return dict;
+}
+/**
+* Internal helper that inserts, updates or deletes an entry in the dictionary.
+*
+* Behaviour details:
+* - Passing `undefined` as `value` indicates a deletion. (E.g. `delete` uses `internalSet(key, undefined)`.)
+* - When an add (new entry) or a delete actually changes the structure, the method returns the affected leaf node.
+* - When the call only overrides an existing value (no structural add/delete), the method returns `null`.
+*
+* This method is intended for internal use by the dictionary implementation and allows `undefined` as a
+* sentinel value to signal removals.
+*
+* @param key - The key to insert, update or remove.
+* @param value - The value to associate with the key, or `undefined` to remove the key.
+* @returns The leaf node created or removed on add/delete, or `null` if the operation only overwrote an existing value.
+*/
+internalSet(key, value) {
+let node = this.root;
+const keyChunkGenerator = key.chunks(CHUNK_SIZE);
+let depth = 0;
+for (;;) {
+const maybeKeyChunk = keyChunkGenerator.next().value;
+if (maybeKeyChunk === undefined) {
+if (value === undefined) {
+return node.remove(key);
+}
+return node.set(key, value);
+}
+const keyChunk = opaque_asOpaqueType(maybeKeyChunk);
+if (node.children instanceof ListChildren) {
+const subkey = bytes_BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE * depth));
+const leaf = value !== undefined ? node.children.insert(subkey, { key, value }) : node.children.remove(subkey);
+if (subkey.length > CHUNK_SIZE && node.children.children.length > this.mapNodeThreshold) {
+node.convertListChildrenToMap();
+}
+return leaf;
+}
+depth += 1;
+const children = node.children;
+if (children instanceof ListChildren) {
+throw new Error("We handle list node earlier. If we fall through, we know it's for the `Map` case.");
+}
+if (children instanceof MapChildren) {
+const maybeNode = children.getChild(keyChunk);
+if (maybeNode !== undefined) {
+// simply go one level deeper
+node = maybeNode;
+}
+else {
+// we are trying to remove an item, but it does not exist
+if (value === undefined) {
+return null;
+}
+// no more child nodes, we insert a new one.
+const newNode = Node.withList();
+children.setChild(keyChunk, newNode);
+node = newNode;
+}
+continue;
+}
+debug_assertNever(children);
+}
+}
+/**
+* Adds a new entry to the dictionary or updates the value of an existing key.
+*
+* If an entry with the given key already exists, its value is replaced
+* with the new one.
+*
+* @param key - The key to add or update in the dictionary.
+* @param value - The value to associate with the specified key.
+* @returns Nothing (`void`).
+*/
+set(key, value) {
+const leaf = this.internalSet(key, value);
+if (leaf !== null) {
+this.keyvals.set(leaf.key, leaf);
+}
+}
+/**
+* Retrieves the value associated with the given key from the dictionary.
+*
+* If the key does not exist, this method returns `undefined`.
+*
+* @param key - The key whose associated value should be retrieved.
+* @returns The value associated with the specified key, or `undefined` if the key is not present.
+*/
+get(key) {
+let node = this.root;
+const pathChunksGenerator = key.chunks(CHUNK_SIZE);
+let depth = 0;
+while (node !== undefined) {
+const maybePathChunk = pathChunksGenerator.next().value;
+if (node.children instanceof ListChildren) {
+const subkey = bytes_BytesBlob.blobFrom(key.raw.subarray(depth * CHUNK_SIZE));
+const child = node.children.find(subkey);
+if (child !== null) {
+return child.value;
+}
+}
+if (maybePathChunk === undefined) {
+return node.getLeaf()?.value;
+}
+if (node.children instanceof MapChildren) {
+const pathChunk = opaque_asOpaqueType(maybePathChunk);
+node = node.children.getChild(pathChunk);
+depth += 1;
+}
+}
+return undefined;
+}
+/**
+* Checks whether the dictionary contains an entry for the given key.
+*
+* ⚠️ **Note:** Avoid using `has(...)` together with `get(...)` in a pattern like this:
+*
+* ```ts
+* if (dict.has(key)) {
+* const value = dict.get(key);
+* ...
+* }
+* ```
+*
+* This approach performs two lookups for the same key.
+*
+* Instead, prefer the following pattern, which retrieves the value once:
+*
+* ```ts
+* const value = dict.get(key);
+* if (value !== undefined) {
+* ...
+* }
+* ```
+*
+* @param key - The key to check for.
+* @returns `true` if the dictionary contains an entry for the given key, otherwise `false`.
+*/
+has(key) {
+return this.get(key) !== undefined;
+}
+/**
+* Removes an entry with the specified key from the dictionary.
+*
+* Internally, this calls {@link internalSet} with `undefined` to mark the entry as deleted.
+*
+* @param key - The key of the entry to remove.
+* @returns `true` if an entry was removed (i.e. the key existed), otherwise `false`.
+*/
+delete(key) {
+const leaf = this.internalSet(key, undefined);
+if (leaf !== null) {
+this.keyvals.delete(leaf.key);
+return true;
+}
+return false;
+}
+/**
+* Returns an iterator over the keys in the dictionary.
+*
+* The iterator yields each key in insertion order.
+*
+* @returns An iterator over all keys in the dictionary.
+*/
+keys() {
+return this.keyvals.keys();
+}
+/**
+* Returns an iterator over the values in the dictionary.
+*
+* The iterator yields each value in insertion order.
+*
+* @returns An iterator over all values in the dictionary.
+*/
+*values() {
+for (const leaf of this.keyvals.values()) {
+yield leaf.value;
+}
+}
+/**
+* Returns an iterator over the `[key, value]` pairs in the dictionary.
+*
+* The iterator yields entries in insertion order.
+*
+* @returns An iterator over `[key, value]` tuples for each entry in the dictionary.
+*/
+*entries() {
+for (const leaf of this.keyvals.values()) {
+yield [leaf.key, leaf.value];
+}
+}
+/**
+* Default iterator for the dictionary.
+*
+* Equivalent to calling {@link entries}.
+* Enables iteration with `for...of`:
+*
+* ```ts
+* for (const [key, value] of dict) {
+* ...
+* }
+* ```
+*
+* @returns An iterator over `[key, value]` pairs.
+*/
+[Symbol.iterator]() {
+return this.entries();
+}
+/**
+* Creates a new sorted array of values, ordered by their corresponding keys.
+*
+* Iterates over all entries in the dictionary and sorts them according
+* to the provided comparator function applied to the keys.
+*
+* @param comparator - A comparator function that can compare two keys.
+*
+* @returns A new array containing all values from the dictionary,
+* sorted according to their keys.
+*/
+toSortedArray(comparator) {
+const vals = Array.from(this);
+vals.sort((a, b) => comparator(a[0], b[0]).value);
+return vals.map((x) => x[1]);
+}
+}
+const CHUNK_SIZE = 6;
+/**
+* A function to transform a bytes chunk (up to 6 bytes into U48 number)
+*
+* Note that it uses 3 additional bits to store length(`value * 8 + len;`),
+* It is needed to distinguish shorter chunks that have 0s at the end, for example: [1, 2] and [1, 2, 0]
+* */
+function bytesAsU48(bytes) {
+const len = bytes.length;
+debug_check `${len <= CHUNK_SIZE} Length has to be <= ${CHUNK_SIZE}, got: ${len}`;
+let value = bytes[3] | (bytes[2] << 8) | (bytes[1] << 16) | (bytes[0] << 24);
+for (let i = 4; i < bytes.length; i++) {
+value = value * 256 + bytes[i];
+}
+return value * 8 + len;
+}
+class Node {
+leaf;
+children;
+convertListChildrenToMap() {
+if (!(this.children instanceof ListChildren)) {
+return;
+}
+this.children = MapChildren.fromListNode(this.children);
+}
+static withList() {
+return new Node(undefined, ListChildren.new());
+}
+static withMap() {
+return new Node(undefined, MapChildren.new());
+}
+constructor(leaf, children) {
+this.leaf = leaf;
+this.children = children;
+}
+getLeaf() {
+return this.leaf;
+}
+remove(_key) {
+if (this.leaf === undefined) {
+return null;
+}
+const removedLeaf = this.leaf;
+this.leaf = undefined;
+return removedLeaf;
+}
+set(key, value) {
+if (this.leaf === undefined) {
+this.leaf = { key, value };
+return this.leaf;
+}
+this.leaf.value = value;
+return null;
+}
+}
+class ListChildren {
+children = [];
+constructor() { }
+find(key) {
+const result = this.children.find((item) => item[0].isEqualTo(key));
+if (result !== undefined) {
+return result[1];
+}
+return null;
+}
+remove(key) {
+const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
+if (existingIndex >= 0) {
+const ret = this.children.splice(existingIndex, 1);
+return ret[0][1];
+}
+return null;
+}
+insert(key, leaf) {
+const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
+if (existingIndex >= 0) {
+const existing = this.children[existingIndex];
+existing[1].value = leaf.value;
+return null;
+}
+this.children.push([key, leaf]);
+return leaf;
+}
+static new() {
+return new ListChildren();
+}
+}
+class MapChildren {
+children = new Map();
+constructor() { }
+static new() {
+return new MapChildren();
+}
+static fromListNode(node) {
+const mapNode = new MapChildren();
+for (const [key, leaf] of node.children) {
+const currentKeyChunk = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(key.raw.subarray(0, CHUNK_SIZE)));
+const subKey = bytes_BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE));
+let child = mapNode.getChild(currentKeyChunk);
+if (child === undefined) {
+child = Node.withList();
+mapNode.setChild(currentKeyChunk, child);
+}
+const children = child.children;
+children.insert(subKey, leaf);
+}
+return mapNode;
+}
+getChild(keyChunk) {
+const chunkAsNumber = bytesAsU48(keyChunk.raw);
+return this.children.get(chunkAsNumber);
+}
+setChild(keyChunk, node) {
+const chunkAsNumber = bytesAsU48(keyChunk.raw);
+this.children.set(chunkAsNumber, node);
+}
+}
+
 ;// CONCATENATED MODULE: ./packages/core/collections/hash-dictionary.ts
-/**
-
+/**
+* A map which uses hashes as keys.
+*
+* @deprecated
+* */
+class StringHashDictionary {
 // TODO [ToDr] [crit] We can't use `TrieHash` directly in the map,
 // because of the way it's being compared. Hence having `string` here.
 // This has to be benchmarked and re-written to a custom map most likely.
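
The new BlobDictionary walks a trie of nodes keyed by 6-byte chunks of the blob; MapChildren stores those chunks in a plain `Map` keyed by the number produced by `bytesAsU48`, which packs up to 6 bytes plus a 3-bit length. A standalone copy of that helper, shown only to illustrate why the `* 8 + len` suffix matters (the real function is the one in the hunk above, which additionally runs a debug check on the length):

```ts
// Standalone copy of bytesAsU48 from the diff, for illustration. The trailing
// `* 8 + len` keeps chunks such as [1, 2] and [1, 2, 0] distinct, which a plain
// big-endian packing of the bytes alone would conflate.
const CHUNK_SIZE = 6;

function bytesAsU48(bytes: Uint8Array): number {
  const len = bytes.length; // the caller guarantees len <= CHUNK_SIZE
  let value = bytes[3] | (bytes[2] << 8) | (bytes[1] << 16) | (bytes[0] << 24);
  for (let i = 4; i < bytes.length; i++) {
    value = value * 256 + bytes[i];
  }
  return value * 8 + len;
}

console.log(bytesAsU48(Uint8Array.of(1, 2)));    // 135266306
console.log(bytesAsU48(Uint8Array.of(1, 2, 0))); // 135266307 (differs only via the 3-bit length suffix)
```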
@@ -7831,6 +8264,17 @@ class HashDictionary {
 }
 }

+/**
+* A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
+* In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
+*/
+const BLOB_DICTIONARY_THRESHOLD = 5;
+class HashDictionary extends BlobDictionary {
+constructor() {
+super(BLOB_DICTIONARY_THRESHOLD);
+}
+}
+
 ;// CONCATENATED MODULE: ./packages/core/collections/hash-set.ts

 /** A set specialized for storing hashes. */
@@ -8295,6 +8739,18 @@ class SortedSet extends SortedArray {



+function getTruncatedKey(key) {
+// Always return exactly TRUNCATED_HASH_SIZE bytes.
+if (key.length === TRUNCATED_HASH_SIZE) {
+return key;
+}
+return bytes_Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE);
+}
+/**
+* A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
+* In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
+*/
+const truncated_hash_dictionary_BLOB_DICTIONARY_THRESHOLD = 5;
 /**
 * A collection of hash-based keys (likely `StateKey`s) which ignores
 * differences on the last byte.
@@ -8307,48 +8763,37 @@ class TruncatedHashDictionary {
 * Each key will be copied and have the last byte replace with a 0.
 */
 static fromEntries(entries) {
-
-const mapped = Array.from(entries).map(([key, value]) => {
-const newKey = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
-newKey.raw.set(key.raw.subarray(0, TRUNCATED_HASH_SIZE));
-return [newKey, value];
-});
-return new TruncatedHashDictionary(HashDictionary.fromEntries(mapped));
+return new TruncatedHashDictionary(BlobDictionary.fromEntries(Array.from(entries).map(([key, value]) => [getTruncatedKey(key), value]), truncated_hash_dictionary_BLOB_DICTIONARY_THRESHOLD));
 }
-/** A truncated key which we re-use to query the dictionary. */
-truncatedKey = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
 constructor(dict) {
 this.dict = dict;
 }
 [TEST_COMPARE_USING]() {
-return this.dict;
+return Array.from(this.dict);
 }
 /** Return number of items in the dictionary. */
 get size() {
 return this.dict.size;
 }
 /** Retrieve a value that matches the key on `TRUNCATED_HASH_SIZE`. */
-get(
-
-return this.dict.get(
+get(key) {
+const truncatedKey = getTruncatedKey(key);
+return this.dict.get(truncatedKey);
 }
 /** Return true if the key is present in the dictionary */
-has(
-
-return this.dict.has(
+has(key) {
+const truncatedKey = getTruncatedKey(key);
+return this.dict.has(truncatedKey);
 }
 /** Set or update a value that matches the key on `TRUNCATED_HASH_SIZE`. */
-set(
-
-
-const key = bytes_Bytes.zero(hash_HASH_SIZE);
-key.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
-this.dict.set(key.asOpaque(), value);
+set(key, value) {
+const truncatedKey = getTruncatedKey(key);
+this.dict.set(truncatedKey, value);
 }
 /** Remove a value that matches the key on `TRUNCATED_HASH_SIZE`. */
-delete(
-
-this.dict.delete(
+delete(key) {
+const truncatedKey = getTruncatedKey(key);
+this.dict.delete(truncatedKey);
 }
 /** Iterator over values of the dictionary. */
 values() {
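
After this refactor every TruncatedHashDictionary operation funnels its key through `getTruncatedKey`, so two hashes that differ only beyond the truncation point resolve to the same entry. A hedged sketch of that rule using plain `Uint8Array`s: the bundle's `bytes_Bytes`/`bytes_BytesBlob` wrappers are replaced with raw arrays, and the 31-byte `TRUNCATED_HASH_SIZE` is an assumed value consistent with the "ignores differences on the last byte" comment above, not a constant taken from the diff.

```ts
// Sketch of the truncation rule; TRUNCATED_HASH_SIZE = 31 is an assumption here.
const HASH_SIZE = 32;
const TRUNCATED_HASH_SIZE = 31;

function getTruncatedKey(key: Uint8Array): Uint8Array {
  // Always return exactly TRUNCATED_HASH_SIZE bytes, mirroring the helper added above.
  return key.length === TRUNCATED_HASH_SIZE ? key : key.subarray(0, TRUNCATED_HASH_SIZE);
}

const a = new Uint8Array(HASH_SIZE).fill(7);
const b = Uint8Array.from(a);
b[HASH_SIZE - 1] = 0xff; // differs from `a` only in the final byte

const ta = getTruncatedKey(a);
const tb = getTruncatedKey(b);
// Both truncate to the same 31-byte prefix, so a dictionary keyed on the truncated
// form (as TruncatedHashDictionary now is) treats them as the same entry.
console.log(ta.length === tb.length && ta.every((byte, i) => byte === tb[i])); // true
```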
@@ -8356,9 +8801,7 @@ class TruncatedHashDictionary {
 }
 /** Iterator over entries of the dictionary (with truncated keys) */
 *entries() {
-
-yield [bytes_Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE).asOpaque(), value];
-}
+yield* this.dict.entries();
 }
 [Symbol.iterator]() {
 return this.entries();
@@ -8375,6 +8818,7 @@ class TruncatedHashDictionary {



+
 ;// CONCATENATED MODULE: ./packages/jam/config/chain-spec.ts


@@ -12689,11 +13133,32 @@ const ENTROPY_ENTRIES = 4;

 var state_update_UpdatePreimageKind;
 (function (UpdatePreimageKind) {
-/**
+/**
+* Insert new preimage and optionally update it's lookup history.
+*
+* Used in: `provide`
+*
+* https://graypaper.fluffylabs.dev/#/ab2cdbd/383904383904?v=0.7.2
+*/
 UpdatePreimageKind[UpdatePreimageKind["Provide"] = 0] = "Provide";
-/**
+/**
+* Remove a preimage and it's lookup history.
+*
+* Used in: `forget` and `eject`
+*
+* https://graypaper.fluffylabs.dev/#/ab2cdbd/38c701380202?v=0.7.2
+* https://graypaper.fluffylabs.dev/#/ab2cdbd/379102379302?v=0.7.2
+*/
 UpdatePreimageKind[UpdatePreimageKind["Remove"] = 1] = "Remove";
-/**
+/**
+* Update or add lookup history for preimage hash/len to given value.
+*
+* Used in: `solicit` and `forget`
+*
+* https://graypaper.fluffylabs.dev/#/ab2cdbd/382802382802?v=0.7.2
+* https://graypaper.fluffylabs.dev/#/ab2cdbd/384002384b02?v=0.7.2
+* https://graypaper.fluffylabs.dev/#/ab2cdbd/38c60038ea00?v=0.7.2
+*/
 UpdatePreimageKind[UpdatePreimageKind["UpdateOrAdd"] = 2] = "UpdateOrAdd";
 })(state_update_UpdatePreimageKind || (state_update_UpdatePreimageKind = {}));
 /**
@@ -12701,7 +13166,7 @@ var state_update_UpdatePreimageKind;
 *
 * Can be one of the following cases:
 * 1. Provide a new preimage blob and set the lookup history to available at `slot`.
-* 2. Remove (
+* 2. Remove (forget) a preimage and it's lookup history.
 * 3. Update `LookupHistory` with given value.
 */
 class UpdatePreimage {
@@ -14584,7 +15049,6 @@ class LeafNode {
 /**
 * Get the byte length of embedded value.
 *
-* @remark
 * Note in case this node only contains hash this is going to be 0.
 */
 getValueLength() {
@@ -14595,7 +15059,6 @@ class LeafNode {
 /**
 * Returns the embedded value.
 *
-* @remark
 * Note that this is going to be empty for a regular leaf node (i.e. containing a hash).
 */
 getValue() {
@@ -14605,7 +15068,6 @@ class LeafNode {
 /**
 * Returns contained value hash.
 *
-* @remark
 * Note that for embedded value this is going to be full 0-padded 32 bytes.
 */
 getValueHash() {
@@ -15801,45 +16263,6 @@ var status_Status;



-;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/gas.ts
-
-
-/** Create a new gas counter instance depending on the gas value. */
-function gas_gasCounter(gas) {
-return new GasCounterU64(tryAsU64(gas));
-}
-class GasCounterU64 {
-gas;
-initialGas;
-constructor(gas) {
-this.gas = gas;
-this.initialGas = tryAsGas(gas);
-}
-set(g) {
-this.gas = tryAsU64(g);
-}
-get() {
-return tryAsGas(this.gas);
-}
-sub(g) {
-const result = this.gas - tryAsU64(g);
-if (result >= 0n) {
-this.gas = tryAsU64(result);
-return false;
-}
-this.gas = tryAsU64(0n);
-return true;
-}
-used() {
-const gasConsumed = tryAsU64(this.initialGas) - this.gas;
-// In we have less than zero left we assume that all gas has been consumed.
-if (gasConsumed < 0) {
-return this.initialGas;
-}
-return tryAsGas(gasConsumed);
-}
-}
-
 ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts


@@ -17574,6 +17997,45 @@ class basic_blocks_BasicBlocks {
 ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/basic-blocks/index.ts


+;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/gas.ts
+
+
+/** Create a new gas counter instance depending on the gas value. */
+function gas_gasCounter(gas) {
+return new GasCounterU64(tryAsU64(gas));
+}
+class GasCounterU64 {
+gas;
+initialGas;
+constructor(gas) {
+this.gas = gas;
+this.initialGas = tryAsGas(gas);
+}
+set(g) {
+this.gas = tryAsU64(g);
+}
+get() {
+return tryAsGas(this.gas);
+}
+sub(g) {
+const result = this.gas - tryAsU64(g);
+if (result >= 0n) {
+this.gas = tryAsU64(result);
+return false;
+}
+this.gas = tryAsU64(0n);
+return true;
+}
+used() {
+const gasConsumed = tryAsU64(this.initialGas) - this.gas;
+// In we have less than zero left we assume that all gas has been consumed.
+if (gasConsumed < 0) {
+return this.initialGas;
+}
+return tryAsGas(gasConsumed);
+}
+}
+
 ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/instruction-gas-map.ts


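
The relocated gas.ts keeps the same GasCounterU64 contract: `sub` charges gas and returns `true` only when the charge exceeds what is left (clamping the remainder to zero), and `used` reports consumption relative to the initial value. A behavioural sketch of that contract with plain bigints standing in for the bundle's `tryAsU64`/`tryAsGas` coercions:

```ts
// Behavioural sketch of the GasCounterU64 contract shown in the hunk above;
// plain bigints replace the bundle's tryAsU64/tryAsGas branded types.
class GasCounterSketch {
  private gas: bigint;
  private readonly initialGas: bigint;

  constructor(gas: bigint) {
    this.gas = gas;
    this.initialGas = gas;
  }

  /** Returns true when the charge does not fit into the remaining gas (out of gas). */
  sub(g: bigint): boolean {
    const result = this.gas - g;
    if (result >= 0n) {
      this.gas = result;
      return false;
    }
    this.gas = 0n;
    return true;
  }

  /** Gas consumed so far, never exceeding the initial allowance. */
  used(): bigint {
    const consumed = this.initialGas - this.gas;
    return consumed < 0n ? this.initialGas : consumed;
  }
}

const counter = new GasCounterSketch(100n);
counter.sub(30n);            // false: 70 gas left
console.log(counter.used()); // 30n
counter.sub(1000n);          // true: out of gas, remaining clamps to 0
console.log(counter.used()); // 100n
```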
@@ -19422,12 +19884,88 @@ class interpreter_Interpreter {
 }
 }

+;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/debugger-adapter.ts
+
+
+
+
+
+
+class DebuggerAdapter {
+pvm;
+constructor(useSbrkGas = false) {
+this.pvm = new Interpreter({ useSbrkGas });
+}
+resetGeneric(rawProgram, flatRegisters, initialGas) {
+this.pvm.resetGeneric(rawProgram, 0, tryAsGas(initialGas), new Registers(flatRegisters));
+}
+reset(rawProgram, pc, gas, maybeRegisters, maybeMemory) {
+this.pvm.resetGeneric(rawProgram, pc, tryAsGas(gas), maybeRegisters, maybeMemory);
+}
+getPageDump(pageNumber) {
+const page = this.pvm.getMemoryPage(pageNumber);
+if (page === null) {
+// page wasn't allocated so we return an empty page
+return safeAllocUint8Array(PAGE_SIZE);
+}
+if (page.length === PAGE_SIZE) {
+// page was allocated and has a proper size so we can simply return it
+return page;
+}
+// page was allocated but it is shorter than PAGE_SIZE so we have to extend it
+const fullPage = safeAllocUint8Array(PAGE_SIZE);
+fullPage.set(page);
+return fullPage;
+}
+setMemory(address, value) {
+this.pvm.memory.storeFrom(tryAsMemoryIndex(address), value);
+}
+getExitArg() {
+return this.pvm.getExitParam() ?? 0;
+}
+getStatus() {
+return this.pvm.getStatus();
+}
+nextStep() {
+return this.pvm.nextStep() === Status.OK;
+}
+nSteps(steps) {
+check `${steps >>> 0 > 0} Expected a positive integer got ${steps}`;
+for (let i = 0; i < steps; i++) {
+const isOk = this.nextStep();
+if (!isOk) {
+return false;
+}
+}
+return true;
+}
+getRegisters() {
+return this.pvm.registers.getAllU64();
+}
+setRegisters(registers) {
+this.pvm.registers.copyFrom(new Registers(registers));
+}
+getProgramCounter() {
+return this.pvm.getPC();
+}
+setNextProgramCounter(nextPc) {
+this.pvm.setNextPC(nextPc);
+}
+getGasLeft() {
+return BigInt(this.pvm.gas.get());
+}
+setGasLeft(gas) {
+this.pvm.gas.set(tryAsGas(gas));
+}
+}
+
 ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/index.ts





+
 ;// CONCATENATED MODULE: ./bin/test-runner/w3f/pvm.ts


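
The new DebuggerAdapter exposes the interpreter through a flat stepping interface; its `nSteps` helper simply repeats `nextStep` and stops as soon as the status is no longer OK. A small self-contained sketch of that stepping contract, where the `StepMachine` interface is a hypothetical stand-in for the wrapped Interpreter and only the loop logic mirrors the diff:

```ts
// Sketch of the nSteps loop from DebuggerAdapter above, against a stand-in machine.
interface StepMachine {
  nextStep(): boolean; // true while the machine can keep running (Status.OK)
}

function nSteps(machine: StepMachine, steps: number): boolean {
  for (let i = 0; i < steps; i++) {
    if (!machine.nextStep()) {
      return false; // halted, trapped, or out of gas before `steps` completed
    }
  }
  return true;
}

// Example: a toy machine that can only advance three more times.
let budget = 3;
const machine: StepMachine = { nextStep: () => budget-- > 0 };
console.log(nSteps(machine, 2)); // true  (2 steps taken, budget left: 1)
console.log(nSteps(machine, 5)); // false (stops during its second step)
```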