@typeberry/jam 0.4.0 → 0.4.1-0a3acb2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -24314,7 +24314,11 @@ var TestSuite;
24314
24314
  })(TestSuite || (TestSuite = {}));
24315
24315
  const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
24316
24316
  const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
24317
- const DEFAULT_VERSION = GpVersion.V0_7_2;
24317
+ /**
24318
+ * The current version is set to track jam-conformance testing.
24319
+ * Since we are currently at 0.7.1, not 0.7.2, we set our default version accordingly.
24320
+ */
24321
+ const DEFAULT_VERSION = GpVersion.V0_7_1;
24318
24322
  const env = typeof process === "undefined" ? {} : process.env;
24319
24323
  let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
24320
24324
  let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
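As the surrounding context shows, the bundled default can still be overridden at runtime through the `GP_VERSION` and `TEST_SUITE` environment variables. A minimal sketch, assuming the strings accepted by `parseCurrentVersion`/`parseCurrentSuite` match the version and suite names used above (the exact accepted format is not shown in this diff):

```ts
// Hypothetical override: pin the runtime back to 0.7.2 even though the new
// bundled default is 0.7.1. Must be set before @typeberry/jam is initialized.
process.env.GP_VERSION = "0.7.2";     // assumed format
process.env.TEST_SUITE = "w3f-davxy"; // assumed format
```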
@@ -24373,8 +24377,8 @@ class Compatibility {
24373
24377
  /**
24374
24378
  * Allows selecting different values for different Gray Paper versions from one record.
24375
24379
  *
24376
- * @param fallback The default value to return if no value is found for the current.
24377
- * @param record A record mapping versions to values, checking if the version is greater or equal to the current version.
24380
+ * fallback The default value to return if no value is found for the current version.
24381
+ * versions A record mapping versions to values, checking if the version is greater or equal to the current version.
24378
24382
  * @returns The value for the current version, or the default value.
24379
24383
  */
24380
24384
  static selectIfGreaterOrEqual({ fallback, versions, }) {
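A shape-only usage sketch of the selector documented above; the `GpVersion` members are taken from earlier in this diff, while the record values are made up for illustration. The exact matching rule between the record's versions and the current version follows the implementation, which is not shown in full here:

```ts
// Pick a per-version constant, with a fallback for versions that have no entry.
const value = Compatibility.selectIfGreaterOrEqual({
  fallback: 8,
  versions: { [GpVersion.V0_7_0]: 12, [GpVersion.V0_7_2]: 16 },
});
```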
@@ -24537,7 +24541,7 @@ const workspacePathFix = dev_env.NODE_ENV === "development"
24537
24541
 
24538
24542
  ;// CONCATENATED MODULE: ./packages/core/utils/opaque.ts
24539
24543
  /**
24540
- * @fileoverview `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
24544
+ * `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
24541
24545
  * specified unique token Token. It means that base type cannot be assigned to unique type by accident.
24542
24546
  * Good examples of opaque types include:
24543
24547
  * - JWTs or other tokens - these are special kinds of string used for authorization purposes.
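A self-contained sketch of the opaque-type pattern described in this comment; the package's actual `Opaque` definition may differ in details, and `ServiceId` is just an illustrative name:

```ts
declare const __token: unique symbol;
type Opaque<T, Token extends string> = T & { readonly [__token]: Token };

type ServiceId = Opaque<number, "ServiceId">;
const asServiceId = (n: number): ServiceId => n as ServiceId;

const id: ServiceId = asServiceId(5);
const plain: number = id;    // ok: an opaque number is still a number
// const bad: ServiceId = 5; // compile error: a plain number is not a ServiceId
```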
@@ -28430,9 +28434,438 @@ class ArrayView {
28430
28434
  }
28431
28435
  }
28432
28436
 
28437
+ ;// CONCATENATED MODULE: ./packages/core/collections/blob-dictionary.ts
28438
+
28439
+
28440
+ /** A map which uses byte blobs as keys */
28441
+ class BlobDictionary extends WithDebug {
28442
+ mapNodeThreshold;
28443
+ /**
28444
+ * The root node of the dictionary.
28445
+ *
28446
+ * This is the main internal data structure that organizes entries
28447
+ * in a tree-like fashion (array-based nodes up to `mapNodeThreshold`,
28448
+ * map-based nodes beyond it). All insertions, updates, and deletions
28449
+ * operate through this structure.
28450
+ */
28451
+ root = Node.withList();
28452
+ /**
28453
+ * Auxiliary map that stores references to the original keys and their values.
28454
+ *
28455
+ * - Overriding a value in the main structure does not replace the original key reference.
28456
+ * - Used for efficient iteration over `keys()`, `values()`, `entries()`, and computing `size`.
28457
+ */
28458
+ keyvals = new Map();
28459
+ /**
28460
+ * Protected constructor used internally by `BlobDictionary.new`
28461
+ * and `BlobDictionary.fromEntries`.
28462
+ *
28463
+ * This enforces controlled instantiation — users should create instances
28464
+ * through the provided static factory methods instead of calling the
28465
+ * constructor directly.
28466
+ *
28467
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
28468
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
28469
+ */
28470
+ constructor(mapNodeThreshold) {
28471
+ super();
28472
+ this.mapNodeThreshold = mapNodeThreshold;
28473
+ }
28474
+ /**
28475
+ * Returns the number of entries in the dictionary.
28476
+ *
28477
+ * The count is derived from the auxiliary `keyvals` map, which stores
28478
+ * all original key references and their associated values. This ensures
28479
+ * that the `size` reflects the actual number of entries, independent of
28480
+ * internal overrides in the main `root` structure.
28481
+ *
28482
+ * @returns The total number of entries in the dictionary.
28483
+ */
28484
+ get size() {
28485
+ return this.keyvals.size;
28486
+ }
28487
+ [TEST_COMPARE_USING]() {
28488
+ const vals = Array.from(this);
28489
+ vals.sort((a, b) => a[0].compare(b[0]).value);
28490
+ return vals;
28491
+ }
28492
+ /**
28493
+ * Creates an empty `BlobDictionary`.
28494
+ *
28495
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
28496
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
28497
+ * Defaults to `0`.
28498
+ *
28499
+ * @returns A new, empty `BlobDictionary` instance.
28500
+ */
28501
+ static new(mapNodeThreshold = 0) {
28502
+ return new BlobDictionary(mapNodeThreshold);
28503
+ }
28504
+ /**
28505
+ * Creates a new `BlobDictionary` initialized with the given entries.
28506
+ *
28507
+ * @param entries - An array of `[key, value]` pairs used to populate the dictionary.
28508
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
28509
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
28510
+ * Defaults to `0`.
28511
+ *
28512
+ * @returns A new `BlobDictionary` containing the provided entries.
28513
+ */
28514
+ static fromEntries(entries, mapNodeThreshold) {
28515
+ const dict = BlobDictionary.new(mapNodeThreshold);
28516
+ for (const [key, value] of entries) {
28517
+ dict.set(key, value);
28518
+ }
28519
+ return dict;
28520
+ }
28521
+ /**
28522
+ * Internal helper that inserts, updates or deletes an entry in the dictionary.
28523
+ *
28524
+ * Behaviour details:
28525
+ * - Passing `undefined` as `value` indicates a deletion. (E.g. `delete` uses `internalSet(key, undefined)`.)
28526
+ * - When an add (new entry) or a delete actually changes the structure, the method returns the affected leaf node.
28527
+ * - When the call only overrides an existing value (no structural add/delete), the method returns `null`.
28528
+ *
28529
+ * This method is intended for internal use by the dictionary implementation and allows `undefined` as a
28530
+ * sentinel value to signal removals.
28531
+ *
28532
+ * @param key - The key to insert, update or remove.
28533
+ * @param value - The value to associate with the key, or `undefined` to remove the key.
28534
+ * @returns The leaf node created or removed on add/delete, or `null` if the operation only overwrote an existing value.
28535
+ */
28536
+ internalSet(key, value) {
28537
+ let node = this.root;
28538
+ const keyChunkGenerator = key.chunks(CHUNK_SIZE);
28539
+ let depth = 0;
28540
+ for (;;) {
28541
+ const maybeKeyChunk = keyChunkGenerator.next().value;
28542
+ if (maybeKeyChunk === undefined) {
28543
+ if (value === undefined) {
28544
+ return node.remove(key);
28545
+ }
28546
+ return node.set(key, value);
28547
+ }
28548
+ const keyChunk = opaque_asOpaqueType(maybeKeyChunk);
28549
+ if (node.children instanceof ListChildren) {
28550
+ const subkey = bytes_BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE * depth));
28551
+ const leaf = value !== undefined ? node.children.insert(subkey, { key, value }) : node.children.remove(subkey);
28552
+ if (subkey.length > CHUNK_SIZE && node.children.children.length > this.mapNodeThreshold) {
28553
+ node.convertListChildrenToMap();
28554
+ }
28555
+ return leaf;
28556
+ }
28557
+ depth += 1;
28558
+ const children = node.children;
28559
+ if (children instanceof ListChildren) {
28560
+ throw new Error("We handle list node earlier. If we fall through, we know it's for the `Map` case.");
28561
+ }
28562
+ if (children instanceof MapChildren) {
28563
+ const maybeNode = children.getChild(keyChunk);
28564
+ if (maybeNode !== undefined) {
28565
+ // simply go one level deeper
28566
+ node = maybeNode;
28567
+ }
28568
+ else {
28569
+ // we are trying to remove an item, but it does not exist
28570
+ if (value === undefined) {
28571
+ return null;
28572
+ }
28573
+ // no more child nodes, we insert a new one.
28574
+ const newNode = Node.withList();
28575
+ children.setChild(keyChunk, newNode);
28576
+ node = newNode;
28577
+ }
28578
+ continue;
28579
+ }
28580
+ debug_assertNever(children);
28581
+ }
28582
+ }
28583
+ /**
28584
+ * Adds a new entry to the dictionary or updates the value of an existing key.
28585
+ *
28586
+ * If an entry with the given key already exists, its value is replaced
28587
+ * with the new one.
28588
+ *
28589
+ * @param key - The key to add or update in the dictionary.
28590
+ * @param value - The value to associate with the specified key.
28591
+ * @returns Nothing (`void`).
28592
+ */
28593
+ set(key, value) {
28594
+ const leaf = this.internalSet(key, value);
28595
+ if (leaf !== null) {
28596
+ this.keyvals.set(leaf.key, leaf);
28597
+ }
28598
+ }
28599
+ /**
28600
+ * Retrieves the value associated with the given key from the dictionary.
28601
+ *
28602
+ * If the key does not exist, this method returns `undefined`.
28603
+ *
28604
+ * @param key - The key whose associated value should be retrieved.
28605
+ * @returns The value associated with the specified key, or `undefined` if the key is not present.
28606
+ */
28607
+ get(key) {
28608
+ let node = this.root;
28609
+ const pathChunksGenerator = key.chunks(CHUNK_SIZE);
28610
+ let depth = 0;
28611
+ while (node !== undefined) {
28612
+ const maybePathChunk = pathChunksGenerator.next().value;
28613
+ if (node.children instanceof ListChildren) {
28614
+ const subkey = bytes_BytesBlob.blobFrom(key.raw.subarray(depth * CHUNK_SIZE));
28615
+ const child = node.children.find(subkey);
28616
+ if (child !== null) {
28617
+ return child.value;
28618
+ }
28619
+ }
28620
+ if (maybePathChunk === undefined) {
28621
+ return node.getLeaf()?.value;
28622
+ }
28623
+ if (node.children instanceof MapChildren) {
28624
+ const pathChunk = opaque_asOpaqueType(maybePathChunk);
28625
+ node = node.children.getChild(pathChunk);
28626
+ depth += 1;
28627
+ }
28628
+ }
28629
+ return undefined;
28630
+ }
28631
+ /**
28632
+ * Checks whether the dictionary contains an entry for the given key.
28633
+ *
28634
+ * ⚠️ **Note:** Avoid using `has(...)` together with `get(...)` in a pattern like this:
28635
+ *
28636
+ * ```ts
28637
+ * if (dict.has(key)) {
28638
+ * const value = dict.get(key);
28639
+ * ...
28640
+ * }
28641
+ * ```
28642
+ *
28643
+ * This approach performs two lookups for the same key.
28644
+ *
28645
+ * Instead, prefer the following pattern, which retrieves the value once:
28646
+ *
28647
+ * ```ts
28648
+ * const value = dict.get(key);
28649
+ * if (value !== undefined) {
28650
+ * ...
28651
+ * }
28652
+ * ```
28653
+ *
28654
+ * @param key - The key to check for.
28655
+ * @returns `true` if the dictionary contains an entry for the given key, otherwise `false`.
28656
+ */
28657
+ has(key) {
28658
+ return this.get(key) !== undefined;
28659
+ }
28660
+ /**
28661
+ * Removes an entry with the specified key from the dictionary.
28662
+ *
28663
+ * Internally, this calls {@link internalSet} with `undefined` to mark the entry as deleted.
28664
+ *
28665
+ * @param key - The key of the entry to remove.
28666
+ * @returns `true` if an entry was removed (i.e. the key existed), otherwise `false`.
28667
+ */
28668
+ delete(key) {
28669
+ const leaf = this.internalSet(key, undefined);
28670
+ if (leaf !== null) {
28671
+ this.keyvals.delete(leaf.key);
28672
+ return true;
28673
+ }
28674
+ return false;
28675
+ }
28676
+ /**
28677
+ * Returns an iterator over the keys in the dictionary.
28678
+ *
28679
+ * The iterator yields each key in insertion order.
28680
+ *
28681
+ * @returns An iterator over all keys in the dictionary.
28682
+ */
28683
+ keys() {
28684
+ return this.keyvals.keys();
28685
+ }
28686
+ /**
28687
+ * Returns an iterator over the values in the dictionary.
28688
+ *
28689
+ * The iterator yields each value in insertion order.
28690
+ *
28691
+ * @returns An iterator over all values in the dictionary.
28692
+ */
28693
+ *values() {
28694
+ for (const leaf of this.keyvals.values()) {
28695
+ yield leaf.value;
28696
+ }
28697
+ }
28698
+ /**
28699
+ * Returns an iterator over the `[key, value]` pairs in the dictionary.
28700
+ *
28701
+ * The iterator yields entries in insertion order.
28702
+ *
28703
+ * @returns An iterator over `[key, value]` tuples for each entry in the dictionary.
28704
+ */
28705
+ *entries() {
28706
+ for (const leaf of this.keyvals.values()) {
28707
+ yield [leaf.key, leaf.value];
28708
+ }
28709
+ }
28710
+ /**
28711
+ * Default iterator for the dictionary.
28712
+ *
28713
+ * Equivalent to calling {@link entries}.
28714
+ * Enables iteration with `for...of`:
28715
+ *
28716
+ * ```ts
28717
+ * for (const [key, value] of dict) {
28718
+ * ...
28719
+ * }
28720
+ * ```
28721
+ *
28722
+ * @returns An iterator over `[key, value]` pairs.
28723
+ */
28724
+ [Symbol.iterator]() {
28725
+ return this.entries();
28726
+ }
28727
+ /**
28728
+ * Creates a new sorted array of values, ordered by their corresponding keys.
28729
+ *
28730
+ * Iterates over all entries in the dictionary and sorts them according
28731
+ * to the provided comparator function applied to the keys.
28732
+ *
28733
+ * @param comparator - A comparator function that can compare two keys.
28734
+ *
28735
+ * @returns A new array containing all values from the dictionary,
28736
+ * sorted according to their keys.
28737
+ */
28738
+ toSortedArray(comparator) {
28739
+ const vals = Array.from(this);
28740
+ vals.sort((a, b) => comparator(a[0], b[0]).value);
28741
+ return vals.map((x) => x[1]);
28742
+ }
28743
+ }
28744
+ const CHUNK_SIZE = 6;
28745
+ /**
28746
+ * A function to transform a byte chunk (up to 6 bytes) into a U48 number.
28747
+ *
28748
+ * Note that it uses 3 additional bits to store the length (`value * 8 + len`).
28749
+ * This is needed to distinguish shorter chunks that have trailing 0s, for example: [1, 2] and [1, 2, 0].
28750
+ * */
28751
+ function bytesAsU48(bytes) {
28752
+ const len = bytes.length;
28753
+ debug_check `${len <= CHUNK_SIZE} Length has to be <= ${CHUNK_SIZE}, got: ${len}`;
28754
+ let value = bytes[3] | (bytes[2] << 8) | (bytes[1] << 16) | (bytes[0] << 24);
28755
+ for (let i = 4; i < bytes.length; i++) {
28756
+ value = value * 256 + bytes[i];
28757
+ }
28758
+ return value * 8 + len;
28759
+ }
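A worked check of the length tagging described in the comment above; the numbers are computed with the same formula as `bytesAsU48`, restated here as a small standalone helper:

```ts
// Re-derivation of the chunk-to-number mapping (a sketch, not the bundled code).
const tag = (bytes: number[]): number => {
  let value = (bytes[3] ?? 0) | ((bytes[2] ?? 0) << 8) | ((bytes[1] ?? 0) << 16) | ((bytes[0] ?? 0) << 24);
  for (let i = 4; i < bytes.length; i++) {
    value = value * 256 + bytes[i];
  }
  return value * 8 + bytes.length;
};

tag([1, 2]);    // 135266306 (value 0x01020000, length tag 2)
tag([1, 2, 0]); // 135266307 (same value 0x01020000, length tag 3)
// Without the `* 8 + len` tag both chunks would collide on the same map key.
```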
28760
+ class Node {
28761
+ leaf;
28762
+ children;
28763
+ convertListChildrenToMap() {
28764
+ if (!(this.children instanceof ListChildren)) {
28765
+ return;
28766
+ }
28767
+ this.children = MapChildren.fromListNode(this.children);
28768
+ }
28769
+ static withList() {
28770
+ return new Node(undefined, ListChildren.new());
28771
+ }
28772
+ static withMap() {
28773
+ return new Node(undefined, MapChildren.new());
28774
+ }
28775
+ constructor(leaf, children) {
28776
+ this.leaf = leaf;
28777
+ this.children = children;
28778
+ }
28779
+ getLeaf() {
28780
+ return this.leaf;
28781
+ }
28782
+ remove(_key) {
28783
+ if (this.leaf === undefined) {
28784
+ return null;
28785
+ }
28786
+ const removedLeaf = this.leaf;
28787
+ this.leaf = undefined;
28788
+ return removedLeaf;
28789
+ }
28790
+ set(key, value) {
28791
+ if (this.leaf === undefined) {
28792
+ this.leaf = { key, value };
28793
+ return this.leaf;
28794
+ }
28795
+ this.leaf.value = value;
28796
+ return null;
28797
+ }
28798
+ }
28799
+ class ListChildren {
28800
+ children = [];
28801
+ constructor() { }
28802
+ find(key) {
28803
+ const result = this.children.find((item) => item[0].isEqualTo(key));
28804
+ if (result !== undefined) {
28805
+ return result[1];
28806
+ }
28807
+ return null;
28808
+ }
28809
+ remove(key) {
28810
+ const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
28811
+ if (existingIndex >= 0) {
28812
+ const ret = this.children.splice(existingIndex, 1);
28813
+ return ret[0][1];
28814
+ }
28815
+ return null;
28816
+ }
28817
+ insert(key, leaf) {
28818
+ const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
28819
+ if (existingIndex >= 0) {
28820
+ const existing = this.children[existingIndex];
28821
+ existing[1].value = leaf.value;
28822
+ return null;
28823
+ }
28824
+ this.children.push([key, leaf]);
28825
+ return leaf;
28826
+ }
28827
+ static new() {
28828
+ return new ListChildren();
28829
+ }
28830
+ }
28831
+ class MapChildren {
28832
+ children = new Map();
28833
+ constructor() { }
28834
+ static new() {
28835
+ return new MapChildren();
28836
+ }
28837
+ static fromListNode(node) {
28838
+ const mapNode = new MapChildren();
28839
+ for (const [key, leaf] of node.children) {
28840
+ const currentKeyChunk = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(key.raw.subarray(0, CHUNK_SIZE)));
28841
+ const subKey = bytes_BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE));
28842
+ let child = mapNode.getChild(currentKeyChunk);
28843
+ if (child === undefined) {
28844
+ child = Node.withList();
28845
+ mapNode.setChild(currentKeyChunk, child);
28846
+ }
28847
+ const children = child.children;
28848
+ children.insert(subKey, leaf);
28849
+ }
28850
+ return mapNode;
28851
+ }
28852
+ getChild(keyChunk) {
28853
+ const chunkAsNumber = bytesAsU48(keyChunk.raw);
28854
+ return this.children.get(chunkAsNumber);
28855
+ }
28856
+ setChild(keyChunk, node) {
28857
+ const chunkAsNumber = bytesAsU48(keyChunk.raw);
28858
+ this.children.set(chunkAsNumber, node);
28859
+ }
28860
+ }
28861
+
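A minimal usage sketch of the dictionary added above. `BlobDictionary` and `BytesBlob` are the source names behind the bundle's `BlobDictionary` and `bytes_BytesBlob`; the import path is an assumption:

```ts
// Keys are byte blobs; values can be anything. The threshold controls when a
// node's children switch from a small array to a map (5 mirrors the
// BLOB_DICTIONARY_THRESHOLD used further down in this diff).
const dict = BlobDictionary.new(5);
const key = BytesBlob.blobFrom(new Uint8Array([0xde, 0xad, 0xbe, 0xef]));

dict.set(key, "value");
dict.get(key);  // "value"
dict.has(key);  // true
dict.size;      // 1

for (const [k, v] of dict) {
  // yields the original key objects and values in insertion order
}

dict.delete(key); // true
```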
28433
28862
  ;// CONCATENATED MODULE: ./packages/core/collections/hash-dictionary.ts
28434
- /** A map which uses hashes as keys. */
28435
- class hash_dictionary_HashDictionary {
28863
+ /**
28864
+ * A map which uses hashes as keys.
28865
+ *
28866
+ * @deprecated
28867
+ * */
28868
+ class StringHashDictionary {
28436
28869
  // TODO [ToDr] [crit] We can't use `TrieHash` directly in the map,
28437
28870
  // because of the way it's being compared. Hence having `string` here.
28438
28871
  // This has to be benchmarked and re-written to a custom map most likely.
@@ -28498,6 +28931,17 @@ class hash_dictionary_HashDictionary {
28498
28931
  }
28499
28932
  }
28500
28933
 
28934
+ /**
28935
+ * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
28936
+ * In practice the exact value doesn't matter much, because arrays in this structure usually have a length close to 1.
28937
+ */
28938
+ const BLOB_DICTIONARY_THRESHOLD = 5;
28939
+ class hash_dictionary_HashDictionary extends BlobDictionary {
28940
+ constructor() {
28941
+ super(BLOB_DICTIONARY_THRESHOLD);
28942
+ }
28943
+ }
28944
+
28501
28945
  ;// CONCATENATED MODULE: ./packages/core/collections/hash-set.ts
28502
28946
 
28503
28947
  /** A set specialized for storing hashes. */
@@ -28962,6 +29406,18 @@ class SortedSet extends SortedArray {
28962
29406
 
28963
29407
 
28964
29408
 
29409
+ function getTruncatedKey(key) {
29410
+ // Always return exactly TRUNCATED_HASH_SIZE bytes.
29411
+ if (key.length === TRUNCATED_HASH_SIZE) {
29412
+ return key;
29413
+ }
29414
+ return bytes_Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE);
29415
+ }
29416
+ /**
29417
+ * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
29418
+ * In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
29419
+ */
29420
+ const truncated_hash_dictionary_BLOB_DICTIONARY_THRESHOLD = 5;
28965
29421
  /**
28966
29422
  * A collection of hash-based keys (likely `StateKey`s) which ignores
28967
29423
  * differences on the last byte.
@@ -28974,48 +29430,37 @@ class TruncatedHashDictionary {
28974
29430
  * Each key will be copied and have the last byte replace with a 0.
28975
29431
  */
28976
29432
  static fromEntries(entries) {
28977
- /** Copy key bytes of an entry and replace the last one with 0. */
28978
- const mapped = Array.from(entries).map(([key, value]) => {
28979
- const newKey = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
28980
- newKey.raw.set(key.raw.subarray(0, TRUNCATED_HASH_SIZE));
28981
- return [newKey, value];
28982
- });
28983
- return new TruncatedHashDictionary(hash_dictionary_HashDictionary.fromEntries(mapped));
29433
+ return new TruncatedHashDictionary(BlobDictionary.fromEntries(Array.from(entries).map(([key, value]) => [getTruncatedKey(key), value]), truncated_hash_dictionary_BLOB_DICTIONARY_THRESHOLD));
28984
29434
  }
28985
- /** A truncated key which we re-use to query the dictionary. */
28986
- truncatedKey = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
28987
29435
  constructor(dict) {
28988
29436
  this.dict = dict;
28989
29437
  }
28990
29438
  [TEST_COMPARE_USING]() {
28991
- return this.dict;
29439
+ return Array.from(this.dict);
28992
29440
  }
28993
29441
  /** Return number of items in the dictionary. */
28994
29442
  get size() {
28995
29443
  return this.dict.size;
28996
29444
  }
28997
29445
  /** Retrieve a value that matches the key on `TRUNCATED_HASH_SIZE`. */
28998
- get(fullKey) {
28999
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
29000
- return this.dict.get(this.truncatedKey);
29446
+ get(key) {
29447
+ const truncatedKey = getTruncatedKey(key);
29448
+ return this.dict.get(truncatedKey);
29001
29449
  }
29002
29450
  /** Return true if the key is present in the dictionary */
29003
- has(fullKey) {
29004
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
29005
- return this.dict.has(this.truncatedKey);
29451
+ has(key) {
29452
+ const truncatedKey = getTruncatedKey(key);
29453
+ return this.dict.has(truncatedKey);
29006
29454
  }
29007
29455
  /** Set or update a value that matches the key on `TRUNCATED_HASH_SIZE`. */
29008
- set(fullKey, value) {
29009
- // NOTE we can't use the the shared key here, since the collection will
29010
- // store the key for us, hence the copy.
29011
- const key = bytes_Bytes.zero(hash_HASH_SIZE);
29012
- key.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
29013
- this.dict.set(key.asOpaque(), value);
29456
+ set(key, value) {
29457
+ const truncatedKey = getTruncatedKey(key);
29458
+ this.dict.set(truncatedKey, value);
29014
29459
  }
29015
29460
  /** Remove a value that matches the key on `TRUNCATED_HASH_SIZE`. */
29016
- delete(fullKey) {
29017
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
29018
- this.dict.delete(this.truncatedKey);
29461
+ delete(key) {
29462
+ const truncatedKey = getTruncatedKey(key);
29463
+ this.dict.delete(truncatedKey);
29019
29464
  }
29020
29465
  /** Iterator over values of the dictionary. */
29021
29466
  values() {
@@ -29023,9 +29468,7 @@ class TruncatedHashDictionary {
29023
29468
  }
29024
29469
  /** Iterator over entries of the dictionary (with truncated keys) */
29025
29470
  *entries() {
29026
- for (const [key, value] of this.dict.entries()) {
29027
- yield [bytes_Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE).asOpaque(), value];
29028
- }
29471
+ yield* this.dict.entries();
29029
29472
  }
29030
29473
  [Symbol.iterator]() {
29031
29474
  return this.entries();
@@ -29042,6 +29485,7 @@ class TruncatedHashDictionary {
29042
29485
 
29043
29486
 
29044
29487
 
29488
+
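A sketch of the truncation behaviour implemented above, assuming 32-byte hash keys and that `TRUNCATED_HASH_SIZE` is one byte short of the full hash (the constant's value is not shown in this diff):

```ts
// Two keys that differ only on the last byte resolve to the same entry.
const a = Bytes.zero(32);
const b = Bytes.zero(32);
a.raw[31] = 0x01;
b.raw[31] = 0x02;

const dict = TruncatedHashDictionary.fromEntries([[a, "shared"]]);
dict.get(b); // "shared": the final byte is ignored when matching keys
```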
29045
29489
  ;// CONCATENATED MODULE: ./packages/jam/block/codec.ts
29046
29490
 
29047
29491
 
@@ -31185,7 +31629,7 @@ function jip_chain_spec_parseBootnode(v) {
31185
31629
  if (name === "" || ip === "" || port === "") {
31186
31630
  throw new Error(`Invalid bootnode format, expected: <name>@<ip>:<port>, got: "${v}"`);
31187
31631
  }
31188
- const portNumber = Number.parseInt(port);
31632
+ const portNumber = Number.parseInt(port, 10);
31189
31633
  if (!isU16(portNumber)) {
31190
31634
  throw new Error(`Invalid port number: "${port}"`);
31191
31635
  }
@@ -34210,11 +34654,32 @@ const ENTROPY_ENTRIES = 4;
34210
34654
 
34211
34655
  var UpdatePreimageKind;
34212
34656
  (function (UpdatePreimageKind) {
34213
- /** Insert new preimage and optionally update it's lookup history. */
34657
+ /**
34658
+ * Insert a new preimage and optionally update its lookup history.
34659
+ *
34660
+ * Used in: `provide`
34661
+ *
34662
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/383904383904?v=0.7.2
34663
+ */
34214
34664
  UpdatePreimageKind[UpdatePreimageKind["Provide"] = 0] = "Provide";
34215
- /** Remove a preimage and it's lookup history. */
34665
+ /**
34666
+ * Remove a preimage and it's lookup history.
34667
+ *
34668
+ * Used in: `forget` and `eject`
34669
+ *
34670
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/38c701380202?v=0.7.2
34671
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/379102379302?v=0.7.2
34672
+ */
34216
34673
  UpdatePreimageKind[UpdatePreimageKind["Remove"] = 1] = "Remove";
34217
- /** update or add lookup history for preimage hash/len to given value. */
34674
+ /**
34675
+ * Update or add lookup history for preimage hash/len to given value.
34676
+ *
34677
+ * Used in: `solicit` and `forget`
34678
+ *
34679
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/382802382802?v=0.7.2
34680
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/384002384b02?v=0.7.2
34681
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/38c60038ea00?v=0.7.2
34682
+ */
34218
34683
  UpdatePreimageKind[UpdatePreimageKind["UpdateOrAdd"] = 2] = "UpdateOrAdd";
34219
34684
  })(UpdatePreimageKind || (UpdatePreimageKind = {}));
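A hypothetical dispatch over the three kinds documented above, mirroring how `getLookupHistory` later in this diff switches on the update action; the `kind` and `item` field names on `update` are assumptions for illustration:

```ts
switch (update.kind) {
  case UpdatePreimageKind.Provide:
    // preimage blob supplied; mark it available at the given slot
    break;
  case UpdatePreimageKind.Remove:
    // drop the preimage together with its lookup history (forget / eject)
    break;
  case UpdatePreimageKind.UpdateOrAdd:
    // replace the lookup history with update.item (solicit / forget)
    break;
}
```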
34220
34685
  /**
@@ -34222,7 +34687,7 @@ var UpdatePreimageKind;
34222
34687
  *
34223
34688
  * Can be one of the following cases:
34224
34689
  * 1. Provide a new preimage blob and set the lookup history to available at `slot`.
34225
- * 2. Remove (expunge) a preimage and it's lookup history.
34690
+ * 2. Remove (forget) a preimage and its lookup history.
34226
34691
  * 3. Update `LookupHistory` with given value.
34227
34692
  */
34228
34693
  class UpdatePreimage {
@@ -35445,7 +35910,6 @@ class LeafNode {
35445
35910
  /**
35446
35911
  * Get the byte length of embedded value.
35447
35912
  *
35448
- * @remark
35449
35913
  * Note in case this node only contains hash this is going to be 0.
35450
35914
  */
35451
35915
  getValueLength() {
@@ -35456,7 +35920,6 @@ class LeafNode {
35456
35920
  /**
35457
35921
  * Returns the embedded value.
35458
35922
  *
35459
- * @remark
35460
35923
  * Note that this is going to be empty for a regular leaf node (i.e. containing a hash).
35461
35924
  */
35462
35925
  getValue() {
@@ -35466,7 +35929,6 @@ class LeafNode {
35466
35929
  /**
35467
35930
  * Returns contained value hash.
35468
35931
  *
35469
- * @remark
35470
35932
  * Note that for embedded value this is going to be full 0-padded 32 bytes.
35471
35933
  */
35472
35934
  getValueHash() {
@@ -37588,11 +38050,9 @@ class JamConfig {
37588
38050
 
37589
38051
  /** Helper function to create most used hashes in the block */
37590
38052
  class hasher_TransitionHasher {
37591
- context;
37592
38053
  keccakHasher;
37593
38054
  blake2b;
37594
- constructor(context, keccakHasher, blake2b) {
37595
- this.context = context;
38055
+ constructor(keccakHasher, blake2b) {
37596
38056
  this.keccakHasher = keccakHasher;
37597
38057
  this.blake2b = blake2b;
37598
38058
  }
@@ -39016,7 +39476,11 @@ class PartiallyUpdatedState {
39016
39476
  const service = this.state.getService(serviceId);
39017
39477
  return service?.getPreimage(hash) ?? null;
39018
39478
  }
39019
- /** Get status of a preimage of current service taking into account any updates. */
39479
+ /**
39480
+ * Get the status of a preimage of the current service, taking into account any updates.
39481
+ *
39482
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/110201110201?v=0.7.2
39483
+ */
39020
39484
  getLookupHistory(currentTimeslot, serviceId, hash, length) {
39021
39485
  const updatedService = this.stateUpdate.services.updated.get(serviceId);
39022
39486
  /** Return lookup history item for newly created service */
@@ -39053,12 +39517,7 @@ class PartiallyUpdatedState {
39053
39517
  return new LookupHistoryItem(hash, updatedPreimage.length, tryAsLookupHistorySlots([currentTimeslot]));
39054
39518
  }
39055
39519
  case UpdatePreimageKind.Remove: {
39056
- const state = stateFallback();
39057
- // kinda impossible, since we know it's there because it's removed.
39058
- if (state === null) {
39059
- return null;
39060
- }
39061
- return new LookupHistoryItem(hash, state.length, tryAsLookupHistorySlots([...state.slots, currentTimeslot]));
39520
+ return null;
39062
39521
  }
39063
39522
  case UpdatePreimageKind.UpdateOrAdd: {
39064
39523
  return action.item;
@@ -39151,7 +39610,7 @@ const gas_tryAsSmallGas = (v) => opaque_asOpaqueType(numbers_tryAsU32(v));
39151
39610
  /** Attempt to convert given number into U64 gas representation. */
39152
39611
  const tryAsBigGas = (v) => opaque_asOpaqueType(numbers_tryAsU64(v));
39153
39612
  /** Attempt to convert given number into gas. */
39154
- const tryAsGas = (v) => typeof v === "number" && v < 2 ** 32 ? gas_tryAsSmallGas(v) : tryAsBigGas(v);
39613
+ const gas_tryAsGas = (v) => typeof v === "number" && v < 2 ** 32 ? gas_tryAsSmallGas(v) : tryAsBigGas(v);
39155
39614
 
39156
39615
  ;// CONCATENATED MODULE: ./packages/core/pvm-interface/memory.ts
39157
39616
 
@@ -39434,7 +39893,7 @@ const tryAsRegisterIndex = (index) => {
39434
39893
  debug_check `${index >= 0 && index < registers_NO_OF_REGISTERS} Incorrect register index: ${index}!`;
39435
39894
  return opaque_asOpaqueType(index);
39436
39895
  };
39437
- class Registers {
39896
+ class registers_Registers {
39438
39897
  bytes;
39439
39898
  asSigned;
39440
39899
  asUnsigned;
@@ -39453,7 +39912,7 @@ class Registers {
39453
39912
  }
39454
39913
  static fromBytes(bytes) {
39455
39914
  debug_check `${bytes.length === registers_NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
39456
- return new Registers(bytes);
39915
+ return new registers_Registers(bytes);
39457
39916
  }
39458
39917
  getBytesAsLittleEndian(index, len) {
39459
39918
  const offset = index << REGISTER_SIZE_SHIFT;
@@ -39767,49 +40226,10 @@ class NoopMissing {
39767
40226
  }
39768
40227
  }
39769
40228
 
39770
- ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/gas.ts
39771
-
39772
-
39773
- /** Create a new gas counter instance depending on the gas value. */
39774
- function gasCounter(gas) {
39775
- return new GasCounterU64(numbers_tryAsU64(gas));
39776
- }
39777
- class GasCounterU64 {
39778
- gas;
39779
- initialGas;
39780
- constructor(gas) {
39781
- this.gas = gas;
39782
- this.initialGas = tryAsGas(gas);
39783
- }
39784
- set(g) {
39785
- this.gas = numbers_tryAsU64(g);
39786
- }
39787
- get() {
39788
- return tryAsGas(this.gas);
39789
- }
39790
- sub(g) {
39791
- const result = this.gas - numbers_tryAsU64(g);
39792
- if (result >= 0n) {
39793
- this.gas = numbers_tryAsU64(result);
39794
- return false;
39795
- }
39796
- this.gas = numbers_tryAsU64(0n);
39797
- return true;
39798
- }
39799
- used() {
39800
- const gasConsumed = numbers_tryAsU64(this.initialGas) - this.gas;
39801
- // In we have less than zero left we assume that all gas has been consumed.
39802
- if (gasConsumed < 0) {
39803
- return this.initialGas;
39804
- }
39805
- return tryAsGas(gasConsumed);
39806
- }
39807
- }
39808
-
39809
40229
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts
39810
40230
 
39811
40231
 
39812
- const tryAsMemoryIndex = (index) => {
40232
+ const memory_index_tryAsMemoryIndex = (index) => {
39813
40233
  debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
39814
40234
  return opaque_asOpaqueType(index);
39815
40235
  };
@@ -39823,25 +40243,25 @@ const tryAsSbrkIndex = (index) => {
39823
40243
 
39824
40244
  const memory_consts_PAGE_SIZE_SHIFT = 12;
39825
40245
  // PAGE_SIZE has to be a power of 2
39826
- const PAGE_SIZE = 1 << memory_consts_PAGE_SIZE_SHIFT;
40246
+ const memory_consts_PAGE_SIZE = 1 << memory_consts_PAGE_SIZE_SHIFT;
39827
40247
  const MIN_ALLOCATION_SHIFT = (() => {
39828
40248
  const MIN_ALLOCATION_SHIFT = 7;
39829
40249
  debug_check `${MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < memory_consts_PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
39830
40250
  return MIN_ALLOCATION_SHIFT;
39831
40251
  })();
39832
- const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
39833
- const LAST_PAGE_NUMBER = (MEMORY_SIZE - PAGE_SIZE) / PAGE_SIZE;
40252
+ const MIN_ALLOCATION_LENGTH = memory_consts_PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
40253
+ const LAST_PAGE_NUMBER = (MEMORY_SIZE - memory_consts_PAGE_SIZE) / memory_consts_PAGE_SIZE;
39834
40254
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/35a60235a602?v=0.6.4 */
39835
40255
  const RESERVED_NUMBER_OF_PAGES = 16;
39836
40256
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/35a60235a602?v=0.6.4 */
39837
- const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE;
40257
+ const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / memory_consts_PAGE_SIZE;
39838
40258
 
39839
40259
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts
39840
40260
 
39841
40261
 
39842
40262
  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
39843
40263
  const tryAsPageIndex = (index) => {
39844
- debug_check `${index >= 0 && index < PAGE_SIZE}, Incorect page index: ${index}!`;
40264
+ debug_check `${index >= 0 && index < memory_consts_PAGE_SIZE} Incorrect page index: ${index}!`;
39845
40265
  return opaque_asOpaqueType(index);
39846
40266
  };
39847
40267
  /** Ensure that given `index` represents an index of one of the pages. */
@@ -39869,17 +40289,17 @@ function getNextPageNumber(pageNumber) {
39869
40289
 
39870
40290
 
39871
40291
  function alignToPageSize(length) {
39872
- return PAGE_SIZE * Math.ceil(length / PAGE_SIZE);
40292
+ return memory_consts_PAGE_SIZE * Math.ceil(length / memory_consts_PAGE_SIZE);
39873
40293
  }
39874
40294
  function getPageNumber(address) {
39875
40295
  return tryAsPageNumber(address >>> memory_consts_PAGE_SIZE_SHIFT);
39876
40296
  }
39877
40297
  function getStartPageIndex(address) {
39878
- return tryAsMemoryIndex((address >>> memory_consts_PAGE_SIZE_SHIFT) << memory_consts_PAGE_SIZE_SHIFT);
40298
+ return memory_index_tryAsMemoryIndex((address >>> memory_consts_PAGE_SIZE_SHIFT) << memory_consts_PAGE_SIZE_SHIFT);
39879
40299
  }
39880
40300
  function getStartPageIndexFromPageNumber(pageNumber) {
39881
40301
  // >>> 0 is needed to avoid changing sign of the number
39882
- return tryAsMemoryIndex((pageNumber << memory_consts_PAGE_SIZE_SHIFT) >>> 0);
40302
+ return memory_index_tryAsMemoryIndex((pageNumber << memory_consts_PAGE_SIZE_SHIFT) >>> 0);
39883
40303
  }
39884
40304
 
39885
40305
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/errors.ts
@@ -39901,7 +40321,7 @@ class PageFault {
39901
40321
  return new PageFault(numbers_tryAsU32(startPageIndex), isAccessFault);
39902
40322
  }
39903
40323
  static fromMemoryIndex(maybeMemoryIndex, isAccessFault = false) {
39904
- const memoryIndex = tryAsMemoryIndex(maybeMemoryIndex % MEMORY_SIZE);
40324
+ const memoryIndex = memory_index_tryAsMemoryIndex(maybeMemoryIndex % MEMORY_SIZE);
39905
40325
  const startPageIndex = getStartPageIndex(memoryIndex);
39906
40326
  return new PageFault(numbers_tryAsU32(startPageIndex), isAccessFault);
39907
40327
  }
@@ -39980,9 +40400,9 @@ class MemoryRange {
39980
40400
  constructor(start, length) {
39981
40401
  this.start = start;
39982
40402
  this.length = length;
39983
- this.end = tryAsMemoryIndex((this.start + this.length) % MEMORY_SIZE);
40403
+ this.end = memory_index_tryAsMemoryIndex((this.start + this.length) % MEMORY_SIZE);
39984
40404
  if (length > 0) {
39985
- this.lastIndex = tryAsMemoryIndex((this.end - 1 + MEMORY_SIZE) % MEMORY_SIZE);
40405
+ this.lastIndex = memory_index_tryAsMemoryIndex((this.end - 1 + MEMORY_SIZE) % MEMORY_SIZE);
39986
40406
  }
39987
40407
  }
39988
40408
  /** Creates a memory range from given starting point and length */
@@ -40025,7 +40445,7 @@ class MemoryRange {
40025
40445
  *
40026
40446
  * it should be in `memory-consts` but it cannot be there because of circular dependency
40027
40447
  */
40028
- const RESERVED_MEMORY_RANGE = MemoryRange.fromStartAndLength(tryAsMemoryIndex(0), RESERVED_NUMBER_OF_PAGES * PAGE_SIZE);
40448
+ const RESERVED_MEMORY_RANGE = MemoryRange.fromStartAndLength(memory_index_tryAsMemoryIndex(0), RESERVED_NUMBER_OF_PAGES * memory_consts_PAGE_SIZE);
40029
40449
 
40030
40450
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/page-range.ts
40031
40451
 
@@ -40063,7 +40483,7 @@ class PageRange {
40063
40483
  // lastIndex is not null because we just ensured that the range is not empty
40064
40484
  const pageWithLastIndex = getPageNumber(range.lastIndex ?? range.end);
40065
40485
  const endPage = getNextPageNumber(pageWithLastIndex);
40066
- if ((startPage === endPage || startPage === pageWithLastIndex) && range.length > PAGE_SIZE) {
40486
+ if ((startPage === endPage || startPage === pageWithLastIndex) && range.length > memory_consts_PAGE_SIZE) {
40067
40487
  // full range
40068
40488
  return new PageRange(startPage, MAX_NUMBER_OF_PAGES);
40069
40489
  }
@@ -40127,8 +40547,8 @@ class ReadablePage extends MemoryPage {
40127
40547
  }
40128
40548
  loadInto(result, startIndex, length) {
40129
40549
  const endIndex = startIndex + length;
40130
- if (endIndex > PAGE_SIZE) {
40131
- return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
40550
+ if (endIndex > memory_consts_PAGE_SIZE) {
40551
+ return result_Result.error(PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + memory_consts_PAGE_SIZE}`);
40132
40552
  }
40133
40553
  const bytes = this.data.subarray(startIndex, endIndex);
40134
40554
  // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -40161,8 +40581,8 @@ class WriteablePage extends MemoryPage {
40161
40581
  constructor(pageNumber, initialData) {
40162
40582
  super(pageNumber);
40163
40583
  const dataLength = initialData?.length ?? 0;
40164
- const initialPageLength = Math.min(PAGE_SIZE, Math.max(dataLength, MIN_ALLOCATION_LENGTH));
40165
- this.buffer = new ArrayBuffer(initialPageLength, { maxByteLength: PAGE_SIZE });
40584
+ const initialPageLength = Math.min(memory_consts_PAGE_SIZE, Math.max(dataLength, MIN_ALLOCATION_LENGTH));
40585
+ this.buffer = new ArrayBuffer(initialPageLength, { maxByteLength: memory_consts_PAGE_SIZE });
40166
40586
  this.view = new Uint8Array(this.buffer);
40167
40587
  if (initialData !== undefined) {
40168
40588
  this.view.set(initialData);
@@ -40170,8 +40590,8 @@ class WriteablePage extends MemoryPage {
40170
40590
  }
40171
40591
  loadInto(result, startIndex, length) {
40172
40592
  const endIndex = startIndex + length;
40173
- if (endIndex > PAGE_SIZE) {
40174
- return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
40593
+ if (endIndex > memory_consts_PAGE_SIZE) {
40594
+ return result_Result.error(PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + memory_consts_PAGE_SIZE}`);
40175
40595
  }
40176
40596
  const bytes = this.view.subarray(startIndex, endIndex);
40177
40597
  // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -40180,16 +40600,16 @@ class WriteablePage extends MemoryPage {
40180
40600
  return result_Result.ok(result_OK);
40181
40601
  }
40182
40602
  storeFrom(startIndex, bytes) {
40183
- if (this.buffer.byteLength < startIndex + bytes.length && this.buffer.byteLength < PAGE_SIZE) {
40184
- const newLength = Math.min(PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, startIndex + bytes.length));
40603
+ if (this.buffer.byteLength < startIndex + bytes.length && this.buffer.byteLength < memory_consts_PAGE_SIZE) {
40604
+ const newLength = Math.min(memory_consts_PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, startIndex + bytes.length));
40185
40605
  this.buffer.resize(newLength);
40186
40606
  }
40187
40607
  this.view.set(bytes, startIndex);
40188
40608
  return result_Result.ok(result_OK);
40189
40609
  }
40190
40610
  setData(pageIndex, data) {
40191
- if (this.buffer.byteLength < pageIndex + data.length && this.buffer.byteLength < PAGE_SIZE) {
40192
- const newLength = Math.min(PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, pageIndex + data.length));
40611
+ if (this.buffer.byteLength < pageIndex + data.length && this.buffer.byteLength < memory_consts_PAGE_SIZE) {
40612
+ const newLength = Math.min(memory_consts_PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, pageIndex + data.length));
40193
40613
  this.buffer.resize(newLength);
40194
40614
  }
40195
40615
  this.view.set(data, pageIndex);
@@ -40240,10 +40660,10 @@ class Memory {
40240
40660
  this.memory = memory;
40241
40661
  }
40242
40662
  store(address, bytes) {
40243
- return this.storeFrom(tryAsMemoryIndex(address), bytes);
40663
+ return this.storeFrom(memory_index_tryAsMemoryIndex(address), bytes);
40244
40664
  }
40245
40665
  read(address, output) {
40246
- return this.loadInto(output, tryAsMemoryIndex(address));
40666
+ return this.loadInto(output, memory_index_tryAsMemoryIndex(address));
40247
40667
  }
40248
40668
  reset() {
40249
40669
  this.sbrkIndex = tryAsSbrkIndex(RESERVED_MEMORY_RANGE.end);
@@ -40270,8 +40690,8 @@ class Memory {
40270
40690
  let currentPosition = address;
40271
40691
  let bytesLeft = bytes.length;
40272
40692
  for (const page of pages) {
40273
- const pageStartIndex = tryAsPageIndex(currentPosition % PAGE_SIZE);
40274
- const bytesToWrite = Math.min(PAGE_SIZE - pageStartIndex, bytesLeft);
40693
+ const pageStartIndex = tryAsPageIndex(currentPosition % memory_consts_PAGE_SIZE);
40694
+ const bytesToWrite = Math.min(memory_consts_PAGE_SIZE - pageStartIndex, bytesLeft);
40275
40695
  const sourceStartIndex = currentPosition - address;
40276
40696
  const source = bytes.subarray(sourceStartIndex, sourceStartIndex + bytesToWrite);
40277
40697
  page.storeFrom(pageStartIndex, source);
@@ -40320,8 +40740,8 @@ class Memory {
40320
40740
  let currentPosition = startAddress;
40321
40741
  let bytesLeft = result.length;
40322
40742
  for (const page of pages) {
40323
- const pageStartIndex = tryAsPageIndex(currentPosition % PAGE_SIZE);
40324
- const bytesToRead = Math.min(PAGE_SIZE - pageStartIndex, bytesLeft);
40743
+ const pageStartIndex = tryAsPageIndex(currentPosition % memory_consts_PAGE_SIZE);
40744
+ const bytesToRead = Math.min(memory_consts_PAGE_SIZE - pageStartIndex, bytesLeft);
40325
40745
  const destinationStartIndex = currentPosition - startAddress;
40326
40746
  const destination = result.subarray(destinationStartIndex);
40327
40747
  page.loadInto(destination, pageStartIndex, bytesToRead);
@@ -40348,7 +40768,7 @@ class Memory {
40348
40768
  const newSbrkIndex = tryAsSbrkIndex(alignToPageSize(newVirtualSbrkIndex));
40349
40769
  // TODO [MaSi]: `getPageNumber` works incorrectly for SbrkIndex. Sbrk index should be changed to MemoryIndex
40350
40770
  const firstPageNumber = getPageNumber(currentSbrkIndex);
40351
- const pagesToAllocate = (newSbrkIndex - currentSbrkIndex) / PAGE_SIZE;
40771
+ const pagesToAllocate = (newSbrkIndex - currentSbrkIndex) / memory_consts_PAGE_SIZE;
40352
40772
  const rangeToAllocate = PageRange.fromStartAndLength(firstPageNumber, pagesToAllocate);
40353
40773
  for (const pageNumber of rangeToAllocate) {
40354
40774
  const page = new WriteablePage(pageNumber);
@@ -40403,8 +40823,8 @@ class MemoryBuilder {
40403
40823
  setReadablePages(start, end, data = new Uint8Array()) {
40404
40824
  this.ensureNotFinalized();
40405
40825
  debug_check `${start < end} end has to be bigger than start`;
40406
- debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
40407
- debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
40826
+ debug_check `${start % memory_consts_PAGE_SIZE === 0} start needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
40827
+ debug_check `${end % memory_consts_PAGE_SIZE === 0} end needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
40408
40828
  debug_check `${data.length <= end - start} the initial data is longer than address range`;
40409
40829
  const length = end - start;
40410
40830
  const range = MemoryRange.fromStartAndLength(start, length);
@@ -40413,7 +40833,7 @@ class MemoryBuilder {
40413
40833
  const noOfPages = pages.length;
40414
40834
  for (let i = 0; i < noOfPages; i++) {
40415
40835
  const pageNumber = pages[i];
40416
- const dataChunk = data.subarray(i * PAGE_SIZE, (i + 1) * PAGE_SIZE);
40836
+ const dataChunk = data.subarray(i * memory_consts_PAGE_SIZE, (i + 1) * memory_consts_PAGE_SIZE);
40417
40837
  const page = new ReadablePage(pageNumber, dataChunk);
40418
40838
  this.initialMemory.set(pageNumber, page);
40419
40839
  }
@@ -40431,8 +40851,8 @@ class MemoryBuilder {
40431
40851
  setWriteablePages(start, end, data = new Uint8Array()) {
40432
40852
  this.ensureNotFinalized();
40433
40853
  debug_check `${start < end} end has to be bigger than start`;
40434
- debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
40435
- debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
40854
+ debug_check `${start % memory_consts_PAGE_SIZE === 0} start needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
40855
+ debug_check `${end % memory_consts_PAGE_SIZE === 0} end needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
40436
40856
  debug_check `${data.length <= end - start} the initial data is longer than address range`;
40437
40857
  const length = end - start;
40438
40858
  const range = MemoryRange.fromStartAndLength(start, length);
@@ -40441,7 +40861,7 @@ class MemoryBuilder {
40441
40861
  const noOfPages = pages.length;
40442
40862
  for (let i = 0; i < noOfPages; i++) {
40443
40863
  const pageNumber = pages[i];
40444
- const dataChunk = data.subarray(i * PAGE_SIZE, (i + 1) * PAGE_SIZE);
40864
+ const dataChunk = data.subarray(i * memory_consts_PAGE_SIZE, (i + 1) * memory_consts_PAGE_SIZE);
40445
40865
  const page = new WriteablePage(pageNumber, dataChunk);
40446
40866
  this.initialMemory.set(pageNumber, page);
40447
40867
  }
@@ -40453,8 +40873,8 @@ class MemoryBuilder {
40453
40873
  */
40454
40874
  setData(start, data) {
40455
40875
  this.ensureNotFinalized();
40456
- const pageOffset = start % PAGE_SIZE;
40457
- const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
40876
+ const pageOffset = start % memory_consts_PAGE_SIZE;
40877
+ const remainingSpaceOnPage = memory_consts_PAGE_SIZE - pageOffset;
40458
40878
  debug_check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
40459
40879
  const length = data.length;
40460
40880
  const range = MemoryRange.fromStartAndLength(start, length);
@@ -40645,27 +41065,27 @@ class Program {
40645
41065
  static fromSpi(blob, args, hasMetadata) {
40646
41066
  const { code: spiCode, metadata } = hasMetadata ? extractCodeAndMetadata(blob) : { code: blob };
40647
41067
  const { code, memory: rawMemory, registers } = decodeStandardProgram(spiCode, args);
40648
- const regs = new Registers();
41068
+ const regs = new registers_Registers();
40649
41069
  regs.copyFrom(registers);
40650
41070
  const memoryBuilder = new MemoryBuilder();
40651
41071
  for (const { start, end, data } of rawMemory.readable) {
40652
- const startIndex = tryAsMemoryIndex(start);
40653
- const endIndex = tryAsMemoryIndex(end);
41072
+ const startIndex = memory_index_tryAsMemoryIndex(start);
41073
+ const endIndex = memory_index_tryAsMemoryIndex(end);
40654
41074
  memoryBuilder.setReadablePages(startIndex, endIndex, data ?? new Uint8Array());
40655
41075
  }
40656
41076
  for (const { start, end, data } of rawMemory.writeable) {
40657
- const startIndex = tryAsMemoryIndex(start);
40658
- const endIndex = tryAsMemoryIndex(end);
41077
+ const startIndex = memory_index_tryAsMemoryIndex(start);
41078
+ const endIndex = memory_index_tryAsMemoryIndex(end);
40659
41079
  memoryBuilder.setWriteablePages(startIndex, endIndex, data ?? new Uint8Array());
40660
41080
  }
40661
- const heapStart = tryAsMemoryIndex(rawMemory.sbrkIndex);
41081
+ const heapStart = memory_index_tryAsMemoryIndex(rawMemory.sbrkIndex);
40662
41082
  const heapEnd = tryAsSbrkIndex(rawMemory.heapEnd);
40663
41083
  const memory = memoryBuilder.finalize(heapStart, heapEnd);
40664
41084
  return new Program(code, regs, memory, metadata);
40665
41085
  }
40666
41086
  static fromGeneric(blob, hasMetadata) {
40667
41087
  const { code, metadata } = hasMetadata ? extractCodeAndMetadata(blob) : { code: blob };
40668
- const regs = new Registers();
41088
+ const regs = new registers_Registers();
40669
41089
  const memory = new Memory();
40670
41090
  return new Program(code, regs, memory, metadata);
40671
41091
  }
@@ -41680,6 +42100,45 @@ class BasicBlocks {
41680
42100
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/basic-blocks/index.ts
41681
42101
 
41682
42102
 
42103
+ ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/gas.ts
42104
+
42105
+
42106
+ /** Create a new gas counter instance depending on the gas value. */
42107
+ function gasCounter(gas) {
42108
+ return new GasCounterU64(numbers_tryAsU64(gas));
42109
+ }
42110
+ class GasCounterU64 {
42111
+ gas;
42112
+ initialGas;
42113
+ constructor(gas) {
42114
+ this.gas = gas;
42115
+ this.initialGas = gas_tryAsGas(gas);
42116
+ }
42117
+ set(g) {
42118
+ this.gas = numbers_tryAsU64(g);
42119
+ }
42120
+ get() {
42121
+ return gas_tryAsGas(this.gas);
42122
+ }
42123
+ sub(g) {
42124
+ const result = this.gas - numbers_tryAsU64(g);
42125
+ if (result >= 0n) {
42126
+ this.gas = numbers_tryAsU64(result);
42127
+ return false;
42128
+ }
42129
+ this.gas = numbers_tryAsU64(0n);
42130
+ return true;
42131
+ }
42132
+ used() {
42133
+ const gasConsumed = numbers_tryAsU64(this.initialGas) - this.gas;
42134
+ // If we have less than zero left, we assume that all gas has been consumed.
42135
+ if (gasConsumed < 0) {
42136
+ return this.initialGas;
42137
+ }
42138
+ return gas_tryAsGas(gasConsumed);
42139
+ }
42140
+ }
42141
+
41683
42142
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/instruction-gas-map.ts
41684
42143
 
41685
42144
 
@@ -42256,7 +42715,7 @@ class LoadOps {
42256
42715
  }
42257
42716
  loadNumber(address, registerIndex, numberLength) {
42258
42717
  const registerBytes = this.regs.getBytesAsLittleEndian(registerIndex, REG_SIZE_BYTES);
42259
- const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), tryAsMemoryIndex(address));
42718
+ const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), memory_index_tryAsMemoryIndex(address));
42260
42719
  if (loadResult.isError) {
42261
42720
  if (loadResult.error.isAccessFault) {
42262
42721
  this.instructionResult.status = pvm_interpreter_result_Result.FAULT_ACCESS;
@@ -42272,7 +42731,7 @@ class LoadOps {
42272
42731
  loadSignedNumber(address, registerIndex, numberLength) {
42273
42732
  // load all bytes from register to correctly handle the sign.
42274
42733
  const registerBytes = this.regs.getBytesAsLittleEndian(registerIndex, REG_SIZE_BYTES);
42275
- const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), tryAsMemoryIndex(address));
42734
+ const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), memory_index_tryAsMemoryIndex(address));
42276
42735
  if (loadResult.isError) {
42277
42736
  if (loadResult.error.isAccessFault) {
42278
42737
  this.instructionResult.status = pvm_interpreter_result_Result.FAULT_ACCESS;
@@ -42694,7 +43153,7 @@ class StoreOps {
42694
43153
  this.store(address, secondImmediateDecoder.getExtendedBytesAsLittleEndian());
42695
43154
  }
42696
43155
  store(address, bytes) {
42697
- const storeResult = this.memory.storeFrom(tryAsMemoryIndex(address), bytes);
43156
+ const storeResult = this.memory.storeFrom(memory_index_tryAsMemoryIndex(address), bytes);
42698
43157
  if (storeResult.isOk) {
42699
43158
  return;
42700
43159
  }
@@ -42703,7 +43162,7 @@ class StoreOps {
42703
43162
  }
42704
43163
  else {
42705
43164
  this.instructionResult.status = pvm_interpreter_result_Result.FAULT;
42706
- this.instructionResult.exitParam = getStartPageIndex(tryAsMemoryIndex(storeResult.error.address));
43165
+ this.instructionResult.exitParam = getStartPageIndex(memory_index_tryAsMemoryIndex(storeResult.error.address));
42707
43166
  }
42708
43167
  }
42709
43168
  }
@@ -43502,11 +43961,11 @@ class ProgramDecoder {
43502
43961
 
43503
43962
 
43504
43963
  const interpreter_logger = logger_Logger.new(import.meta.filename, "pvm");
43505
- class Interpreter {
43964
+ class interpreter_Interpreter {
43506
43965
  useSbrkGas;
43507
- registers = new Registers();
43966
+ registers = new registers_Registers();
43508
43967
  memory = new Memory();
43509
- gas = gasCounter(tryAsGas(0));
43968
+ gas = gasCounter(gas_tryAsGas(0));
43510
43969
  code = new Uint8Array();
43511
43970
  mask = Mask.empty();
43512
43971
  pc = 0;
@@ -43640,8 +44099,8 @@ class Interpreter {
43640
44099
  break;
43641
44100
  case ArgumentType.TWO_REGISTERS:
43642
44101
  if (this.useSbrkGas && currentInstruction === Instruction.SBRK) {
43643
- const calculateSbrkCost = (length) => (alignToPageSize(length) / PAGE_SIZE) * 16;
43644
- const underflow = this.gas.sub(tryAsGas(calculateSbrkCost(this.registers.getLowerU32(argsResult.firstRegisterIndex))));
44102
+ const calculateSbrkCost = (length) => (alignToPageSize(length) / memory_consts_PAGE_SIZE) * 16;
44103
+ const underflow = this.gas.sub(gas_tryAsGas(calculateSbrkCost(this.registers.getLowerU32(argsResult.firstRegisterIndex))));
43645
44104
  if (underflow) {
43646
44105
  this.status = status_Status.OOG;
43647
44106
  return this.status;
@@ -43736,12 +44195,88 @@ class Interpreter {
43736
44195
  }
43737
44196
  }
43738
44197
 
44198
+ ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/debugger-adapter.ts
44199
+
44200
+
44201
+
44202
+
44203
+
44204
+
44205
+ class DebuggerAdapter {
44206
+ pvm;
44207
+ constructor(useSbrkGas = false) {
44208
+ this.pvm = new Interpreter({ useSbrkGas });
44209
+ }
44210
+ resetGeneric(rawProgram, flatRegisters, initialGas) {
44211
+ this.pvm.resetGeneric(rawProgram, 0, tryAsGas(initialGas), new Registers(flatRegisters));
44212
+ }
44213
+ reset(rawProgram, pc, gas, maybeRegisters, maybeMemory) {
44214
+ this.pvm.resetGeneric(rawProgram, pc, tryAsGas(gas), maybeRegisters, maybeMemory);
44215
+ }
44216
+ getPageDump(pageNumber) {
44217
+ const page = this.pvm.getMemoryPage(pageNumber);
44218
+ if (page === null) {
44219
+ // page wasn't allocated so we return an empty page
44220
+ return safeAllocUint8Array(PAGE_SIZE);
44221
+ }
44222
+ if (page.length === PAGE_SIZE) {
44223
+ // page was allocated and has a proper size so we can simply return it
44224
+ return page;
44225
+ }
44226
+ // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
44227
+ const fullPage = safeAllocUint8Array(PAGE_SIZE);
44228
+ fullPage.set(page);
44229
+ return fullPage;
44230
+ }
44231
+ setMemory(address, value) {
44232
+ this.pvm.memory.storeFrom(tryAsMemoryIndex(address), value);
44233
+ }
44234
+ getExitArg() {
44235
+ return this.pvm.getExitParam() ?? 0;
44236
+ }
44237
+ getStatus() {
44238
+ return this.pvm.getStatus();
44239
+ }
44240
+ nextStep() {
44241
+ return this.pvm.nextStep() === Status.OK;
44242
+ }
44243
+ nSteps(steps) {
44244
+ check `${steps >>> 0 > 0} Expected a positive integer got ${steps}`;
44245
+ for (let i = 0; i < steps; i++) {
44246
+ const isOk = this.nextStep();
44247
+ if (!isOk) {
44248
+ return false;
44249
+ }
44250
+ }
44251
+ return true;
44252
+ }
44253
+ getRegisters() {
44254
+ return this.pvm.registers.getAllU64();
44255
+ }
44256
+ setRegisters(registers) {
44257
+ this.pvm.registers.copyFrom(new Registers(registers));
44258
+ }
44259
+ getProgramCounter() {
44260
+ return this.pvm.getPC();
44261
+ }
44262
+ setNextProgramCounter(nextPc) {
44263
+ this.pvm.setNextPC(nextPc);
44264
+ }
44265
+ getGasLeft() {
44266
+ return BigInt(this.pvm.gas.get());
44267
+ }
44268
+ setGasLeft(gas) {
44269
+ this.pvm.gas.set(tryAsGas(gas));
44270
+ }
44271
+ }
44272
+
43739
44273
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/index.ts
43740
44274
 
43741
44275
 
43742
44276
 
43743
44277
 
43744
44278
 
44279
+
43745
44280
  ;// CONCATENATED MODULE: ./node_modules/@fluffylabs/anan-as/build/debug-raw.js
43746
44281
  async function instantiate(module, imports = {}) {
43747
44282
  const adaptedImports = {
@@ -44195,12 +44730,12 @@ class AnanasMemory {
44195
44730
  }
44196
44731
  class AnanasGasCounter {
44197
44732
  instance;
44198
- initialGas = tryAsGas(0n);
44733
+ initialGas = gas_tryAsGas(0n);
44199
44734
  constructor(instance) {
44200
44735
  this.instance = instance;
44201
44736
  }
44202
44737
  get() {
44203
- return tryAsGas(this.instance.getGasLeft());
44738
+ return gas_tryAsGas(this.instance.getGasLeft());
44204
44739
  }
44205
44740
  set(g) {
44206
44741
  this.instance.setGasLeft(BigInt(g));
@@ -44305,7 +44840,7 @@ class InterpreterInstanceManager {
44305
44840
  const instances = [];
44306
44841
  switch (interpreter) {
44307
44842
  case PvmBackend.BuiltIn:
44308
- instances.push(new Interpreter({
44843
+ instances.push(new interpreter_Interpreter({
44309
44844
  useSbrkGas: false,
44310
44845
  }));
44311
44846
  break;
@@ -44479,10 +45014,10 @@ class Info {
44479
45014
  const chunk = encodedInfo.raw.subarray(Number(offset), Number(offset + length));
44480
45015
  const writeResult = memory.storeFrom(outputStart, chunk);
44481
45016
  if (writeResult.isError) {
44482
- logger_logger.trace `INFO(${serviceId}, off: ${offset}, len: ${length}) <- PANIC`;
45017
+ logger_logger.trace `[${this.currentServiceId}] INFO(${serviceId}, off: ${offset}, len: ${length}) <- PANIC`;
44483
45018
  return PvmExecution.Panic;
44484
45019
  }
44485
- logger_logger.trace `INFO(${serviceId}, off: ${offset}, len: ${length}) <- ${bytes_BytesBlob.blobFrom(chunk)}`;
45020
+ logger_logger.trace `[${this.currentServiceId}] INFO(${serviceId}, off: ${offset}, len: ${length}) <- ${bytes_BytesBlob.blobFrom(chunk)}`;
44486
45021
  if (accountInfo === null) {
44487
45022
  regs.set(IN_OUT_REG, HostCallResult.NONE);
44488
45023
  return;
@@ -44706,7 +45241,7 @@ class AccumulateExternalities {
44706
45241
  const bytes = serviceInfo.storageUtilisationBytes - length - LOOKUP_HISTORY_ENTRY_BYTES;
44707
45242
  return this.updatedState.updateServiceStorageUtilisation(this.currentServiceId, items, bytes, serviceInfo);
44708
45243
  };
44709
- // https://graypaper.fluffylabs.dev/#/9a08063/389501389501?v=0.6.6
45244
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/380802380802?v=0.7.2
44710
45245
  if (s.status === PreimageStatusKind.Requested) {
44711
45246
  const res = updateStorageUtilisation();
44712
45247
  if (res.isError) {
@@ -44719,7 +45254,7 @@ class AccumulateExternalities {
44719
45254
  return result_Result.ok(result_OK);
44720
45255
  }
44721
45256
  const t = this.currentTimeslot;
44722
- // https://graypaper.fluffylabs.dev/#/9a08063/378102378102?v=0.6.6
45257
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/380802380802?v=0.7.2
44723
45258
  if (s.status === PreimageStatusKind.Unavailable) {
44724
45259
  const y = s.data[1];
44725
45260
  if (y < t - this.chainSpec.preimageExpungePeriod) {
@@ -44735,14 +45270,14 @@ class AccumulateExternalities {
44735
45270
  }
44736
45271
  return result_Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
44737
45272
  }
44738
- // https://graypaper.fluffylabs.dev/#/9a08063/38c80138c801?v=0.6.6
45273
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/382802383302?v=0.7.2
44739
45274
  if (s.status === PreimageStatusKind.Available) {
44740
45275
  this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
44741
45276
  lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[0], t])),
44742
45277
  }));
44743
45278
  return result_Result.ok(result_OK);
44744
45279
  }
44745
- // https://graypaper.fluffylabs.dev/#/9a08063/38d00138d001?v=0.6.6
45280
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/384002384c02?v=0.7.2
44746
45281
  if (s.status === PreimageStatusKind.Reavailable) {
44747
45282
  const y = s.data[1];
44748
45283
  if (y < t - this.chainSpec.preimageExpungePeriod) {
@@ -45578,13 +46113,11 @@ class AccumulateDataItem {
45578
46113
  * - gas cost and reports length for each service (statistics)
45579
46114
  */
45580
46115
  class AccumulateData {
45581
- autoAccumulateServicesByServiceId;
45582
46116
  reportsDataByServiceId;
45583
46117
  transfersByServiceId;
45584
46118
  serviceIds;
45585
46119
  gasLimitByServiceId;
45586
46120
  constructor(reports, transfers, autoAccumulateServicesByServiceId) {
45587
- this.autoAccumulateServicesByServiceId = autoAccumulateServicesByServiceId;
45588
46121
  const serviceIdsFromAutoAccumulate = new Set(autoAccumulateServicesByServiceId.keys());
45589
46122
  const { reportsDataByServiceId, serviceIds: serviceIdsFromReports, gasLimitByServiceId: reportsGasLimitByServiceId, } = this.transformReports(reports);
45590
46123
  this.reportsDataByServiceId = reportsDataByServiceId;
@@ -45904,12 +46437,12 @@ function createMergeContext(chainSpec, state, inputState, results) {
45904
46437
  }
45905
46438
  function updatePrivilegedService(currentServiceId, serviceIdUpdatedByManager, selfUpdatedServiceId) {
45906
46439
  if (currentServiceId === serviceIdUpdatedByManager) {
45907
- return serviceIdUpdatedByManager;
46440
+ return selfUpdatedServiceId;
45908
46441
  }
45909
- return selfUpdatedServiceId;
46442
+ return serviceIdUpdatedByManager;
45910
46443
  }
45911
46444
  function mergePrivilegedServices(mergeContext, [serviceId, { stateUpdate }]) {
45912
- const { outputState, currentPrivilegedServices, chainSpec } = mergeContext;
46445
+ const { outputState, currentPrivilegedServices, chainSpec, privilegedServicesUpdatedByManager } = mergeContext;
45913
46446
  const currentManager = currentPrivilegedServices.manager;
45914
46447
  const currentRegistrar = currentPrivilegedServices.registrar;
45915
46448
  const currentDelegator = currentPrivilegedServices.delegator;
@@ -45927,28 +46460,35 @@ function mergePrivilegedServices(mergeContext, [serviceId, { stateUpdate }]) {
45927
46460
  });
45928
46461
  }
45929
46462
  if (serviceId === currentRegistrar) {
45930
- const newRegistrar = updatePrivilegedService(currentPrivilegedServices.registrar, outputState.privilegedServices.registrar, privilegedServices.registrar);
46463
+ const newRegistrar = updatePrivilegedService(currentPrivilegedServices.registrar, privilegedServicesUpdatedByManager.registrar, privilegedServices.registrar);
45931
46464
  outputState.privilegedServices = PrivilegedServices.create({
45932
46465
  ...outputState.privilegedServices,
45933
46466
  registrar: newRegistrar,
45934
46467
  });
45935
46468
  }
45936
46469
  if (serviceId === currentDelegator) {
45937
- const newDelegator = updatePrivilegedService(currentPrivilegedServices.delegator, outputState.privilegedServices.delegator, privilegedServices.delegator);
46470
+ const newDelegator = updatePrivilegedService(currentPrivilegedServices.delegator, privilegedServicesUpdatedByManager.delegator, privilegedServices.delegator);
45938
46471
  outputState.privilegedServices = PrivilegedServices.create({
45939
46472
  ...outputState.privilegedServices,
45940
46473
  delegator: newDelegator,
45941
46474
  });
45942
46475
  }
45943
- const assignersFromOutputState = outputState.privilegedServices;
45944
- const newAssigners = currentAssigners.map((currentAssigner, coreIndex) => serviceId === currentAssigner
45945
- ? updatePrivilegedService(currentPrivilegedServices.assigners[coreIndex], assignersFromOutputState.assigners[coreIndex], privilegedServices.assigners[coreIndex])
45946
- : currentAssigner);
45947
- const newAssignersPerCore = tryAsPerCore(newAssigners, chainSpec);
45948
- outputState.privilegedServices = PrivilegedServices.create({
45949
- ...outputState.privilegedServices,
45950
- assigners: newAssignersPerCore,
46476
+ let shouldUpdateAssigners = false;
46477
+ const newAssigners = currentAssigners.map((currentAssigner, coreIndex) => {
46478
+ if (serviceId === currentAssigner) {
46479
+ const newAssigner = updatePrivilegedService(currentPrivilegedServices.assigners[coreIndex], privilegedServicesUpdatedByManager.assigners[coreIndex], privilegedServices.assigners[coreIndex]);
46480
+ shouldUpdateAssigners = shouldUpdateAssigners || newAssigner !== currentAssigner;
46481
+ return newAssigner;
46482
+ }
46483
+ return currentAssigner;
45951
46484
  });
46485
+ if (shouldUpdateAssigners) {
46486
+ const newAssignersPerCore = tryAsPerCore(newAssigners, chainSpec);
46487
+ outputState.privilegedServices = PrivilegedServices.create({
46488
+ ...outputState.privilegedServices,
46489
+ assigners: newAssignersPerCore,
46490
+ });
46491
+ }
45952
46492
  }
45953
46493
  }
45954
46494
  function mergeValidatorsData(mergeContext, [serviceId, { stateUpdate }]) {
@@ -46093,7 +46633,7 @@ class Assign {
46093
46633
  const memoryReadResult = memory.loadInto(res, authorizationQueueStart);
46094
46634
  // error while reading the memory.
46095
46635
  if (memoryReadResult.isError) {
46096
- logger_logger.trace `ASSIGN() <- PANIC`;
46636
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN() <- PANIC`;
46097
46637
  return PvmExecution.Panic;
46098
46638
  }
46099
46639
  if (maybeCoreIndex >= this.chainSpec.coresCount) {
@@ -46108,18 +46648,18 @@ class Assign {
46108
46648
  const result = this.partialState.updateAuthorizationQueue(coreIndex, fixedSizeAuthQueue, assigners);
46109
46649
  if (result.isOk) {
46110
46650
  regs.set(assign_IN_OUT_REG, HostCallResult.OK);
46111
- logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`;
46651
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`;
46112
46652
  return;
46113
46653
  }
46114
46654
  const e = result.error;
46115
46655
  if (e === UpdatePrivilegesError.UnprivilegedService) {
46116
46656
  regs.set(assign_IN_OUT_REG, HostCallResult.HUH);
46117
- logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
46657
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
46118
46658
  return;
46119
46659
  }
46120
46660
  if (e === UpdatePrivilegesError.InvalidServiceId) {
46121
46661
  regs.set(assign_IN_OUT_REG, HostCallResult.WHO);
46122
- logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
46662
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
46123
46663
  return;
46124
46664
  }
46125
46665
  debug_assertNever(e);
@@ -46190,7 +46730,7 @@ class Bless {
46190
46730
  decoder.resetTo(0);
46191
46731
  const memoryReadResult = memory.loadInto(result, memIndex);
46192
46732
  if (memoryReadResult.isError) {
46193
- logger_logger.trace `BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}) <- PANIC`;
46733
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}) <- PANIC`;
46194
46734
  return PvmExecution.Panic;
46195
46735
  }
46196
46736
  const { serviceId, gas } = decoder.object(serviceIdAndGasCodec);
@@ -46203,26 +46743,26 @@ class Bless {
46203
46743
  const authorizersDecoder = decoder_Decoder.fromBlob(res);
46204
46744
  const memoryReadResult = memory.loadInto(res, authorization);
46205
46745
  if (memoryReadResult.isError) {
46206
- logger_logger.trace `BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- PANIC`;
46746
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- PANIC`;
46207
46747
  return PvmExecution.Panic;
46208
46748
  }
46209
46749
  // `a`
46210
46750
  const authorizers = tryAsPerCore(authorizersDecoder.sequenceFixLen(descriptors_codec.u32.asOpaque(), this.chainSpec.coresCount), this.chainSpec);
46211
46751
  const updateResult = this.partialState.updatePrivilegedServices(manager, authorizers, delegator, registrar, autoAccumulate);
46212
46752
  if (updateResult.isOk) {
46213
- logger_logger.trace `BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- OK`;
46753
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- OK`;
46214
46754
  regs.set(bless_IN_OUT_REG, HostCallResult.OK);
46215
46755
  return;
46216
46756
  }
46217
46757
  const e = updateResult.error;
46218
46758
  // NOTE: `UpdatePrivilegesError.UnprivilegedService` won't happen in 0.7.1+
46219
46759
  if (e === UpdatePrivilegesError.UnprivilegedService) {
46220
- logger_logger.trace `BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- HUH`;
46760
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- HUH`;
46221
46761
  regs.set(bless_IN_OUT_REG, HostCallResult.HUH);
46222
46762
  return;
46223
46763
  }
46224
46764
  if (e === UpdatePrivilegesError.InvalidServiceId) {
46225
- logger_logger.trace `BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- WHO`;
46765
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- WHO`;
46226
46766
  regs.set(bless_IN_OUT_REG, HostCallResult.WHO);
46227
46767
  return;
46228
46768
  }
@@ -46252,7 +46792,7 @@ class GasHostCall {
46252
46792
  }
46253
46793
  execute(gas, regs) {
46254
46794
  const gasValue = gas.get();
46255
- logger_logger.trace `GAS <- ${gasValue}`;
46795
+ logger_logger.trace `[${this.currentServiceId}] GAS <- ${gasValue}`;
46256
46796
  regs.set(7, numbers_tryAsU64(gasValue));
46257
46797
  return Promise.resolve(undefined);
46258
46798
  }
@@ -46284,7 +46824,7 @@ class Checkpoint {
46284
46824
  async execute(gas, regs) {
46285
46825
  await this.gasHostCall.execute(gas, regs);
46286
46826
  this.partialState.checkpoint();
46287
- logger_logger.trace `CHECKPOINT()`;
46827
+ logger_logger.trace `[${this.currentServiceId}] CHECKPOINT()`;
46288
46828
  return;
46289
46829
  }
46290
46830
  }
@@ -46324,18 +46864,18 @@ class Designate {
46324
46864
  const memoryReadResult = memory.loadInto(res, validatorsStart);
46325
46865
  // error while reading the memory.
46326
46866
  if (memoryReadResult.isError) {
46327
- logger_logger.trace `DESIGNATE() <- PANIC`;
46867
+ logger_logger.trace `[${this.currentServiceId}] DESIGNATE() <- PANIC`;
46328
46868
  return PvmExecution.Panic;
46329
46869
  }
46330
46870
  const decoder = decoder_Decoder.fromBlob(res);
46331
46871
  const validatorsData = decoder.sequenceFixLen(ValidatorData.Codec, this.chainSpec.validatorsCount);
46332
46872
  const result = this.partialState.updateValidatorsData(tryAsPerValidator(validatorsData, this.chainSpec));
46333
46873
  if (result.isError) {
46334
- logger_logger.trace `DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`;
46874
+ logger_logger.trace `[${this.currentServiceId}] DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`;
46335
46875
  regs.set(designate_IN_OUT_REG, HostCallResult.HUH);
46336
46876
  }
46337
46877
  else {
46338
- logger_logger.trace `DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`;
46878
+ logger_logger.trace `[${this.currentServiceId}] DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`;
46339
46879
  regs.set(designate_IN_OUT_REG, HostCallResult.OK);
46340
46880
  }
46341
46881
  }
@@ -46376,29 +46916,29 @@ class Eject {
46376
46916
  const previousCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
46377
46917
  const memoryReadResult = memory.loadInto(previousCodeHash.raw, preimageHashStart);
46378
46918
  if (memoryReadResult.isError) {
46379
- logger_logger.trace `EJECT(${serviceId}) <- PANIC`;
46919
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}) <- PANIC`;
46380
46920
  return PvmExecution.Panic;
46381
46921
  }
46382
46922
  // cannot eject self
46383
46923
  if (serviceId === this.currentServiceId) {
46384
46924
  regs.set(eject_IN_OUT_REG, HostCallResult.WHO);
46385
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- WHO`;
46925
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- WHO`;
46386
46926
  return;
46387
46927
  }
46388
46928
  const result = this.partialState.eject(serviceId, previousCodeHash);
46389
46929
  // All good!
46390
46930
  if (result.isOk) {
46391
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- OK`;
46931
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- OK`;
46392
46932
  regs.set(eject_IN_OUT_REG, HostCallResult.OK);
46393
46933
  return;
46394
46934
  }
46395
46935
  const e = result.error;
46396
46936
  if (e === EjectError.InvalidService) {
46397
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- WHO ${result_resultToString(result)}`;
46937
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- WHO ${result_resultToString(result)}`;
46398
46938
  regs.set(eject_IN_OUT_REG, HostCallResult.WHO);
46399
46939
  }
46400
46940
  else if (e === EjectError.InvalidPreimage) {
46401
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- HUH ${result_resultToString(result)}`;
46941
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- HUH ${result_resultToString(result)}`;
46402
46942
  regs.set(eject_IN_OUT_REG, HostCallResult.HUH);
46403
46943
  }
46404
46944
  else {
@@ -46417,9 +46957,9 @@ class Eject {
46417
46957
 
46418
46958
  const forget_IN_OUT_REG = 7;
46419
46959
  /**
46420
- * Mark a preimage hash as unavailable.
46960
+ * Delete a preimage hash, or mark it as unavailable if it was available.
46421
46961
  *
46422
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/382d01382d01?v=0.6.7
46962
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/385d01385d01?v=0.7.2
46423
46963
  */
46424
46964
  class Forget {
46425
46965
  currentServiceId;
@@ -46440,11 +46980,11 @@ class Forget {
46440
46980
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
46441
46981
  // error while reading the memory.
46442
46982
  if (memoryReadResult.isError) {
46443
- logger_logger.trace `FORGET(${hash}, ${length}) <- PANIC`;
46983
+ logger_logger.trace `[${this.currentServiceId}] FORGET(${hash}, ${length}) <- PANIC`;
46444
46984
  return PvmExecution.Panic;
46445
46985
  }
46446
46986
  const result = this.partialState.forgetPreimage(hash.asOpaque(), length);
46447
- logger_logger.trace `FORGET(${hash}, ${length}) <- ${result_resultToString(result)}`;
46987
+ logger_logger.trace `[${this.currentServiceId}] FORGET(${hash}, ${length}) <- ${result_resultToString(result)}`;
46448
46988
  if (result.isOk) {
46449
46989
  regs.set(forget_IN_OUT_REG, HostCallResult.OK);
46450
46990
  }
@@ -46501,11 +47041,11 @@ class New {
46501
47041
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
46502
47042
  // error while reading the memory.
46503
47043
  if (memoryReadResult.isError) {
46504
- logger_logger.trace `NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- PANIC`;
47044
+ logger_logger.trace `[${this.currentServiceId}] NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- PANIC`;
46505
47045
  return PvmExecution.Panic;
46506
47046
  }
46507
47047
  const assignedId = this.partialState.newService(codeHash.asOpaque(), codeLength, gas, allowance, gratisStorage, requestedServiceId);
46508
- logger_logger.trace `NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- ${result_resultToString(assignedId)}`;
47048
+ logger_logger.trace `[${this.currentServiceId}] NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- ${result_resultToString(assignedId)}`;
46509
47049
  if (assignedId.isOk) {
46510
47050
  regs.set(new_IN_OUT_REG, numbers_tryAsU64(assignedId.ok));
46511
47051
  return;
@@ -46565,11 +47105,11 @@ class Provide {
46565
47105
  const preimage = bytes_BytesBlob.blobFrom(safe_alloc_uint8array_safeAllocUint8Array(length));
46566
47106
  const memoryReadResult = memory.loadInto(preimage.raw, preimageStart);
46567
47107
  if (memoryReadResult.isError) {
46568
- logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
47108
+ logger_logger.trace `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
46569
47109
  return PvmExecution.Panic;
46570
47110
  }
46571
47111
  const result = this.partialState.providePreimage(serviceId, preimage);
46572
- logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${result_resultToString(result)}`;
47112
+ logger_logger.trace `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${result_resultToString(result)}`;
46573
47113
  if (result.isOk) {
46574
47114
  regs.set(provide_IN_OUT_REG, HostCallResult.OK);
46575
47115
  return;
@@ -46625,35 +47165,35 @@ class Query {
46625
47165
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
46626
47166
  // error while reading the memory.
46627
47167
  if (memoryReadResult.isError) {
46628
- logger_logger.trace `QUERY(${hash}, ${length}) <- PANIC`;
47168
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- PANIC`;
46629
47169
  return PvmExecution.Panic;
46630
47170
  }
46631
47171
  const result = this.partialState.checkPreimageStatus(hash.asOpaque(), length);
46632
47172
  const zero = numbers_tryAsU64(0n);
46633
47173
  if (result === null) {
46634
- logger_logger.trace `QUERY(${hash}, ${length}) <- NONE`;
47174
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- NONE`;
46635
47175
  regs.set(IN_OUT_REG_1, HostCallResult.NONE);
46636
47176
  regs.set(IN_OUT_REG_2, zero);
46637
47177
  return;
46638
47178
  }
46639
47179
  switch (result.status) {
46640
47180
  case PreimageStatusKind.Requested:
46641
- logger_logger.trace `QUERY(${hash}, ${length}) <- REQUESTED`;
47181
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- REQUESTED`;
46642
47182
  regs.set(IN_OUT_REG_1, zero);
46643
47183
  regs.set(IN_OUT_REG_2, zero);
46644
47184
  return;
46645
47185
  case PreimageStatusKind.Available:
46646
- logger_logger.trace `QUERY(${hash}, ${length}) <- AVAILABLE [${result.data}]`;
47186
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- AVAILABLE [${result.data}]`;
46647
47187
  regs.set(IN_OUT_REG_1, numbers_tryAsU64((BigInt(result.data[0]) << UPPER_BITS_SHIFT) + 1n));
46648
47188
  regs.set(IN_OUT_REG_2, zero);
46649
47189
  return;
46650
47190
  case PreimageStatusKind.Unavailable:
46651
- logger_logger.trace `QUERY(${hash}, ${length}) <- UNAVAILABLE [${result.data.join(", ")}]`;
47191
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- UNAVAILABLE [${result.data.join(", ")}]`;
46652
47192
  regs.set(IN_OUT_REG_1, numbers_tryAsU64((BigInt(result.data[0]) << UPPER_BITS_SHIFT) + 2n));
46653
47193
  regs.set(IN_OUT_REG_2, numbers_tryAsU64(result.data[1]));
46654
47194
  return;
46655
47195
  case PreimageStatusKind.Reavailable:
46656
- logger_logger.trace `QUERY(${hash}, ${length}) <- REAVAILABLE [${result.data.join(", ")}]`;
47196
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- REAVAILABLE [${result.data.join(", ")}]`;
46657
47197
  regs.set(IN_OUT_REG_1, numbers_tryAsU64((BigInt(result.data[0]) << UPPER_BITS_SHIFT) + 3n));
46658
47198
  regs.set(IN_OUT_REG_2, numbers_tryAsU64((BigInt(result.data[2]) << UPPER_BITS_SHIFT) + BigInt(result.data[1])));
46659
47199
  return;
@@ -46694,11 +47234,11 @@ class Solicit {
46694
47234
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
46695
47235
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
46696
47236
  if (memoryReadResult.isError) {
46697
- logger_logger.trace `SOLICIT(${hash}, ${length}) <- PANIC`;
47237
+ logger_logger.trace `[${this.currentServiceId}] SOLICIT(${hash}, ${length}) <- PANIC`;
46698
47238
  return PvmExecution.Panic;
46699
47239
  }
46700
47240
  const result = this.partialState.requestPreimage(hash.asOpaque(), length);
46701
- logger_logger.trace `SOLICIT(${hash}, ${length}) <- ${result_resultToString(result)}`;
47241
+ logger_logger.trace `[${this.currentServiceId}] SOLICIT(${hash}, ${length}) <- ${result_resultToString(result)}`;
46702
47242
  if (result.isOk) {
46703
47243
  regs.set(solicit_IN_OUT_REG, HostCallResult.OK);
46704
47244
  return;
@@ -46756,7 +47296,7 @@ class Transfer {
46756
47296
  */
46757
47297
  basicGasCost = Compatibility.isGreaterOrEqual(GpVersion.V0_7_2)
46758
47298
  ? gas_tryAsSmallGas(10)
46759
- : (regs) => tryAsGas(10n + regs.get(TRANSFER_GAS_FEE_REG));
47299
+ : (regs) => gas_tryAsGas(10n + regs.get(TRANSFER_GAS_FEE_REG));
46760
47300
  tracedRegisters = traceRegisters(transfer_IN_OUT_REG, AMOUNT_REG, TRANSFER_GAS_FEE_REG, MEMO_START_REG);
46761
47301
  constructor(currentServiceId, partialState) {
46762
47302
  this.currentServiceId = currentServiceId;
@@ -46775,16 +47315,16 @@ class Transfer {
46775
47315
  const memoryReadResult = memory.loadInto(memo.raw, memoStart);
46776
47316
  // page fault while reading the memory.
46777
47317
  if (memoryReadResult.isError) {
46778
- logger_logger.trace `TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- PANIC`;
47318
+ logger_logger.trace `[${this.currentServiceId}] TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- PANIC`;
46779
47319
  return PvmExecution.Panic;
46780
47320
  }
46781
47321
  const transferResult = this.partialState.transfer(destination, amount, transferGasFee, memo);
46782
- logger_logger.trace `TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- ${result_resultToString(transferResult)}`;
47322
+ logger_logger.trace `[${this.currentServiceId}] TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- ${result_resultToString(transferResult)}`;
46783
47323
  // All good!
46784
47324
  if (transferResult.isOk) {
46785
47325
  if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_2)) {
46786
47326
  // subtracting value `t`
46787
- const underflow = gas.sub(tryAsGas(transferGasFee));
47327
+ const underflow = gas.sub(gas_tryAsGas(transferGasFee));
46788
47328
  if (underflow) {
46789
47329
  return PvmExecution.OOG;
46790
47330
  }
@@ -46845,11 +47385,11 @@ class Upgrade {
46845
47385
  const codeHash = bytes_Bytes.zero(hash_HASH_SIZE);
46846
47386
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
46847
47387
  if (memoryReadResult.isError) {
46848
- logger_logger.trace `UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`;
47388
+ logger_logger.trace `[${this.currentServiceId}] UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`;
46849
47389
  return PvmExecution.Panic;
46850
47390
  }
46851
47391
  this.partialState.upgradeService(codeHash.asOpaque(), gas, allowance);
46852
- logger_logger.trace `UPGRADE(${codeHash}, ${gas}, ${allowance})`;
47392
+ logger_logger.trace `[${this.currentServiceId}] UPGRADE(${codeHash}, ${gas}, ${allowance})`;
46853
47393
  regs.set(upgrade_IN_OUT_REG, HostCallResult.OK);
46854
47394
  }
46855
47395
  }
@@ -46883,11 +47423,11 @@ class Yield {
46883
47423
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
46884
47424
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
46885
47425
  if (memoryReadResult.isError) {
46886
- logger_logger.trace `YIELD() <- PANIC`;
47426
+ logger_logger.trace `[${this.currentServiceId}] YIELD() <- PANIC`;
46887
47427
  return PvmExecution.Panic;
46888
47428
  }
46889
47429
  this.partialState.yield(hash);
46890
- logger_logger.trace `YIELD(${hash})`;
47430
+ logger_logger.trace `[${this.currentServiceId}] YIELD(${hash})`;
46891
47431
  regs.set(yield_IN_OUT_REG, HostCallResult.OK);
46892
47432
  }
46893
47433
  }
@@ -46930,10 +47470,10 @@ class Fetch {
46930
47470
  const chunk = value === null ? new Uint8Array() : value.raw.subarray(Number(offset), Number(offset + length));
46931
47471
  const storeResult = memory.storeFrom(output, chunk);
46932
47472
  if (storeResult.isError) {
46933
- logger_logger.trace `FETCH(${kind}) <- PANIC`;
47473
+ logger_logger.trace `[${this.currentServiceId}] FETCH(${kind}) <- PANIC`;
46934
47474
  return PvmExecution.Panic;
46935
47475
  }
46936
- logger_logger.trace `FETCH(${kind}) <- ${value?.toStringTruncated()}`;
47476
+ logger_logger.trace `[${this.currentServiceId}] FETCH(${kind}) <- ${value?.toStringTruncated()}`;
46937
47477
  // write result
46938
47478
  regs.set(fetch_IN_OUT_REG, value === null ? HostCallResult.NONE : valueLength);
46939
47479
  }
@@ -47083,7 +47623,7 @@ class LogHostCall {
47083
47623
  }
47084
47624
  memory.loadInto(message, msgStart);
47085
47625
  const level = clampU64ToU32(lvl);
47086
- logger_logger.trace `LOG(${this.currentServiceId}, ${level < Levels.UNKNOWN ? Levels[level] : Levels[Levels.UNKNOWN]}(${lvl}), ${decoder.decode(target)}, ${decoder.decode(message)})`;
47626
+ logger_logger.trace `[${this.currentServiceId}] LOG(${this.currentServiceId}, ${level < Levels.UNKNOWN ? Levels[level] : Levels[Levels.UNKNOWN]}(${lvl}), ${decoder.decode(target)}, ${decoder.decode(message)})`;
47087
47627
  return Promise.resolve(undefined);
47088
47628
  }
47089
47629
  }
@@ -47124,12 +47664,12 @@ class Lookup {
47124
47664
  const preImageHash = bytes_Bytes.zero(hash_HASH_SIZE);
47125
47665
  const memoryReadResult = memory.loadInto(preImageHash.raw, hashAddress);
47126
47666
  if (memoryReadResult.isError) {
47127
- logger_logger.trace `LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`;
47667
+ logger_logger.trace `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`;
47128
47668
  return PvmExecution.Panic;
47129
47669
  }
47130
47670
  // v
47131
47671
  const preImage = this.account.lookup(serviceId, preImageHash);
47132
- logger_logger.trace `LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated() ?? "<missing>"}...`;
47672
+ logger_logger.trace `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated() ?? "<missing>"}...`;
47133
47673
  const preImageLength = preImage === null ? numbers_tryAsU64(0) : numbers_tryAsU64(preImage.raw.length);
47134
47674
  const preimageBlobOffset = regs.get(10);
47135
47675
  const lengthToWrite = regs.get(11);
@@ -47226,20 +47766,20 @@ class Read {
47226
47766
  const chunk = value === null ? safe_alloc_uint8array_safeAllocUint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
47227
47767
  const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
47228
47768
  if (memoryWriteResult.isError) {
47229
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
47769
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- PANIC`;
47230
47770
  return PvmExecution.Panic;
47231
47771
  }
47232
47772
  if (value === null) {
47233
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- NONE`;
47773
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- NONE`;
47234
47774
  regs.set(read_IN_OUT_REG, HostCallResult.NONE);
47235
47775
  return;
47236
47776
  }
47237
47777
  if (chunk.length > 0) {
47238
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`;
47778
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`;
47239
47779
  }
47240
47780
  else {
47241
47781
  // just a query for length of stored data
47242
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- (${valueLength} ${valueLength === 1n ? "byte" : "bytes"})`;
47782
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- (${valueLength} ${valueLength === 1n ? "byte" : "bytes"})`;
47243
47783
  }
47244
47784
  regs.set(read_IN_OUT_REG, valueLength);
47245
47785
  }
@@ -47283,7 +47823,7 @@ class Write {
47283
47823
  const rawStorageKey = safe_alloc_uint8array_safeAllocUint8Array(storageKeyLengthClamped);
47284
47824
  const keyLoadingResult = memory.loadInto(rawStorageKey, storageKeyStartAddress);
47285
47825
  if (keyLoadingResult.isError) {
47286
- logger_logger.trace `WRITE() <- PANIC`;
47826
+ logger_logger.trace `[${this.currentServiceId}] WRITE() <- PANIC`;
47287
47827
  return PvmExecution.Panic;
47288
47828
  }
47289
47829
  // k
@@ -47293,14 +47833,14 @@ class Write {
47293
47833
  const valueLoadingResult = memory.loadInto(value, valueStart);
47294
47834
  // Note [MaSo] this is ok to return because if valueLength is 0, then this panic won't happen
47295
47835
  if (valueLoadingResult.isError) {
47296
- logger_logger.trace `WRITE(${storageKey}) <- PANIC`;
47836
+ logger_logger.trace `[${this.currentServiceId}] WRITE(${storageKey}) <- PANIC`;
47297
47837
  return PvmExecution.Panic;
47298
47838
  }
47299
47839
  /** https://graypaper.fluffylabs.dev/#/9a08063/33af0133b201?v=0.6.6 */
47300
47840
  const maybeValue = valueLength === 0n ? null : bytes_BytesBlob.blobFrom(value);
47301
47841
  // a
47302
47842
  const result = this.account.write(storageKey, maybeValue);
47303
- logger_logger.trace `WRITE(${storageKey}, ${maybeValue?.toStringTruncated() ?? "remove"}) <- ${result_resultToString(result)}`;
47843
+ logger_logger.trace `[${this.currentServiceId}] WRITE(${storageKey}, ${maybeValue?.toStringTruncated() ?? "remove"}) <- ${result_resultToString(result)}`;
47304
47844
  if (result.isError) {
47305
47845
  regs.set(write_IN_OUT_REG, HostCallResult.FULL);
47306
47846
  return;
@@ -47529,7 +48069,7 @@ class Accumulate {
47529
48069
  serviceId,
47530
48070
  argsLength: numbers_tryAsU32(transfers.length + operands.length),
47531
48071
  });
47532
- const result = await executor.run(invocationArgs, tryAsGas(gas));
48072
+ const result = await executor.run(invocationArgs, gas_tryAsGas(gas));
47533
48073
  const [newState, checkpoint] = partialState.getStateUpdates();
47534
48074
  /**
47535
48075
  * PVM invocation returned an error so we return the checkpoint
@@ -47730,19 +48270,19 @@ class Accumulate {
47730
48270
  for (let serviceIndex = 0; serviceIndex < serviceIdsLength; serviceIndex += 1) {
47731
48271
  const serviceId = serviceIds[serviceIndex];
47732
48272
  const checkpoint = AccumulationStateUpdate.copyFrom(inputStateUpdate);
47733
- const promise = this.accumulateSingleService(serviceId, accumulateData.getTransfers(serviceId), accumulateData.getOperands(serviceId), accumulateData.getGasLimit(serviceId), slot, entropy, AccumulationStateUpdate.copyFrom(inputStateUpdate)).then(({ consumedGas, stateUpdate }) => ({
47734
- consumedGas,
47735
- stateUpdate: stateUpdate === null ? checkpoint : stateUpdate,
47736
- }));
48273
+ const promise = this.accumulateSingleService(serviceId, accumulateData.getTransfers(serviceId), accumulateData.getOperands(serviceId), accumulateData.getGasLimit(serviceId), slot, entropy, AccumulationStateUpdate.copyFrom(inputStateUpdate)).then(({ consumedGas, stateUpdate }) => {
48274
+ const resultEntry = [
48275
+ serviceId,
48276
+ {
48277
+ consumedGas,
48278
+ stateUpdate: stateUpdate === null ? checkpoint : stateUpdate,
48279
+ },
48280
+ ];
48281
+ return resultEntry;
48282
+ });
47737
48283
  resultPromises[serviceIndex] = promise;
47738
48284
  }
47739
- return Promise.all(resultPromises).then((results) => {
47740
- const map = new Map();
47741
- for (let serviceIndex = 0; serviceIndex < serviceIdsLength; serviceIndex += 1) {
47742
- map.set(serviceIds[serviceIndex], results[serviceIndex]);
47743
- }
47744
- return map;
47745
- });
48285
+ return Promise.all(resultPromises).then((results) => new Map(results));
47746
48286
  }
47747
48287
  /**
47748
48288
  * A method that updates `recentlyAccumulated`, `accumulationQueue` and `timeslot` in state
@@ -47831,9 +48371,10 @@ class Accumulate {
47831
48371
  const _gasCost = gasCost;
47832
48372
  assertEmpty(rest);
47833
48373
  const accumulated = accumulatableReports.subview(0, accumulatedReports);
47834
- const { yieldedRoot, services, transfers: _transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
48374
+ const { yieldedRoot, services, transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
47835
48375
  assertEmpty(stateUpdateRest);
47836
- // yielded root is retrieved after each pvm invocation so we can ignore it here
48376
+ // transfers and yielded root are retrieved after each pvm invocation so we can ignore them here
48377
+ const _transfers = transfers;
47837
48378
  const _yieldedRoot = yieldedRoot;
47838
48379
  if (this.hasDuplicatedServiceIdCreated(services.created)) {
47839
48380
  accumulate_logger.trace `Duplicated Service creation detected. Block is invalid.`;
@@ -47932,7 +48473,7 @@ class DeferredTransfers {
47932
48473
  partiallyUpdatedState.updateServiceInfo(serviceId, newInfo);
47933
48474
  const partialState = new AccumulateExternalities(this.chainSpec, this.blake2b, partiallyUpdatedState, serviceId, serviceId, timeslot);
47934
48475
  const fetchExternalities = FetchExternalities.createForOnTransfer({ entropy, transfers }, this.chainSpec);
47935
- let consumedGas = tryAsGas(0);
48476
+ let consumedGas = gas_tryAsGas(0);
47936
48477
  const hasTransfers = transfers.length > 0;
47937
48478
  const isCodeCorrect = code !== null && code.length <= W_C;
47938
48479
  if (!hasTransfers || !isCodeCorrect) {
@@ -47950,7 +48491,7 @@ class DeferredTransfers {
47950
48491
  const executor = await PvmExecutor.createOnTransferExecutor(serviceId, code, { partialState, fetchExternalities }, this.pvm);
47951
48492
  const args = encoder_Encoder.encodeObject(deferred_transfers_ARGS_CODEC, { timeslot, serviceId, transfersLength: numbers_tryAsU32(transfers.length) }, this.chainSpec);
47952
48493
  const gas = transfers.reduce((acc, item) => acc + item.gas, 0n);
47953
- consumedGas = (await executor.run(args, tryAsGas(gas))).consumedGas;
48494
+ consumedGas = (await executor.run(args, gas_tryAsGas(gas))).consumedGas;
47954
48495
  }
47955
48496
  transferStatistics.set(serviceId, { count: numbers_tryAsU32(transfers.length), gasUsed: tryAsServiceGas(consumedGas) });
47956
48497
  const [updatedState] = partialState.getStateUpdates();
@@ -49764,7 +50305,7 @@ async function createImporter(config) {
49764
50305
  const interpreter = config.workerParams.pvm;
49765
50306
  const blocks = db.getBlocksDb();
49766
50307
  const states = db.getStatesDb();
49767
- const hasher = new hasher_TransitionHasher(chainSpec, await keccakHasher, await blake2b);
50308
+ const hasher = new hasher_TransitionHasher(await keccakHasher, await blake2b);
49768
50309
  const importer = new Importer(chainSpec, interpreter, hasher, main_logger, blocks, states);
49769
50310
  return {
49770
50311
  importer,
@@ -52421,7 +52962,7 @@ function initializeExtensions(api) {
52421
52962
  }
52422
52963
 
52423
52964
  ;// CONCATENATED MODULE: ./packages/jam/node/package.json
52424
- const package_namespaceObject = {"rE":"0.4.0"};
52965
+ const package_namespaceObject = {"rE":"0.4.1"};
52425
52966
  ;// CONCATENATED MODULE: ./packages/workers/block-generator/generator.ts
52426
52967
 
52427
52968
 
@@ -52488,7 +53029,7 @@ class generator_Generator {
52488
53029
  // select validator for block
52489
53030
  const validatorId = tryAsValidatorIndex(newTimeSlot % 6);
52490
53031
  // retrieve data from previous block
52491
- const hasher = new TransitionHasher(this.chainSpec, this.keccakHasher, this.blake2b);
53032
+ const hasher = new TransitionHasher(this.keccakHasher, this.blake2b);
52492
53033
  const parentHeaderHash = this.lastHeaderHash;
52493
53034
  const stateRoot = this.states.getStateRoot(this.lastState);
52494
53035
  // create extrinsic
@@ -53055,7 +53596,7 @@ function readJsonBlock(file, chainSpec) {
53055
53596
  var minimist = __nccwpck_require__(8595);
53056
53597
  var minimist_default = /*#__PURE__*/__nccwpck_require__.n(minimist);
53057
53598
  ;// CONCATENATED MODULE: ./bin/jam/package.json
53058
- const jam_package_namespaceObject = {"rE":"0.4.0"};
53599
+ const jam_package_namespaceObject = {"rE":"0.4.1"};
53059
53600
  ;// CONCATENATED MODULE: ./bin/jam/args.ts
53060
53601
 
53061
53602