@typeberry/jam 0.4.0-fcdfbb1 → 0.4.1-0a3acb2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3550,7 +3550,11 @@ var TestSuite;
3550
3550
  })(TestSuite || (TestSuite = {}));
3551
3551
  const ALL_VERSIONS_IN_ORDER = [compatibility_GpVersion.V0_6_7, compatibility_GpVersion.V0_7_0, compatibility_GpVersion.V0_7_1, compatibility_GpVersion.V0_7_2];
3552
3552
  const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
3553
- const DEFAULT_VERSION = compatibility_GpVersion.V0_7_2;
3553
+ /**
3554
+ * The current version is set to track jam-conformance testing.
3555
+ * Since we are currently at 0.7.1, not 0.7.2, the default version is set accordingly.
3556
+ */
3557
+ const DEFAULT_VERSION = compatibility_GpVersion.V0_7_1;
3554
3558
  const env = typeof process === "undefined" ? {} : process.env;
3555
3559
  let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
3556
3560
  let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
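For orientation, here is a minimal standalone sketch of the override chain shown in this hunk: the `GP_VERSION` and `TEST_SUITE` environment variables are parsed first and the compile-time defaults only apply as a fallback. The literal version strings below are assumptions for illustration; only the lookup pattern mirrors the bundled code.

```ts
// Minimal sketch of the env-override fallback; literal values are assumptions.
type GpVersion = "0.6.7" | "0.7.0" | "0.7.1" | "0.7.2";
const ALL_VERSIONS: GpVersion[] = ["0.6.7", "0.7.0", "0.7.1", "0.7.2"];
const DEFAULT_VERSION: GpVersion = "0.7.1";

function parseCurrentVersion(raw: string | undefined): GpVersion | null {
  return ALL_VERSIONS.find((v) => v === raw) ?? null;
}

// e.g. setting GP_VERSION=0.7.2 in the environment switches back to Gray Paper 0.7.2.
declare const process: undefined | { env: Record<string, string | undefined> };
const env: Record<string, string | undefined> = typeof process === "undefined" ? {} : process.env;
const CURRENT_VERSION: GpVersion = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
```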
@@ -3609,8 +3613,8 @@ class compatibility_Compatibility {
3609
3613
  /**
3610
3614
  * Allows selecting different values for different Gray Paper versions from one record.
3611
3615
  *
3612
- * @param fallback The default value to return if no value is found for the current.
3613
- * @param record A record mapping versions to values, checking if the version is greater or equal to the current version.
3616
+ * fallback The default value to return if no value is found for the current.
3617
+ * versions A record mapping versions to values, checking if the version is greater or equal to the current version.
3614
3618
  * @returns The value for the current version, or the default value.
3615
3619
  */
3616
3620
  static selectIfGreaterOrEqual({ fallback, versions, }) {
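A hedged usage sketch of `selectIfGreaterOrEqual` as documented above: the record maps Gray Paper versions to values, and `fallback` is returned when no entry applies to the current version. `Compatibility` stands for the bundled `compatibility_Compatibility` class; the setting name and numbers are invented.

```ts
// Hypothetical call; the setting name and numbers are made up for illustration.
const bufferSize = Compatibility.selectIfGreaterOrEqual({
  fallback: 16,
  versions: {
    [GpVersion.V0_7_0]: 32, // applies once the current version reaches 0.7.0 (assumed semantics)
    [GpVersion.V0_7_2]: 64,
  },
});
```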
@@ -3773,7 +3777,7 @@ const workspacePathFix = dev_env.NODE_ENV === "development"
3773
3777
 
3774
3778
  ;// CONCATENATED MODULE: ./packages/core/utils/opaque.ts
3775
3779
  /**
3776
- * @fileoverview `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
3780
+ * `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
3777
3781
  * specified unique token Token. It means that base type cannot be assigned to unique type by accident.
3778
3782
  * Good examples of opaque types include:
3779
3783
  * - JWTs or other tokens - these are special kinds of string used for authorization purposes.
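The pattern this comment describes can be sketched in a few lines; this is a generic illustration of `Opaque<Type, Token>`, not the exact Typeberry definition.

```ts
// Generic sketch of the Opaque<Type, Token> pattern; not the bundled definition.
declare const __tag: unique symbol;
type Opaque<T, Token extends string> = T & { readonly [__tag]: Token };

type Jwt = Opaque<string, "Jwt">;
const asJwt = (raw: string): Jwt => raw as Jwt;

const token: Jwt = asJwt("eyJhbGciOiJIUzI1NiJ9.e30.sig");
// const oops: Jwt = "plain string"; // rejected: a plain string cannot be assigned to Jwt
```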
@@ -7224,9 +7228,438 @@ class ArrayView {
7224
7228
  }
7225
7229
  }
7226
7230
 
7231
+ ;// CONCATENATED MODULE: ./packages/core/collections/blob-dictionary.ts
7232
+
7233
+
7234
+ /** A map which uses byte blobs as keys */
7235
+ class BlobDictionary extends WithDebug {
7236
+ mapNodeThreshold;
7237
+ /**
7238
+ * The root node of the dictionary.
7239
+ *
7240
+ * This is the main internal data structure that organizes entries
7241
+ * in a tree-like fashion (array-based nodes up to `mapNodeThreshold`,
7242
+ * map-based nodes beyond it). All insertions, updates, and deletions
7243
+ * operate through this structure.
7244
+ */
7245
+ root = Node.withList();
7246
+ /**
7247
+ * Auxiliary map that stores references to the original keys and their values.
7248
+ *
7249
+ * - Overriding a value in the main structure does not replace the original key reference.
7250
+ * - Used for efficient iteration over `keys()`, `values()`, `entries()`, and computing `size`.
7251
+ */
7252
+ keyvals = new Map();
7253
+ /**
7254
+ * Protected constructor used internally by `BlobDictionary.new`
7255
+ * and `BlobDictionary.fromEntries`.
7256
+ *
7257
+ * This enforces controlled instantiation — users should create instances
7258
+ * through the provided static factory methods instead of calling the
7259
+ * constructor directly.
7260
+ *
7261
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
7262
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
7263
+ */
7264
+ constructor(mapNodeThreshold) {
7265
+ super();
7266
+ this.mapNodeThreshold = mapNodeThreshold;
7267
+ }
7268
+ /**
7269
+ * Returns the number of entries in the dictionary.
7270
+ *
7271
+ * The count is derived from the auxiliary `keyvals` map, which stores
7272
+ * all original key references and their associated values. This ensures
7273
+ * that the `size` reflects the actual number of entries, independent of
7274
+ * internal overrides in the main `root` structure.
7275
+ *
7276
+ * @returns The total number of entries in the dictionary.
7277
+ */
7278
+ get size() {
7279
+ return this.keyvals.size;
7280
+ }
7281
+ [TEST_COMPARE_USING]() {
7282
+ const vals = Array.from(this);
7283
+ vals.sort((a, b) => a[0].compare(b[0]).value);
7284
+ return vals;
7285
+ }
7286
+ /**
7287
+ * Creates an empty `BlobDictionary`.
7288
+ *
7289
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
7290
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
7291
+ * Defaults to `0`.
7292
+ *
7293
+ * @returns A new, empty `BlobDictionary` instance.
7294
+ */
7295
+ static new(mapNodeThreshold = 0) {
7296
+ return new BlobDictionary(mapNodeThreshold);
7297
+ }
7298
+ /**
7299
+ * Creates a new `BlobDictionary` initialized with the given entries.
7300
+ *
7301
+ * @param entries - An array of `[key, value]` pairs used to populate the dictionary.
7302
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
7303
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
7304
+ * Defaults to `0`.
7305
+ *
7306
+ * @returns A new `BlobDictionary` containing the provided entries.
7307
+ */
7308
+ static fromEntries(entries, mapNodeThreshold) {
7309
+ const dict = BlobDictionary.new(mapNodeThreshold);
7310
+ for (const [key, value] of entries) {
7311
+ dict.set(key, value);
7312
+ }
7313
+ return dict;
7314
+ }
7315
+ /**
7316
+ * Internal helper that inserts, updates or deletes an entry in the dictionary.
7317
+ *
7318
+ * Behaviour details:
7319
+ * - Passing `undefined` as `value` indicates a deletion. (E.g. `delete` uses `internalSet(key, undefined)`.)
7320
+ * - When an add (new entry) or a delete actually changes the structure, the method returns the affected leaf node.
7321
+ * - When the call only overrides an existing value (no structural add/delete), the method returns `null`.
7322
+ *
7323
+ * This method is intended for internal use by the dictionary implementation and allows `undefined` as a
7324
+ * sentinel value to signal removals.
7325
+ *
7326
+ * @param key - The key to insert, update or remove.
7327
+ * @param value - The value to associate with the key, or `undefined` to remove the key.
7328
+ * @returns The leaf node created or removed on add/delete, or `null` if the operation only overwrote an existing value.
7329
+ */
7330
+ internalSet(key, value) {
7331
+ let node = this.root;
7332
+ const keyChunkGenerator = key.chunks(CHUNK_SIZE);
7333
+ let depth = 0;
7334
+ for (;;) {
7335
+ const maybeKeyChunk = keyChunkGenerator.next().value;
7336
+ if (maybeKeyChunk === undefined) {
7337
+ if (value === undefined) {
7338
+ return node.remove(key);
7339
+ }
7340
+ return node.set(key, value);
7341
+ }
7342
+ const keyChunk = opaque_asOpaqueType(maybeKeyChunk);
7343
+ if (node.children instanceof ListChildren) {
7344
+ const subkey = bytes_BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE * depth));
7345
+ const leaf = value !== undefined ? node.children.insert(subkey, { key, value }) : node.children.remove(subkey);
7346
+ if (subkey.length > CHUNK_SIZE && node.children.children.length > this.mapNodeThreshold) {
7347
+ node.convertListChildrenToMap();
7348
+ }
7349
+ return leaf;
7350
+ }
7351
+ depth += 1;
7352
+ const children = node.children;
7353
+ if (children instanceof ListChildren) {
7354
+ throw new Error("We handle list node earlier. If we fall through, we know it's for the `Map` case.");
7355
+ }
7356
+ if (children instanceof MapChildren) {
7357
+ const maybeNode = children.getChild(keyChunk);
7358
+ if (maybeNode !== undefined) {
7359
+ // simply go one level deeper
7360
+ node = maybeNode;
7361
+ }
7362
+ else {
7363
+ // we are trying to remove an item, but it does not exist
7364
+ if (value === undefined) {
7365
+ return null;
7366
+ }
7367
+ // no more child nodes, we insert a new one.
7368
+ const newNode = Node.withList();
7369
+ children.setChild(keyChunk, newNode);
7370
+ node = newNode;
7371
+ }
7372
+ continue;
7373
+ }
7374
+ assertNever(children);
7375
+ }
7376
+ }
7377
+ /**
7378
+ * Adds a new entry to the dictionary or updates the value of an existing key.
7379
+ *
7380
+ * If an entry with the given key already exists, its value is replaced
7381
+ * with the new one.
7382
+ *
7383
+ * @param key - The key to add or update in the dictionary.
7384
+ * @param value - The value to associate with the specified key.
7385
+ * @returns Nothing (`void`).
7386
+ */
7387
+ set(key, value) {
7388
+ const leaf = this.internalSet(key, value);
7389
+ if (leaf !== null) {
7390
+ this.keyvals.set(leaf.key, leaf);
7391
+ }
7392
+ }
7393
+ /**
7394
+ * Retrieves the value associated with the given key from the dictionary.
7395
+ *
7396
+ * If the key does not exist, this method returns `undefined`.
7397
+ *
7398
+ * @param key - The key whose associated value should be retrieved.
7399
+ * @returns The value associated with the specified key, or `undefined` if the key is not present.
7400
+ */
7401
+ get(key) {
7402
+ let node = this.root;
7403
+ const pathChunksGenerator = key.chunks(CHUNK_SIZE);
7404
+ let depth = 0;
7405
+ while (node !== undefined) {
7406
+ const maybePathChunk = pathChunksGenerator.next().value;
7407
+ if (node.children instanceof ListChildren) {
7408
+ const subkey = bytes_BytesBlob.blobFrom(key.raw.subarray(depth * CHUNK_SIZE));
7409
+ const child = node.children.find(subkey);
7410
+ if (child !== null) {
7411
+ return child.value;
7412
+ }
7413
+ }
7414
+ if (maybePathChunk === undefined) {
7415
+ return node.getLeaf()?.value;
7416
+ }
7417
+ if (node.children instanceof MapChildren) {
7418
+ const pathChunk = opaque_asOpaqueType(maybePathChunk);
7419
+ node = node.children.getChild(pathChunk);
7420
+ depth += 1;
7421
+ }
7422
+ }
7423
+ return undefined;
7424
+ }
7425
+ /**
7426
+ * Checks whether the dictionary contains an entry for the given key.
7427
+ *
7428
+ * ⚠️ **Note:** Avoid using `has(...)` together with `get(...)` in a pattern like this:
7429
+ *
7430
+ * ```ts
7431
+ * if (dict.has(key)) {
7432
+ * const value = dict.get(key);
7433
+ * ...
7434
+ * }
7435
+ * ```
7436
+ *
7437
+ * This approach performs two lookups for the same key.
7438
+ *
7439
+ * Instead, prefer the following pattern, which retrieves the value once:
7440
+ *
7441
+ * ```ts
7442
+ * const value = dict.get(key);
7443
+ * if (value !== undefined) {
7444
+ * ...
7445
+ * }
7446
+ * ```
7447
+ *
7448
+ * @param key - The key to check for.
7449
+ * @returns `true` if the dictionary contains an entry for the given key, otherwise `false`.
7450
+ */
7451
+ has(key) {
7452
+ return this.get(key) !== undefined;
7453
+ }
7454
+ /**
7455
+ * Removes an entry with the specified key from the dictionary.
7456
+ *
7457
+ * Internally, this calls {@link internalSet} with `undefined` to mark the entry as deleted.
7458
+ *
7459
+ * @param key - The key of the entry to remove.
7460
+ * @returns `true` if an entry was removed (i.e. the key existed), otherwise `false`.
7461
+ */
7462
+ delete(key) {
7463
+ const leaf = this.internalSet(key, undefined);
7464
+ if (leaf !== null) {
7465
+ this.keyvals.delete(leaf.key);
7466
+ return true;
7467
+ }
7468
+ return false;
7469
+ }
7470
+ /**
7471
+ * Returns an iterator over the keys in the dictionary.
7472
+ *
7473
+ * The iterator yields each key in insertion order.
7474
+ *
7475
+ * @returns An iterator over all keys in the dictionary.
7476
+ */
7477
+ keys() {
7478
+ return this.keyvals.keys();
7479
+ }
7480
+ /**
7481
+ * Returns an iterator over the values in the dictionary.
7482
+ *
7483
+ * The iterator yields each value in insertion order.
7484
+ *
7485
+ * @returns An iterator over all values in the dictionary.
7486
+ */
7487
+ *values() {
7488
+ for (const leaf of this.keyvals.values()) {
7489
+ yield leaf.value;
7490
+ }
7491
+ }
7492
+ /**
7493
+ * Returns an iterator over the `[key, value]` pairs in the dictionary.
7494
+ *
7495
+ * The iterator yields entries in insertion order.
7496
+ *
7497
+ * @returns An iterator over `[key, value]` tuples for each entry in the dictionary.
7498
+ */
7499
+ *entries() {
7500
+ for (const leaf of this.keyvals.values()) {
7501
+ yield [leaf.key, leaf.value];
7502
+ }
7503
+ }
7504
+ /**
7505
+ * Default iterator for the dictionary.
7506
+ *
7507
+ * Equivalent to calling {@link entries}.
7508
+ * Enables iteration with `for...of`:
7509
+ *
7510
+ * ```ts
7511
+ * for (const [key, value] of dict) {
7512
+ * ...
7513
+ * }
7514
+ * ```
7515
+ *
7516
+ * @returns An iterator over `[key, value]` pairs.
7517
+ */
7518
+ [Symbol.iterator]() {
7519
+ return this.entries();
7520
+ }
7521
+ /**
7522
+ * Creates a new sorted array of values, ordered by their corresponding keys.
7523
+ *
7524
+ * Iterates over all entries in the dictionary and sorts them according
7525
+ * to the provided comparator function applied to the keys.
7526
+ *
7527
+ * @param comparator - A comparator function that can compare two keys.
7528
+ *
7529
+ * @returns A new array containing all values from the dictionary,
7530
+ * sorted according to their keys.
7531
+ */
7532
+ toSortedArray(comparator) {
7533
+ const vals = Array.from(this);
7534
+ vals.sort((a, b) => comparator(a[0], b[0]).value);
7535
+ return vals.map((x) => x[1]);
7536
+ }
7537
+ }
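A brief usage sketch of the public surface defined above (`new`, `set`, `get`, `size`, iteration, `delete`). `BytesBlob.blobFrom` mirrors a call used elsewhere in the bundle; the generic type parameters are an assumption about the original TypeScript source.

```ts
// Hypothetical usage of BlobDictionary; BytesBlob stands for the bundle's bytes_BytesBlob.
const dict = BlobDictionary.new<BytesBlob, string>(5);
const key = BytesBlob.blobFrom(new Uint8Array([0xde, 0xad, 0xbe, 0xef]));

dict.set(key, "first");
dict.set(key, "second");       // overrides the value, size stays at 1
console.log(dict.get(key));    // "second"
console.log(dict.size);        // 1

for (const [k, v] of dict) {
  console.log(`${k} -> ${v}`); // entries iterate in insertion order
}
console.log(dict.delete(key)); // true
```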
7538
+ const CHUNK_SIZE = 6;
7539
+ /**
7540
+ * A function to transform a byte chunk (up to 6 bytes) into a U48 number.
7541
+ *
7542
+ * Note that it uses 3 additional bits to store the length (`value * 8 + len`);
7543
+ * it is needed to distinguish shorter chunks that have trailing zeros, for example: [1, 2] and [1, 2, 0].
7544
+ * */
7545
+ function bytesAsU48(bytes) {
7546
+ const len = bytes.length;
7547
+ debug_check `${len <= CHUNK_SIZE} Length has to be <= ${CHUNK_SIZE}, got: ${len}`;
7548
+ let value = bytes[3] | (bytes[2] << 8) | (bytes[1] << 16) | (bytes[0] << 24);
7549
+ for (let i = 4; i < bytes.length; i++) {
7550
+ value = value * 256 + bytes[i];
7551
+ }
7552
+ return value * 8 + len;
7553
+ }
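To make the length bits concrete, here is a self-contained restatement of `bytesAsU48` (the `debug_check` guard is omitted) together with the example from the comment: `[1, 2]` and `[1, 2, 0]` share the same 48-bit prefix but get different length tags, so the resulting numbers differ.

```ts
// Self-contained restatement of bytesAsU48 above, for the worked example only.
function bytesAsU48Sketch(bytes: Uint8Array): number {
  const len = bytes.length;
  let value = bytes[3] | (bytes[2] << 8) | (bytes[1] << 16) | (bytes[0] << 24);
  for (let i = 4; i < bytes.length; i++) {
    value = value * 256 + bytes[i];
  }
  return value * 8 + len;
}

bytesAsU48Sketch(Uint8Array.of(1, 2));    // 135266306 = 16908288 * 8 + 2
bytesAsU48Sketch(Uint8Array.of(1, 2, 0)); // 135266307 = 16908288 * 8 + 3
```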
7554
+ class Node {
7555
+ leaf;
7556
+ children;
7557
+ convertListChildrenToMap() {
7558
+ if (!(this.children instanceof ListChildren)) {
7559
+ return;
7560
+ }
7561
+ this.children = MapChildren.fromListNode(this.children);
7562
+ }
7563
+ static withList() {
7564
+ return new Node(undefined, ListChildren.new());
7565
+ }
7566
+ static withMap() {
7567
+ return new Node(undefined, MapChildren.new());
7568
+ }
7569
+ constructor(leaf, children) {
7570
+ this.leaf = leaf;
7571
+ this.children = children;
7572
+ }
7573
+ getLeaf() {
7574
+ return this.leaf;
7575
+ }
7576
+ remove(_key) {
7577
+ if (this.leaf === undefined) {
7578
+ return null;
7579
+ }
7580
+ const removedLeaf = this.leaf;
7581
+ this.leaf = undefined;
7582
+ return removedLeaf;
7583
+ }
7584
+ set(key, value) {
7585
+ if (this.leaf === undefined) {
7586
+ this.leaf = { key, value };
7587
+ return this.leaf;
7588
+ }
7589
+ this.leaf.value = value;
7590
+ return null;
7591
+ }
7592
+ }
7593
+ class ListChildren {
7594
+ children = [];
7595
+ constructor() { }
7596
+ find(key) {
7597
+ const result = this.children.find((item) => item[0].isEqualTo(key));
7598
+ if (result !== undefined) {
7599
+ return result[1];
7600
+ }
7601
+ return null;
7602
+ }
7603
+ remove(key) {
7604
+ const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
7605
+ if (existingIndex >= 0) {
7606
+ const ret = this.children.splice(existingIndex, 1);
7607
+ return ret[0][1];
7608
+ }
7609
+ return null;
7610
+ }
7611
+ insert(key, leaf) {
7612
+ const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
7613
+ if (existingIndex >= 0) {
7614
+ const existing = this.children[existingIndex];
7615
+ existing[1].value = leaf.value;
7616
+ return null;
7617
+ }
7618
+ this.children.push([key, leaf]);
7619
+ return leaf;
7620
+ }
7621
+ static new() {
7622
+ return new ListChildren();
7623
+ }
7624
+ }
7625
+ class MapChildren {
7626
+ children = new Map();
7627
+ constructor() { }
7628
+ static new() {
7629
+ return new MapChildren();
7630
+ }
7631
+ static fromListNode(node) {
7632
+ const mapNode = new MapChildren();
7633
+ for (const [key, leaf] of node.children) {
7634
+ const currentKeyChunk = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(key.raw.subarray(0, CHUNK_SIZE)));
7635
+ const subKey = bytes_BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE));
7636
+ let child = mapNode.getChild(currentKeyChunk);
7637
+ if (child === undefined) {
7638
+ child = Node.withList();
7639
+ mapNode.setChild(currentKeyChunk, child);
7640
+ }
7641
+ const children = child.children;
7642
+ children.insert(subKey, leaf);
7643
+ }
7644
+ return mapNode;
7645
+ }
7646
+ getChild(keyChunk) {
7647
+ const chunkAsNumber = bytesAsU48(keyChunk.raw);
7648
+ return this.children.get(chunkAsNumber);
7649
+ }
7650
+ setChild(keyChunk, node) {
7651
+ const chunkAsNumber = bytesAsU48(keyChunk.raw);
7652
+ this.children.set(chunkAsNumber, node);
7653
+ }
7654
+ }
7655
+
7227
7656
  ;// CONCATENATED MODULE: ./packages/core/collections/hash-dictionary.ts
7228
- /** A map which uses hashes as keys. */
7229
- class hash_dictionary_HashDictionary {
7657
+ /**
7658
+ * A map which uses hashes as keys.
7659
+ *
7660
+ * @deprecated
7661
+ * */
7662
+ class StringHashDictionary {
7230
7663
  // TODO [ToDr] [crit] We can't use `TrieHash` directly in the map,
7231
7664
  // because of the way it's being compared. Hence having `string` here.
7232
7665
  // This has to be benchmarked and re-written to a custom map most likely.
@@ -7292,6 +7725,17 @@ class hash_dictionary_HashDictionary {
7292
7725
  }
7293
7726
  }
7294
7727
 
7728
+ /**
7729
+ * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
7730
+ * In practice the exact value matters little, because arrays in this structure usually have a length close to 1.
7731
+ */
7732
+ const BLOB_DICTIONARY_THRESHOLD = 5;
7733
+ class hash_dictionary_HashDictionary extends BlobDictionary {
7734
+ constructor() {
7735
+ super(BLOB_DICTIONARY_THRESHOLD);
7736
+ }
7737
+ }
7738
+
7295
7739
  ;// CONCATENATED MODULE: ./packages/core/collections/hash-set.ts
7296
7740
 
7297
7741
  /** A set specialized for storing hashes. */
@@ -7756,6 +8200,18 @@ class SortedSet extends SortedArray {
7756
8200
 
7757
8201
 
7758
8202
 
8203
+ function getTruncatedKey(key) {
8204
+ // Always return exactly TRUNCATED_HASH_SIZE bytes.
8205
+ if (key.length === TRUNCATED_HASH_SIZE) {
8206
+ return key;
8207
+ }
8208
+ return bytes_Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE);
8209
+ }
8210
+ /**
8211
+ * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
8212
+ * In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
8213
+ */
8214
+ const truncated_hash_dictionary_BLOB_DICTIONARY_THRESHOLD = 5;
7759
8215
  /**
7760
8216
  * A collection of hash-based keys (likely `StateKey`s) which ignores
7761
8217
  * differences on the last byte.
@@ -7768,48 +8224,37 @@ class TruncatedHashDictionary {
7768
8224
  * Each key will be copied and have the last byte replaced with a 0.
7769
8225
  */
7770
8226
  static fromEntries(entries) {
7771
- /** Copy key bytes of an entry and replace the last one with 0. */
7772
- const mapped = Array.from(entries).map(([key, value]) => {
7773
- const newKey = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
7774
- newKey.raw.set(key.raw.subarray(0, TRUNCATED_HASH_SIZE));
7775
- return [newKey, value];
7776
- });
7777
- return new TruncatedHashDictionary(hash_dictionary_HashDictionary.fromEntries(mapped));
8227
+ return new TruncatedHashDictionary(BlobDictionary.fromEntries(Array.from(entries).map(([key, value]) => [getTruncatedKey(key), value]), truncated_hash_dictionary_BLOB_DICTIONARY_THRESHOLD));
7778
8228
  }
7779
- /** A truncated key which we re-use to query the dictionary. */
7780
- truncatedKey = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
7781
8229
  constructor(dict) {
7782
8230
  this.dict = dict;
7783
8231
  }
7784
8232
  [TEST_COMPARE_USING]() {
7785
- return this.dict;
8233
+ return Array.from(this.dict);
7786
8234
  }
7787
8235
  /** Return number of items in the dictionary. */
7788
8236
  get size() {
7789
8237
  return this.dict.size;
7790
8238
  }
7791
8239
  /** Retrieve a value that matches the key on `TRUNCATED_HASH_SIZE`. */
7792
- get(fullKey) {
7793
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
7794
- return this.dict.get(this.truncatedKey);
8240
+ get(key) {
8241
+ const truncatedKey = getTruncatedKey(key);
8242
+ return this.dict.get(truncatedKey);
7795
8243
  }
7796
8244
  /** Return true if the key is present in the dictionary */
7797
- has(fullKey) {
7798
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
7799
- return this.dict.has(this.truncatedKey);
8245
+ has(key) {
8246
+ const truncatedKey = getTruncatedKey(key);
8247
+ return this.dict.has(truncatedKey);
7800
8248
  }
7801
8249
  /** Set or update a value that matches the key on `TRUNCATED_HASH_SIZE`. */
7802
- set(fullKey, value) {
7803
- // NOTE we can't use the the shared key here, since the collection will
7804
- // store the key for us, hence the copy.
7805
- const key = bytes_Bytes.zero(hash_HASH_SIZE);
7806
- key.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
7807
- this.dict.set(key.asOpaque(), value);
8250
+ set(key, value) {
8251
+ const truncatedKey = getTruncatedKey(key);
8252
+ this.dict.set(truncatedKey, value);
7808
8253
  }
7809
8254
  /** Remove a value that matches the key on `TRUNCATED_HASH_SIZE`. */
7810
- delete(fullKey) {
7811
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
7812
- this.dict.delete(this.truncatedKey);
8255
+ delete(key) {
8256
+ const truncatedKey = getTruncatedKey(key);
8257
+ this.dict.delete(truncatedKey);
7813
8258
  }
7814
8259
  /** Iterator over values of the dictionary. */
7815
8260
  values() {
@@ -7817,9 +8262,7 @@ class TruncatedHashDictionary {
7817
8262
  }
7818
8263
  /** Iterator over entries of the dictionary (with truncated keys) */
7819
8264
  *entries() {
7820
- for (const [key, value] of this.dict.entries()) {
7821
- yield [bytes_Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE).asOpaque(), value];
7822
- }
8265
+ yield* this.dict.entries();
7823
8266
  }
7824
8267
  [Symbol.iterator]() {
7825
8268
  return this.entries();
@@ -7836,6 +8279,7 @@ class TruncatedHashDictionary {
7836
8279
 
7837
8280
 
7838
8281
 
8282
+
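A hedged usage sketch of `TruncatedHashDictionary`: two keys that differ only on the last byte resolve to the same entry. It assumes 32-byte hash keys; `Bytes.zero`, `.raw`, and `.asOpaque()` mirror calls visible in the bundle, and `HASH_SIZE` stands for the bundled `hash_HASH_SIZE`.

```ts
// Hypothetical usage; assumes 32-byte hashes where only the last byte differs.
const keyA = Bytes.zero(HASH_SIZE).asOpaque();
keyA.raw[HASH_SIZE - 1] = 0x01;
const keyB = Bytes.zero(HASH_SIZE).asOpaque();
keyB.raw[HASH_SIZE - 1] = 0xff;

const dict = TruncatedHashDictionary.fromEntries([[keyA, "value"]]);
console.log(dict.get(keyB)); // "value", the trailing byte is ignored
console.log(dict.has(keyB)); // true
```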
7839
8283
  ;// CONCATENATED MODULE: ./packages/jam/config/chain-spec.ts
7840
8284
 
7841
8285
 
@@ -10628,11 +11072,32 @@ const ENTROPY_ENTRIES = 4;
10628
11072
 
10629
11073
  var state_update_UpdatePreimageKind;
10630
11074
  (function (UpdatePreimageKind) {
10631
- /** Insert new preimage and optionally update its lookup history. */
11075
+ /**
11076
+ * Insert new preimage and optionally update its lookup history.
11077
+ *
11078
+ * Used in: `provide`
11079
+ *
11080
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/383904383904?v=0.7.2
11081
+ */
10632
11082
  UpdatePreimageKind[UpdatePreimageKind["Provide"] = 0] = "Provide";
10633
- /** Remove a preimage and it's lookup history. */
11083
+ /**
11084
+ * Remove a preimage and it's lookup history.
11085
+ *
11086
+ * Used in: `forget` and `eject`
11087
+ *
11088
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/38c701380202?v=0.7.2
11089
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/379102379302?v=0.7.2
11090
+ */
10634
11091
  UpdatePreimageKind[UpdatePreimageKind["Remove"] = 1] = "Remove";
10635
- /** update or add lookup history for preimage hash/len to given value. */
11092
+ /**
11093
+ * Update or add lookup history for preimage hash/len to given value.
11094
+ *
11095
+ * Used in: `solicit` and `forget`
11096
+ *
11097
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/382802382802?v=0.7.2
11098
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/384002384b02?v=0.7.2
11099
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/38c60038ea00?v=0.7.2
11100
+ */
10636
11101
  UpdatePreimageKind[UpdatePreimageKind["UpdateOrAdd"] = 2] = "UpdateOrAdd";
10637
11102
  })(state_update_UpdatePreimageKind || (state_update_UpdatePreimageKind = {}));
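The three kinds correspond to the accumulation host calls named in the comments above; a minimal exhaustive dispatch over them (illustrative only) looks like this:

```ts
// Illustrative exhaustive switch over the enum declared above.
function describeUpdate(kind: UpdatePreimageKind): string {
  switch (kind) {
    case UpdatePreimageKind.Provide:
      return "insert preimage, optionally updating its lookup history (provide)";
    case UpdatePreimageKind.Remove:
      return "remove preimage and its lookup history (forget / eject)";
    case UpdatePreimageKind.UpdateOrAdd:
      return "set lookup history for hash/len to the given value (solicit / forget)";
  }
}
```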
10638
11103
  /**
@@ -10640,7 +11105,7 @@ var state_update_UpdatePreimageKind;
10640
11105
  *
10641
11106
  * Can be one of the following cases:
10642
11107
  * 1. Provide a new preimage blob and set the lookup history to available at `slot`.
10643
- * 2. Remove (expunge) a preimage and it's lookup history.
11108
+ * 2. Remove (forget) a preimage and it's lookup history.
10644
11109
  * 3. Update `LookupHistory` with given value.
10645
11110
  */
10646
11111
  class UpdatePreimage {
@@ -13210,7 +13675,6 @@ class LeafNode {
13210
13675
  /**
13211
13676
  * Get the byte length of embedded value.
13212
13677
  *
13213
- * @remark
13214
13678
  * Note in case this node only contains hash this is going to be 0.
13215
13679
  */
13216
13680
  getValueLength() {
@@ -13221,7 +13685,6 @@ class LeafNode {
13221
13685
  /**
13222
13686
  * Returns the embedded value.
13223
13687
  *
13224
- * @remark
13225
13688
  * Note that this is going to be empty for a regular leaf node (i.e. containing a hash).
13226
13689
  */
13227
13690
  getValue() {
@@ -13231,7 +13694,6 @@ class LeafNode {
13231
13694
  /**
13232
13695
  * Returns contained value hash.
13233
13696
  *
13234
- * @remark
13235
13697
  * Note that for embedded value this is going to be full 0-padded 32 bytes.
13236
13698
  */
13237
13699
  getValueHash() {
@@ -14252,7 +14714,11 @@ class PartiallyUpdatedState {
14252
14714
  const service = this.state.getService(serviceId);
14253
14715
  return service?.getPreimage(hash) ?? null;
14254
14716
  }
14255
- /** Get status of a preimage of the current service, taking into account any updates. */
14717
+ /**
14718
+ * Get status of a preimage of the current service, taking into account any updates.
14719
+ *
14720
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/110201110201?v=0.7.2
14721
+ */
14256
14722
  getLookupHistory(currentTimeslot, serviceId, hash, length) {
14257
14723
  const updatedService = this.stateUpdate.services.updated.get(serviceId);
14258
14724
  /** Return lookup history item for newly created service */
@@ -14289,12 +14755,7 @@ class PartiallyUpdatedState {
14289
14755
  return new LookupHistoryItem(hash, updatedPreimage.length, service_tryAsLookupHistorySlots([currentTimeslot]));
14290
14756
  }
14291
14757
  case state_update_UpdatePreimageKind.Remove: {
14292
- const state = stateFallback();
14293
- // kinda impossible, since we know it's there because it's removed.
14294
- if (state === null) {
14295
- return null;
14296
- }
14297
- return new LookupHistoryItem(hash, state.length, service_tryAsLookupHistorySlots([...state.slots, currentTimeslot]));
14758
+ return null;
14298
14759
  }
14299
14760
  case state_update_UpdatePreimageKind.UpdateOrAdd: {
14300
14761
  return action.item;
@@ -14387,7 +14848,7 @@ const gas_tryAsSmallGas = (v) => opaque_asOpaqueType(numbers_tryAsU32(v));
14387
14848
  /** Attempt to convert given number into U64 gas representation. */
14388
14849
  const tryAsBigGas = (v) => opaque_asOpaqueType(numbers_tryAsU64(v));
14389
14850
  /** Attempt to convert given number into gas. */
14390
- const tryAsGas = (v) => typeof v === "number" && v < 2 ** 32 ? gas_tryAsSmallGas(v) : tryAsBigGas(v);
14851
+ const gas_tryAsGas = (v) => typeof v === "number" && v < 2 ** 32 ? gas_tryAsSmallGas(v) : tryAsBigGas(v);
14391
14852
 
14392
14853
  ;// CONCATENATED MODULE: ./packages/core/pvm-interface/memory.ts
14393
14854
 
@@ -14670,7 +15131,7 @@ const tryAsRegisterIndex = (index) => {
14670
15131
  debug_check `${index >= 0 && index < registers_NO_OF_REGISTERS} Incorrect register index: ${index}!`;
14671
15132
  return opaque_asOpaqueType(index);
14672
15133
  };
14673
- class Registers {
15134
+ class registers_Registers {
14674
15135
  bytes;
14675
15136
  asSigned;
14676
15137
  asUnsigned;
@@ -14689,7 +15150,7 @@ class Registers {
14689
15150
  }
14690
15151
  static fromBytes(bytes) {
14691
15152
  debug_check `${bytes.length === registers_NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
14692
- return new Registers(bytes);
15153
+ return new registers_Registers(bytes);
14693
15154
  }
14694
15155
  getBytesAsLittleEndian(index, len) {
14695
15156
  const offset = index << REGISTER_SIZE_SHIFT;
@@ -15003,49 +15464,10 @@ class NoopMissing {
15003
15464
  }
15004
15465
  }
15005
15466
 
15006
- ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/gas.ts
15007
-
15008
-
15009
- /** Create a new gas counter instance depending on the gas value. */
15010
- function gasCounter(gas) {
15011
- return new GasCounterU64(numbers_tryAsU64(gas));
15012
- }
15013
- class GasCounterU64 {
15014
- gas;
15015
- initialGas;
15016
- constructor(gas) {
15017
- this.gas = gas;
15018
- this.initialGas = tryAsGas(gas);
15019
- }
15020
- set(g) {
15021
- this.gas = numbers_tryAsU64(g);
15022
- }
15023
- get() {
15024
- return tryAsGas(this.gas);
15025
- }
15026
- sub(g) {
15027
- const result = this.gas - numbers_tryAsU64(g);
15028
- if (result >= 0n) {
15029
- this.gas = numbers_tryAsU64(result);
15030
- return false;
15031
- }
15032
- this.gas = numbers_tryAsU64(0n);
15033
- return true;
15034
- }
15035
- used() {
15036
- const gasConsumed = numbers_tryAsU64(this.initialGas) - this.gas;
15037
- // In we have less than zero left we assume that all gas has been consumed.
15038
- if (gasConsumed < 0) {
15039
- return this.initialGas;
15040
- }
15041
- return tryAsGas(gasConsumed);
15042
- }
15043
- }
15044
-
15045
15467
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts
15046
15468
 
15047
15469
 
15048
- const tryAsMemoryIndex = (index) => {
15470
+ const memory_index_tryAsMemoryIndex = (index) => {
15049
15471
  debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
15050
15472
  return opaque_asOpaqueType(index);
15051
15473
  };
@@ -15059,25 +15481,25 @@ const tryAsSbrkIndex = (index) => {
15059
15481
 
15060
15482
  const memory_consts_PAGE_SIZE_SHIFT = 12;
15061
15483
  // PAGE_SIZE has to be a power of 2
15062
- const PAGE_SIZE = 1 << memory_consts_PAGE_SIZE_SHIFT;
15484
+ const memory_consts_PAGE_SIZE = 1 << memory_consts_PAGE_SIZE_SHIFT;
15063
15485
  const MIN_ALLOCATION_SHIFT = (() => {
15064
15486
  const MIN_ALLOCATION_SHIFT = 7;
15065
15487
  debug_check `${MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < memory_consts_PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
15066
15488
  return MIN_ALLOCATION_SHIFT;
15067
15489
  })();
15068
- const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
15069
- const LAST_PAGE_NUMBER = (MEMORY_SIZE - PAGE_SIZE) / PAGE_SIZE;
15490
+ const MIN_ALLOCATION_LENGTH = memory_consts_PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
15491
+ const LAST_PAGE_NUMBER = (MEMORY_SIZE - memory_consts_PAGE_SIZE) / memory_consts_PAGE_SIZE;
15070
15492
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/35a60235a602?v=0.6.4 */
15071
15493
  const RESERVED_NUMBER_OF_PAGES = 16;
15072
15494
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/35a60235a602?v=0.6.4 */
15073
- const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE;
15495
+ const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / memory_consts_PAGE_SIZE;
15074
15496
 
15075
15497
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts
15076
15498
 
15077
15499
 
15078
15500
  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
15079
15501
  const tryAsPageIndex = (index) => {
15080
- debug_check `${index >= 0 && index < PAGE_SIZE}, Incorrect page index: ${index}!`;
15502
+ debug_check `${index >= 0 && index < memory_consts_PAGE_SIZE}, Incorrect page index: ${index}!`;
15081
15503
  return opaque_asOpaqueType(index);
15082
15504
  };
15083
15505
  /** Ensure that given `index` represents an index of one of the pages. */
@@ -15105,17 +15527,17 @@ function getNextPageNumber(pageNumber) {
15105
15527
 
15106
15528
 
15107
15529
  function alignToPageSize(length) {
15108
- return PAGE_SIZE * Math.ceil(length / PAGE_SIZE);
15530
+ return memory_consts_PAGE_SIZE * Math.ceil(length / memory_consts_PAGE_SIZE);
15109
15531
  }
15110
15532
  function getPageNumber(address) {
15111
15533
  return tryAsPageNumber(address >>> memory_consts_PAGE_SIZE_SHIFT);
15112
15534
  }
15113
15535
  function getStartPageIndex(address) {
15114
- return tryAsMemoryIndex((address >>> memory_consts_PAGE_SIZE_SHIFT) << memory_consts_PAGE_SIZE_SHIFT);
15536
+ return memory_index_tryAsMemoryIndex((address >>> memory_consts_PAGE_SIZE_SHIFT) << memory_consts_PAGE_SIZE_SHIFT);
15115
15537
  }
15116
15538
  function getStartPageIndexFromPageNumber(pageNumber) {
15117
15539
  // >>> 0 is needed to avoid changing sign of the number
15118
- return tryAsMemoryIndex((pageNumber << memory_consts_PAGE_SIZE_SHIFT) >>> 0);
15540
+ return memory_index_tryAsMemoryIndex((pageNumber << memory_consts_PAGE_SIZE_SHIFT) >>> 0);
15119
15541
  }
15120
15542
 
15121
15543
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/errors.ts
@@ -15137,7 +15559,7 @@ class PageFault {
15137
15559
  return new PageFault(numbers_tryAsU32(startPageIndex), isAccessFault);
15138
15560
  }
15139
15561
  static fromMemoryIndex(maybeMemoryIndex, isAccessFault = false) {
15140
- const memoryIndex = tryAsMemoryIndex(maybeMemoryIndex % MEMORY_SIZE);
15562
+ const memoryIndex = memory_index_tryAsMemoryIndex(maybeMemoryIndex % MEMORY_SIZE);
15141
15563
  const startPageIndex = getStartPageIndex(memoryIndex);
15142
15564
  return new PageFault(numbers_tryAsU32(startPageIndex), isAccessFault);
15143
15565
  }
@@ -15216,9 +15638,9 @@ class MemoryRange {
15216
15638
  constructor(start, length) {
15217
15639
  this.start = start;
15218
15640
  this.length = length;
15219
- this.end = tryAsMemoryIndex((this.start + this.length) % MEMORY_SIZE);
15641
+ this.end = memory_index_tryAsMemoryIndex((this.start + this.length) % MEMORY_SIZE);
15220
15642
  if (length > 0) {
15221
- this.lastIndex = tryAsMemoryIndex((this.end - 1 + MEMORY_SIZE) % MEMORY_SIZE);
15643
+ this.lastIndex = memory_index_tryAsMemoryIndex((this.end - 1 + MEMORY_SIZE) % MEMORY_SIZE);
15222
15644
  }
15223
15645
  }
15224
15646
  /** Creates a memory range from given starting point and length */
@@ -15261,7 +15683,7 @@ class MemoryRange {
15261
15683
  *
15262
15684
  * it should be in `memory-consts` but it cannot be there because of circular dependency
15263
15685
  */
15264
- const RESERVED_MEMORY_RANGE = MemoryRange.fromStartAndLength(tryAsMemoryIndex(0), RESERVED_NUMBER_OF_PAGES * PAGE_SIZE);
15686
+ const RESERVED_MEMORY_RANGE = MemoryRange.fromStartAndLength(memory_index_tryAsMemoryIndex(0), RESERVED_NUMBER_OF_PAGES * memory_consts_PAGE_SIZE);
15265
15687
 
15266
15688
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/page-range.ts
15267
15689
 
@@ -15299,7 +15721,7 @@ class PageRange {
15299
15721
  // lastIndex is not null because we just ensured that the range is not empty
15300
15722
  const pageWithLastIndex = getPageNumber(range.lastIndex ?? range.end);
15301
15723
  const endPage = getNextPageNumber(pageWithLastIndex);
15302
- if ((startPage === endPage || startPage === pageWithLastIndex) && range.length > PAGE_SIZE) {
15724
+ if ((startPage === endPage || startPage === pageWithLastIndex) && range.length > memory_consts_PAGE_SIZE) {
15303
15725
  // full range
15304
15726
  return new PageRange(startPage, MAX_NUMBER_OF_PAGES);
15305
15727
  }
@@ -15363,8 +15785,8 @@ class ReadablePage extends MemoryPage {
15363
15785
  }
15364
15786
  loadInto(result, startIndex, length) {
15365
15787
  const endIndex = startIndex + length;
15366
- if (endIndex > PAGE_SIZE) {
15367
- return Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
15788
+ if (endIndex > memory_consts_PAGE_SIZE) {
15789
+ return Result.error(PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + memory_consts_PAGE_SIZE}`);
15368
15790
  }
15369
15791
  const bytes = this.data.subarray(startIndex, endIndex);
15370
15792
  // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -15397,8 +15819,8 @@ class WriteablePage extends MemoryPage {
15397
15819
  constructor(pageNumber, initialData) {
15398
15820
  super(pageNumber);
15399
15821
  const dataLength = initialData?.length ?? 0;
15400
- const initialPageLength = Math.min(PAGE_SIZE, Math.max(dataLength, MIN_ALLOCATION_LENGTH));
15401
- this.buffer = new ArrayBuffer(initialPageLength, { maxByteLength: PAGE_SIZE });
15822
+ const initialPageLength = Math.min(memory_consts_PAGE_SIZE, Math.max(dataLength, MIN_ALLOCATION_LENGTH));
15823
+ this.buffer = new ArrayBuffer(initialPageLength, { maxByteLength: memory_consts_PAGE_SIZE });
15402
15824
  this.view = new Uint8Array(this.buffer);
15403
15825
  if (initialData !== undefined) {
15404
15826
  this.view.set(initialData);
@@ -15406,8 +15828,8 @@ class WriteablePage extends MemoryPage {
15406
15828
  }
15407
15829
  loadInto(result, startIndex, length) {
15408
15830
  const endIndex = startIndex + length;
15409
- if (endIndex > PAGE_SIZE) {
15410
- return Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
15831
+ if (endIndex > memory_consts_PAGE_SIZE) {
15832
+ return Result.error(PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + memory_consts_PAGE_SIZE}`);
15411
15833
  }
15412
15834
  const bytes = this.view.subarray(startIndex, endIndex);
15413
15835
  // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -15416,16 +15838,16 @@ class WriteablePage extends MemoryPage {
15416
15838
  return Result.ok(OK);
15417
15839
  }
15418
15840
  storeFrom(startIndex, bytes) {
15419
- if (this.buffer.byteLength < startIndex + bytes.length && this.buffer.byteLength < PAGE_SIZE) {
15420
- const newLength = Math.min(PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, startIndex + bytes.length));
15841
+ if (this.buffer.byteLength < startIndex + bytes.length && this.buffer.byteLength < memory_consts_PAGE_SIZE) {
15842
+ const newLength = Math.min(memory_consts_PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, startIndex + bytes.length));
15421
15843
  this.buffer.resize(newLength);
15422
15844
  }
15423
15845
  this.view.set(bytes, startIndex);
15424
15846
  return Result.ok(OK);
15425
15847
  }
15426
15848
  setData(pageIndex, data) {
15427
- if (this.buffer.byteLength < pageIndex + data.length && this.buffer.byteLength < PAGE_SIZE) {
15428
- const newLength = Math.min(PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, pageIndex + data.length));
15849
+ if (this.buffer.byteLength < pageIndex + data.length && this.buffer.byteLength < memory_consts_PAGE_SIZE) {
15850
+ const newLength = Math.min(memory_consts_PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, pageIndex + data.length));
15429
15851
  this.buffer.resize(newLength);
15430
15852
  }
15431
15853
  this.view.set(data, pageIndex);
@@ -15476,10 +15898,10 @@ class Memory {
15476
15898
  this.memory = memory;
15477
15899
  }
15478
15900
  store(address, bytes) {
15479
- return this.storeFrom(tryAsMemoryIndex(address), bytes);
15901
+ return this.storeFrom(memory_index_tryAsMemoryIndex(address), bytes);
15480
15902
  }
15481
15903
  read(address, output) {
15482
- return this.loadInto(output, tryAsMemoryIndex(address));
15904
+ return this.loadInto(output, memory_index_tryAsMemoryIndex(address));
15483
15905
  }
15484
15906
  reset() {
15485
15907
  this.sbrkIndex = tryAsSbrkIndex(RESERVED_MEMORY_RANGE.end);
@@ -15506,8 +15928,8 @@ class Memory {
15506
15928
  let currentPosition = address;
15507
15929
  let bytesLeft = bytes.length;
15508
15930
  for (const page of pages) {
15509
- const pageStartIndex = tryAsPageIndex(currentPosition % PAGE_SIZE);
15510
- const bytesToWrite = Math.min(PAGE_SIZE - pageStartIndex, bytesLeft);
15931
+ const pageStartIndex = tryAsPageIndex(currentPosition % memory_consts_PAGE_SIZE);
15932
+ const bytesToWrite = Math.min(memory_consts_PAGE_SIZE - pageStartIndex, bytesLeft);
15511
15933
  const sourceStartIndex = currentPosition - address;
15512
15934
  const source = bytes.subarray(sourceStartIndex, sourceStartIndex + bytesToWrite);
15513
15935
  page.storeFrom(pageStartIndex, source);
@@ -15556,8 +15978,8 @@ class Memory {
15556
15978
  let currentPosition = startAddress;
15557
15979
  let bytesLeft = result.length;
15558
15980
  for (const page of pages) {
15559
- const pageStartIndex = tryAsPageIndex(currentPosition % PAGE_SIZE);
15560
- const bytesToRead = Math.min(PAGE_SIZE - pageStartIndex, bytesLeft);
15981
+ const pageStartIndex = tryAsPageIndex(currentPosition % memory_consts_PAGE_SIZE);
15982
+ const bytesToRead = Math.min(memory_consts_PAGE_SIZE - pageStartIndex, bytesLeft);
15561
15983
  const destinationStartIndex = currentPosition - startAddress;
15562
15984
  const destination = result.subarray(destinationStartIndex);
15563
15985
  page.loadInto(destination, pageStartIndex, bytesToRead);
@@ -15584,7 +16006,7 @@ class Memory {
15584
16006
  const newSbrkIndex = tryAsSbrkIndex(alignToPageSize(newVirtualSbrkIndex));
15585
16007
  // TODO [MaSi]: `getPageNumber` works incorrectly for SbrkIndex. Sbrk index should be changed to MemoryIndex
15586
16008
  const firstPageNumber = getPageNumber(currentSbrkIndex);
15587
- const pagesToAllocate = (newSbrkIndex - currentSbrkIndex) / PAGE_SIZE;
16009
+ const pagesToAllocate = (newSbrkIndex - currentSbrkIndex) / memory_consts_PAGE_SIZE;
15588
16010
  const rangeToAllocate = PageRange.fromStartAndLength(firstPageNumber, pagesToAllocate);
15589
16011
  for (const pageNumber of rangeToAllocate) {
15590
16012
  const page = new WriteablePage(pageNumber);
@@ -15639,8 +16061,8 @@ class MemoryBuilder {
15639
16061
  setReadablePages(start, end, data = new Uint8Array()) {
15640
16062
  this.ensureNotFinalized();
15641
16063
  debug_check `${start < end} end has to be bigger than start`;
15642
- debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
15643
- debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
16064
+ debug_check `${start % memory_consts_PAGE_SIZE === 0} start needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
16065
+ debug_check `${end % memory_consts_PAGE_SIZE === 0} end needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
15644
16066
  debug_check `${data.length <= end - start} the initial data is longer than address range`;
15645
16067
  const length = end - start;
15646
16068
  const range = MemoryRange.fromStartAndLength(start, length);
@@ -15649,7 +16071,7 @@ class MemoryBuilder {
15649
16071
  const noOfPages = pages.length;
15650
16072
  for (let i = 0; i < noOfPages; i++) {
15651
16073
  const pageNumber = pages[i];
15652
- const dataChunk = data.subarray(i * PAGE_SIZE, (i + 1) * PAGE_SIZE);
16074
+ const dataChunk = data.subarray(i * memory_consts_PAGE_SIZE, (i + 1) * memory_consts_PAGE_SIZE);
15653
16075
  const page = new ReadablePage(pageNumber, dataChunk);
15654
16076
  this.initialMemory.set(pageNumber, page);
15655
16077
  }
@@ -15667,8 +16089,8 @@ class MemoryBuilder {
15667
16089
  setWriteablePages(start, end, data = new Uint8Array()) {
15668
16090
  this.ensureNotFinalized();
15669
16091
  debug_check `${start < end} end has to be bigger than start`;
15670
- debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
15671
- debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
16092
+ debug_check `${start % memory_consts_PAGE_SIZE === 0} start needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
16093
+ debug_check `${end % memory_consts_PAGE_SIZE === 0} end needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
15672
16094
  debug_check `${data.length <= end - start} the initial data is longer than address range`;
15673
16095
  const length = end - start;
15674
16096
  const range = MemoryRange.fromStartAndLength(start, length);
@@ -15677,7 +16099,7 @@ class MemoryBuilder {
15677
16099
  const noOfPages = pages.length;
15678
16100
  for (let i = 0; i < noOfPages; i++) {
15679
16101
  const pageNumber = pages[i];
15680
- const dataChunk = data.subarray(i * PAGE_SIZE, (i + 1) * PAGE_SIZE);
16102
+ const dataChunk = data.subarray(i * memory_consts_PAGE_SIZE, (i + 1) * memory_consts_PAGE_SIZE);
15681
16103
  const page = new WriteablePage(pageNumber, dataChunk);
15682
16104
  this.initialMemory.set(pageNumber, page);
15683
16105
  }
@@ -15689,8 +16111,8 @@ class MemoryBuilder {
15689
16111
  */
15690
16112
  setData(start, data) {
15691
16113
  this.ensureNotFinalized();
15692
- const pageOffset = start % PAGE_SIZE;
15693
- const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
16114
+ const pageOffset = start % memory_consts_PAGE_SIZE;
16115
+ const remainingSpaceOnPage = memory_consts_PAGE_SIZE - pageOffset;
15694
16116
  debug_check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
15695
16117
  const length = data.length;
15696
16118
  const range = MemoryRange.fromStartAndLength(start, length);
@@ -15881,27 +16303,27 @@ class Program {
15881
16303
  static fromSpi(blob, args, hasMetadata) {
15882
16304
  const { code: spiCode, metadata } = hasMetadata ? extractCodeAndMetadata(blob) : { code: blob };
15883
16305
  const { code, memory: rawMemory, registers } = decodeStandardProgram(spiCode, args);
15884
- const regs = new Registers();
16306
+ const regs = new registers_Registers();
15885
16307
  regs.copyFrom(registers);
15886
16308
  const memoryBuilder = new MemoryBuilder();
15887
16309
  for (const { start, end, data } of rawMemory.readable) {
15888
- const startIndex = tryAsMemoryIndex(start);
15889
- const endIndex = tryAsMemoryIndex(end);
16310
+ const startIndex = memory_index_tryAsMemoryIndex(start);
16311
+ const endIndex = memory_index_tryAsMemoryIndex(end);
15890
16312
  memoryBuilder.setReadablePages(startIndex, endIndex, data ?? new Uint8Array());
15891
16313
  }
15892
16314
  for (const { start, end, data } of rawMemory.writeable) {
15893
- const startIndex = tryAsMemoryIndex(start);
15894
- const endIndex = tryAsMemoryIndex(end);
16315
+ const startIndex = memory_index_tryAsMemoryIndex(start);
16316
+ const endIndex = memory_index_tryAsMemoryIndex(end);
15895
16317
  memoryBuilder.setWriteablePages(startIndex, endIndex, data ?? new Uint8Array());
15896
16318
  }
15897
- const heapStart = tryAsMemoryIndex(rawMemory.sbrkIndex);
16319
+ const heapStart = memory_index_tryAsMemoryIndex(rawMemory.sbrkIndex);
15898
16320
  const heapEnd = tryAsSbrkIndex(rawMemory.heapEnd);
15899
16321
  const memory = memoryBuilder.finalize(heapStart, heapEnd);
15900
16322
  return new Program(code, regs, memory, metadata);
15901
16323
  }
15902
16324
  static fromGeneric(blob, hasMetadata) {
15903
16325
  const { code, metadata } = hasMetadata ? extractCodeAndMetadata(blob) : { code: blob };
15904
- const regs = new Registers();
16326
+ const regs = new registers_Registers();
15905
16327
  const memory = new Memory();
15906
16328
  return new Program(code, regs, memory, metadata);
15907
16329
  }
@@ -16916,6 +17338,45 @@ class BasicBlocks {
16916
17338
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/basic-blocks/index.ts
16917
17339
 
16918
17340
 
17341
+ ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/gas.ts
17342
+
17343
+
17344
+ /** Create a new gas counter instance depending on the gas value. */
17345
+ function gasCounter(gas) {
17346
+ return new GasCounterU64(numbers_tryAsU64(gas));
17347
+ }
17348
+ class GasCounterU64 {
17349
+ gas;
17350
+ initialGas;
17351
+ constructor(gas) {
17352
+ this.gas = gas;
17353
+ this.initialGas = gas_tryAsGas(gas);
17354
+ }
17355
+ set(g) {
17356
+ this.gas = numbers_tryAsU64(g);
17357
+ }
17358
+ get() {
17359
+ return gas_tryAsGas(this.gas);
17360
+ }
17361
+ sub(g) {
17362
+ const result = this.gas - numbers_tryAsU64(g);
17363
+ if (result >= 0n) {
17364
+ this.gas = numbers_tryAsU64(result);
17365
+ return false;
17366
+ }
17367
+ this.gas = numbers_tryAsU64(0n);
17368
+ return true;
17369
+ }
17370
+ used() {
17371
+ const gasConsumed = numbers_tryAsU64(this.initialGas) - this.gas;
17372
+ // If we have less than zero left, we assume that all gas has been consumed.
17373
+ if (gasConsumed < 0) {
17374
+ return this.initialGas;
17375
+ }
17376
+ return gas_tryAsGas(gasConsumed);
17377
+ }
17378
+ }
17379
+
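A self-contained sketch of the gas-counter semantics defined in this relocated module, under the assumption that gas is tracked as a non-negative amount saturating at zero: `sub` reports underflow and `used` never exceeds the initial gas.

```ts
// Sketch mirroring GasCounterU64 above, with plain bigint in place of the U64 helpers.
class GasCounterSketch {
  private gas: bigint;
  constructor(private readonly initialGas: bigint) {
    this.gas = initialGas;
  }
  /** Subtract gas; returns true when the counter underflowed (out of gas). */
  sub(g: bigint): boolean {
    const result = this.gas - g;
    if (result >= 0n) {
      this.gas = result;
      return false;
    }
    this.gas = 0n;
    return true;
  }
  /** Gas consumed so far. */
  used(): bigint {
    return this.initialGas - this.gas;
  }
}

const gas = new GasCounterSketch(1_000n);
gas.sub(300n);    // false, 700 gas left
gas.used();       // 300n
gas.sub(10_000n); // true, counter saturates at 0
```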
16919
17380
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/instruction-gas-map.ts
16920
17381
 
16921
17382
 
@@ -17492,7 +17953,7 @@ class LoadOps {
17492
17953
  }
17493
17954
  loadNumber(address, registerIndex, numberLength) {
17494
17955
  const registerBytes = this.regs.getBytesAsLittleEndian(registerIndex, REG_SIZE_BYTES);
17495
- const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), tryAsMemoryIndex(address));
17956
+ const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), memory_index_tryAsMemoryIndex(address));
17496
17957
  if (loadResult.isError) {
17497
17958
  if (loadResult.error.isAccessFault) {
17498
17959
  this.instructionResult.status = result_Result.FAULT_ACCESS;
@@ -17508,7 +17969,7 @@ class LoadOps {
17508
17969
  loadSignedNumber(address, registerIndex, numberLength) {
17509
17970
  // load all bytes from register to correctly handle the sign.
17510
17971
  const registerBytes = this.regs.getBytesAsLittleEndian(registerIndex, REG_SIZE_BYTES);
17511
- const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), tryAsMemoryIndex(address));
17972
+ const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), memory_index_tryAsMemoryIndex(address));
17512
17973
  if (loadResult.isError) {
17513
17974
  if (loadResult.error.isAccessFault) {
17514
17975
  this.instructionResult.status = result_Result.FAULT_ACCESS;
@@ -17930,7 +18391,7 @@ class StoreOps {
17930
18391
  this.store(address, secondImmediateDecoder.getExtendedBytesAsLittleEndian());
17931
18392
  }
17932
18393
  store(address, bytes) {
17933
- const storeResult = this.memory.storeFrom(tryAsMemoryIndex(address), bytes);
18394
+ const storeResult = this.memory.storeFrom(memory_index_tryAsMemoryIndex(address), bytes);
17934
18395
  if (storeResult.isOk) {
17935
18396
  return;
17936
18397
  }
@@ -17939,7 +18400,7 @@ class StoreOps {
17939
18400
  }
17940
18401
  else {
17941
18402
  this.instructionResult.status = result_Result.FAULT;
17942
- this.instructionResult.exitParam = getStartPageIndex(tryAsMemoryIndex(storeResult.error.address));
18403
+ this.instructionResult.exitParam = getStartPageIndex(memory_index_tryAsMemoryIndex(storeResult.error.address));
17943
18404
  }
17944
18405
  }
17945
18406
  }
@@ -18738,11 +19199,11 @@ class ProgramDecoder {
18738
19199
 
18739
19200
 
18740
19201
  const interpreter_logger = Logger.new(import.meta.filename, "pvm");
18741
- class Interpreter {
19202
+ class interpreter_Interpreter {
18742
19203
  useSbrkGas;
18743
- registers = new Registers();
19204
+ registers = new registers_Registers();
18744
19205
  memory = new Memory();
18745
- gas = gasCounter(tryAsGas(0));
19206
+ gas = gasCounter(gas_tryAsGas(0));
18746
19207
  code = new Uint8Array();
18747
19208
  mask = Mask.empty();
18748
19209
  pc = 0;
@@ -18876,8 +19337,8 @@ class Interpreter {
18876
19337
  break;
18877
19338
  case ArgumentType.TWO_REGISTERS:
18878
19339
  if (this.useSbrkGas && currentInstruction === Instruction.SBRK) {
18879
- const calculateSbrkCost = (length) => (alignToPageSize(length) / PAGE_SIZE) * 16;
18880
- const underflow = this.gas.sub(tryAsGas(calculateSbrkCost(this.registers.getLowerU32(argsResult.firstRegisterIndex))));
19340
+ const calculateSbrkCost = (length) => (alignToPageSize(length) / memory_consts_PAGE_SIZE) * 16;
19341
+ const underflow = this.gas.sub(gas_tryAsGas(calculateSbrkCost(this.registers.getLowerU32(argsResult.firstRegisterIndex))));
18881
19342
  if (underflow) {
18882
19343
  this.status = status_Status.OOG;
18883
19344
  return this.status;
@@ -18972,12 +19433,88 @@ class Interpreter {
18972
19433
  }
18973
19434
  }
18974
19435
 
19436
+ ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/debugger-adapter.ts
19437
+
19438
+
19439
+
19440
+
19441
+
19442
+
19443
+ class DebuggerAdapter {
19444
+ pvm;
19445
+ constructor(useSbrkGas = false) {
19446
+ this.pvm = new Interpreter({ useSbrkGas });
19447
+ }
19448
+ resetGeneric(rawProgram, flatRegisters, initialGas) {
19449
+ this.pvm.resetGeneric(rawProgram, 0, tryAsGas(initialGas), new Registers(flatRegisters));
19450
+ }
19451
+ reset(rawProgram, pc, gas, maybeRegisters, maybeMemory) {
19452
+ this.pvm.resetGeneric(rawProgram, pc, tryAsGas(gas), maybeRegisters, maybeMemory);
19453
+ }
19454
+ getPageDump(pageNumber) {
19455
+ const page = this.pvm.getMemoryPage(pageNumber);
19456
+ if (page === null) {
19457
+ // page wasn't allocated so we return an empty page
19458
+ return safeAllocUint8Array(PAGE_SIZE);
19459
+ }
19460
+ if (page.length === PAGE_SIZE) {
19461
+ // page was allocated and has a proper size so we can simply return it
19462
+ return page;
19463
+ }
19464
+ // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
19465
+ const fullPage = safeAllocUint8Array(PAGE_SIZE);
19466
+ fullPage.set(page);
19467
+ return fullPage;
19468
+ }
19469
+ setMemory(address, value) {
19470
+ this.pvm.memory.storeFrom(tryAsMemoryIndex(address), value);
19471
+ }
19472
+ getExitArg() {
19473
+ return this.pvm.getExitParam() ?? 0;
19474
+ }
19475
+ getStatus() {
19476
+ return this.pvm.getStatus();
19477
+ }
19478
+ nextStep() {
19479
+ return this.pvm.nextStep() === Status.OK;
19480
+ }
19481
+ nSteps(steps) {
19482
+ check `${steps >>> 0 > 0} Expected a positive integer got ${steps}`;
19483
+ for (let i = 0; i < steps; i++) {
19484
+ const isOk = this.nextStep();
19485
+ if (!isOk) {
19486
+ return false;
19487
+ }
19488
+ }
19489
+ return true;
19490
+ }
19491
+ getRegisters() {
19492
+ return this.pvm.registers.getAllU64();
19493
+ }
19494
+ setRegisters(registers) {
19495
+ this.pvm.registers.copyFrom(new Registers(registers));
19496
+ }
19497
+ getProgramCounter() {
19498
+ return this.pvm.getPC();
19499
+ }
19500
+ setNextProgramCounter(nextPc) {
19501
+ this.pvm.setNextPC(nextPc);
19502
+ }
19503
+ getGasLeft() {
19504
+ return BigInt(this.pvm.gas.get());
19505
+ }
19506
+ setGasLeft(gas) {
19507
+ this.pvm.gas.set(tryAsGas(gas));
19508
+ }
19509
+ }
19510
+
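A hypothetical driving loop for the `DebuggerAdapter` added above; the program bytes and flat register dump are placeholders, and the gas amount is arbitrary.

```ts
// Hypothetical driver; rawProgram and flatRegisters are placeholders, not real artifacts.
declare const rawProgram: Uint8Array;
declare const flatRegisters: Uint8Array;

const dbg = new DebuggerAdapter();
dbg.resetGeneric(rawProgram, flatRegisters, 10_000n);

while (dbg.nextStep()) {
  // keep stepping until the PVM halts, faults, or runs out of gas
}
console.log(dbg.getStatus(), dbg.getProgramCounter(), dbg.getGasLeft());
```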
18975
19511
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/index.ts
18976
19512
 
18977
19513
 
18978
19514
 
18979
19515
 
18980
19516
 
19517
+
18981
19518
  ;// CONCATENATED MODULE: ./node_modules/@fluffylabs/anan-as/build/debug-raw.js
18982
19519
  async function instantiate(module, imports = {}) {
18983
19520
  const adaptedImports = {
@@ -19431,12 +19968,12 @@ class AnanasMemory {
19431
19968
  }
19432
19969
  class AnanasGasCounter {
19433
19970
  instance;
19434
- initialGas = tryAsGas(0n);
19971
+ initialGas = gas_tryAsGas(0n);
19435
19972
  constructor(instance) {
19436
19973
  this.instance = instance;
19437
19974
  }
19438
19975
  get() {
19439
- return tryAsGas(this.instance.getGasLeft());
19976
+ return gas_tryAsGas(this.instance.getGasLeft());
19440
19977
  }
19441
19978
  set(g) {
19442
19979
  this.instance.setGasLeft(BigInt(g));
@@ -19541,7 +20078,7 @@ class InterpreterInstanceManager {
19541
20078
  const instances = [];
19542
20079
  switch (interpreter) {
19543
20080
  case PvmBackend.BuiltIn:
19544
- instances.push(new Interpreter({
20081
+ instances.push(new interpreter_Interpreter({
19545
20082
  useSbrkGas: false,
19546
20083
  }));
19547
20084
  break;
@@ -19715,10 +20252,10 @@ class Info {
19715
20252
  const chunk = encodedInfo.raw.subarray(Number(offset), Number(offset + length));
19716
20253
  const writeResult = memory.storeFrom(outputStart, chunk);
19717
20254
  if (writeResult.isError) {
19718
- logger_logger.trace `INFO(${serviceId}, off: ${offset}, len: ${length}) <- PANIC`;
20255
+ logger_logger.trace `[${this.currentServiceId}] INFO(${serviceId}, off: ${offset}, len: ${length}) <- PANIC`;
19719
20256
  return PvmExecution.Panic;
19720
20257
  }
19721
- logger_logger.trace `INFO(${serviceId}, off: ${offset}, len: ${length}) <- ${bytes_BytesBlob.blobFrom(chunk)}`;
20258
+ logger_logger.trace `[${this.currentServiceId}] INFO(${serviceId}, off: ${offset}, len: ${length}) <- ${bytes_BytesBlob.blobFrom(chunk)}`;
19722
20259
  if (accountInfo === null) {
19723
20260
  regs.set(IN_OUT_REG, HostCallResult.NONE);
19724
20261
  return;
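This hunk and the host-call hunks that follow all make the same change: every trace line is now prefixed with `[${this.currentServiceId}]`, so interleaved logs from different accumulating services can be told apart. A hypothetical wrapper along these lines could centralize that prefix instead of repeating it at each call site (sketch only, not the package's logger API; note that it formats the message eagerly):

type TraceFn = (strings: TemplateStringsArray, ...values: unknown[]) => void;

// Hypothetical helper: returns a trace function that prepends "[serviceId]" to every message.
function withServiceId(serviceId: number, trace: TraceFn): TraceFn {
  return (strings, ...values) => {
    const message = strings.reduce((acc, part, i) => acc + part + (i < values.length ? String(values[i]) : ""), "");
    trace`[${serviceId}] ${message}`;
  };
}

// usage sketch:
// const trace = withServiceId(currentServiceId, logger_logger.trace);
// trace`INFO(${serviceId}, off: ${offset}, len: ${length}) <- PANIC`;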
@@ -19942,7 +20479,7 @@ class AccumulateExternalities {
19942
20479
  const bytes = serviceInfo.storageUtilisationBytes - length - LOOKUP_HISTORY_ENTRY_BYTES;
19943
20480
  return this.updatedState.updateServiceStorageUtilisation(this.currentServiceId, items, bytes, serviceInfo);
19944
20481
  };
19945
- // https://graypaper.fluffylabs.dev/#/9a08063/389501389501?v=0.6.6
20482
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/380802380802?v=0.7.2
19946
20483
  if (s.status === PreimageStatusKind.Requested) {
19947
20484
  const res = updateStorageUtilisation();
19948
20485
  if (res.isError) {
@@ -19955,7 +20492,7 @@ class AccumulateExternalities {
19955
20492
  return Result.ok(OK);
19956
20493
  }
19957
20494
  const t = this.currentTimeslot;
19958
- // https://graypaper.fluffylabs.dev/#/9a08063/378102378102?v=0.6.6
20495
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/380802380802?v=0.7.2
19959
20496
  if (s.status === PreimageStatusKind.Unavailable) {
19960
20497
  const y = s.data[1];
19961
20498
  if (y < t - this.chainSpec.preimageExpungePeriod) {
@@ -19971,14 +20508,14 @@ class AccumulateExternalities {
19971
20508
  }
19972
20509
  return Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
19973
20510
  }
19974
- // https://graypaper.fluffylabs.dev/#/9a08063/38c80138c801?v=0.6.6
20511
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/382802383302?v=0.7.2
19975
20512
  if (s.status === PreimageStatusKind.Available) {
19976
20513
  this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
19977
20514
  lookupHistory: new LookupHistoryItem(status.hash, status.length, service_tryAsLookupHistorySlots([s.data[0], t])),
19978
20515
  }));
19979
20516
  return Result.ok(OK);
19980
20517
  }
19981
- // https://graypaper.fluffylabs.dev/#/9a08063/38d00138d001?v=0.6.6
20518
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/384002384c02?v=0.7.2
19982
20519
  if (s.status === PreimageStatusKind.Reavailable) {
19983
20520
  const y = s.data[1];
19984
20521
  if (y < t - this.chainSpec.preimageExpungePeriod) {
@@ -21138,12 +21675,12 @@ function createMergeContext(chainSpec, state, inputState, results) {
21138
21675
  }
21139
21676
  function updatePrivilegedService(currentServiceId, serviceIdUpdatedByManager, selfUpdatedServiceId) {
21140
21677
  if (currentServiceId === serviceIdUpdatedByManager) {
21141
- return serviceIdUpdatedByManager;
21678
+ return selfUpdatedServiceId;
21142
21679
  }
21143
- return selfUpdatedServiceId;
21680
+ return serviceIdUpdatedByManager;
21144
21681
  }
21145
21682
  function mergePrivilegedServices(mergeContext, [serviceId, { stateUpdate }]) {
21146
- const { outputState, currentPrivilegedServices, chainSpec } = mergeContext;
21683
+ const { outputState, currentPrivilegedServices, chainSpec, privilegedServicesUpdatedByManager } = mergeContext;
21147
21684
  const currentManager = currentPrivilegedServices.manager;
21148
21685
  const currentRegistrar = currentPrivilegedServices.registrar;
21149
21686
  const currentDelegator = currentPrivilegedServices.delegator;
@@ -21161,28 +21698,35 @@ function mergePrivilegedServices(mergeContext, [serviceId, { stateUpdate }]) {
21161
21698
  });
21162
21699
  }
21163
21700
  if (serviceId === currentRegistrar) {
21164
- const newRegistrar = updatePrivilegedService(currentPrivilegedServices.registrar, outputState.privilegedServices.registrar, privilegedServices.registrar);
21701
+ const newRegistrar = updatePrivilegedService(currentPrivilegedServices.registrar, privilegedServicesUpdatedByManager.registrar, privilegedServices.registrar);
21165
21702
  outputState.privilegedServices = PrivilegedServices.create({
21166
21703
  ...outputState.privilegedServices,
21167
21704
  registrar: newRegistrar,
21168
21705
  });
21169
21706
  }
21170
21707
  if (serviceId === currentDelegator) {
21171
- const newDelegator = updatePrivilegedService(currentPrivilegedServices.delegator, outputState.privilegedServices.delegator, privilegedServices.delegator);
21708
+ const newDelegator = updatePrivilegedService(currentPrivilegedServices.delegator, privilegedServicesUpdatedByManager.delegator, privilegedServices.delegator);
21172
21709
  outputState.privilegedServices = PrivilegedServices.create({
21173
21710
  ...outputState.privilegedServices,
21174
21711
  delegator: newDelegator,
21175
21712
  });
21176
21713
  }
21177
- const assignersFromOutputState = outputState.privilegedServices;
21178
- const newAssigners = currentAssigners.map((currentAssigner, coreIndex) => serviceId === currentAssigner
21179
- ? updatePrivilegedService(currentPrivilegedServices.assigners[coreIndex], assignersFromOutputState.assigners[coreIndex], privilegedServices.assigners[coreIndex])
21180
- : currentAssigner);
21181
- const newAssignersPerCore = tryAsPerCore(newAssigners, chainSpec);
21182
- outputState.privilegedServices = PrivilegedServices.create({
21183
- ...outputState.privilegedServices,
21184
- assigners: newAssignersPerCore,
21714
+ let shouldUpdateAssigners = false;
21715
+ const newAssigners = currentAssigners.map((currentAssigner, coreIndex) => {
21716
+ if (serviceId === currentAssigner) {
21717
+ const newAssigner = updatePrivilegedService(currentPrivilegedServices.assigners[coreIndex], privilegedServicesUpdatedByManager.assigners[coreIndex], privilegedServices.assigners[coreIndex]);
21718
+ shouldUpdateAssigners = shouldUpdateAssigners || newAssigner !== currentAssigner;
21719
+ return newAssigner;
21720
+ }
21721
+ return currentAssigner;
21185
21722
  });
21723
+ if (shouldUpdateAssigners) {
21724
+ const newAssignersPerCore = tryAsPerCore(newAssigners, chainSpec);
21725
+ outputState.privilegedServices = PrivilegedServices.create({
21726
+ ...outputState.privilegedServices,
21727
+ assigners: newAssignersPerCore,
21728
+ });
21729
+ }
21186
21730
  }
21187
21731
  }
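A short worked example of the corrected `updatePrivilegedService` precedence (service ids are made up): the manager's reassignment of a privileged role wins, unless the manager left the role with its current holder, in which case that holder's own update applies. The new `shouldUpdateAssigners` flag additionally avoids rewriting `outputState.privilegedServices` when no assigner actually changed.

// currentServiceId = 5 currently holds the role (e.g. registrar);
// the manager's accumulation set it to 9; service 5's own accumulation set it to 7.
updatePrivilegedService(5, 9, 7); // -> 9: the manager reassigned the role away from 5
updatePrivilegedService(5, 5, 7); // -> 7: the manager kept 5, so 5's self-update takes effect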
21188
21732
  function mergeValidatorsData(mergeContext, [serviceId, { stateUpdate }]) {
@@ -21327,7 +21871,7 @@ class Assign {
21327
21871
  const memoryReadResult = memory.loadInto(res, authorizationQueueStart);
21328
21872
  // error while reading the memory.
21329
21873
  if (memoryReadResult.isError) {
21330
- logger_logger.trace `ASSIGN() <- PANIC`;
21874
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN() <- PANIC`;
21331
21875
  return PvmExecution.Panic;
21332
21876
  }
21333
21877
  if (maybeCoreIndex >= this.chainSpec.coresCount) {
@@ -21342,18 +21886,18 @@ class Assign {
21342
21886
  const result = this.partialState.updateAuthorizationQueue(coreIndex, fixedSizeAuthQueue, assigners);
21343
21887
  if (result.isOk) {
21344
21888
  regs.set(assign_IN_OUT_REG, HostCallResult.OK);
21345
- logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`;
21889
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`;
21346
21890
  return;
21347
21891
  }
21348
21892
  const e = result.error;
21349
21893
  if (e === UpdatePrivilegesError.UnprivilegedService) {
21350
21894
  regs.set(assign_IN_OUT_REG, HostCallResult.HUH);
21351
- logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
21895
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
21352
21896
  return;
21353
21897
  }
21354
21898
  if (e === UpdatePrivilegesError.InvalidServiceId) {
21355
21899
  regs.set(assign_IN_OUT_REG, HostCallResult.WHO);
21356
- logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
21900
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- WHO`;
21357
21901
  return;
21358
21902
  }
21359
21903
  assertNever(e);
@@ -21424,7 +21968,7 @@ class Bless {
21424
21968
  decoder.resetTo(0);
21425
21969
  const memoryReadResult = memory.loadInto(result, memIndex);
21426
21970
  if (memoryReadResult.isError) {
21427
- logger_logger.trace `BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}) <- PANIC`;
21971
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}) <- PANIC`;
21428
21972
  return PvmExecution.Panic;
21429
21973
  }
21430
21974
  const { serviceId, gas } = decoder.object(serviceIdAndGasCodec);
@@ -21437,26 +21981,26 @@ class Bless {
21437
21981
  const authorizersDecoder = decoder_Decoder.fromBlob(res);
21438
21982
  const memoryReadResult = memory.loadInto(res, authorization);
21439
21983
  if (memoryReadResult.isError) {
21440
- logger_logger.trace `BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- PANIC`;
21984
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- PANIC`;
21441
21985
  return PvmExecution.Panic;
21442
21986
  }
21443
21987
  // `a`
21444
21988
  const authorizers = tryAsPerCore(authorizersDecoder.sequenceFixLen(descriptors_codec.u32.asOpaque(), this.chainSpec.coresCount), this.chainSpec);
21445
21989
  const updateResult = this.partialState.updatePrivilegedServices(manager, authorizers, delegator, registrar, autoAccumulate);
21446
21990
  if (updateResult.isOk) {
21447
- logger_logger.trace `BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- OK`;
21991
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- OK`;
21448
21992
  regs.set(bless_IN_OUT_REG, HostCallResult.OK);
21449
21993
  return;
21450
21994
  }
21451
21995
  const e = updateResult.error;
21452
21996
  // NOTE: `UpdatePrivilegesError.UnprivilegedService` won't happen in 0.7.1+
21453
21997
  if (e === UpdatePrivilegesError.UnprivilegedService) {
21454
- logger_logger.trace `BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- HUH`;
21998
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- HUH`;
21455
21999
  regs.set(bless_IN_OUT_REG, HostCallResult.HUH);
21456
22000
  return;
21457
22001
  }
21458
22002
  if (e === UpdatePrivilegesError.InvalidServiceId) {
21459
- logger_logger.trace `BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- WHO`;
22003
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- WHO`;
21460
22004
  regs.set(bless_IN_OUT_REG, HostCallResult.WHO);
21461
22005
  return;
21462
22006
  }
@@ -21486,7 +22030,7 @@ class GasHostCall {
21486
22030
  }
21487
22031
  execute(gas, regs) {
21488
22032
  const gasValue = gas.get();
21489
- logger_logger.trace `GAS <- ${gasValue}`;
22033
+ logger_logger.trace `[${this.currentServiceId}] GAS <- ${gasValue}`;
21490
22034
  regs.set(7, numbers_tryAsU64(gasValue));
21491
22035
  return Promise.resolve(undefined);
21492
22036
  }
@@ -21518,7 +22062,7 @@ class Checkpoint {
21518
22062
  async execute(gas, regs) {
21519
22063
  await this.gasHostCall.execute(gas, regs);
21520
22064
  this.partialState.checkpoint();
21521
- logger_logger.trace `CHECKPOINT()`;
22065
+ logger_logger.trace `[${this.currentServiceId}] CHECKPOINT()`;
21522
22066
  return;
21523
22067
  }
21524
22068
  }
@@ -21558,18 +22102,18 @@ class Designate {
21558
22102
  const memoryReadResult = memory.loadInto(res, validatorsStart);
21559
22103
  // error while reading the memory.
21560
22104
  if (memoryReadResult.isError) {
21561
- logger_logger.trace `DESIGNATE() <- PANIC`;
22105
+ logger_logger.trace `[${this.currentServiceId}] DESIGNATE() <- PANIC`;
21562
22106
  return PvmExecution.Panic;
21563
22107
  }
21564
22108
  const decoder = decoder_Decoder.fromBlob(res);
21565
22109
  const validatorsData = decoder.sequenceFixLen(ValidatorData.Codec, this.chainSpec.validatorsCount);
21566
22110
  const result = this.partialState.updateValidatorsData(tryAsPerValidator(validatorsData, this.chainSpec));
21567
22111
  if (result.isError) {
21568
- logger_logger.trace `DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`;
22112
+ logger_logger.trace `[${this.currentServiceId}] DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`;
21569
22113
  regs.set(designate_IN_OUT_REG, HostCallResult.HUH);
21570
22114
  }
21571
22115
  else {
21572
- logger_logger.trace `DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`;
22116
+ logger_logger.trace `[${this.currentServiceId}] DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`;
21573
22117
  regs.set(designate_IN_OUT_REG, HostCallResult.OK);
21574
22118
  }
21575
22119
  }
@@ -21610,29 +22154,29 @@ class Eject {
21610
22154
  const previousCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
21611
22155
  const memoryReadResult = memory.loadInto(previousCodeHash.raw, preimageHashStart);
21612
22156
  if (memoryReadResult.isError) {
21613
- logger_logger.trace `EJECT(${serviceId}) <- PANIC`;
22157
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}) <- PANIC`;
21614
22158
  return PvmExecution.Panic;
21615
22159
  }
21616
22160
  // cannot eject self
21617
22161
  if (serviceId === this.currentServiceId) {
21618
22162
  regs.set(eject_IN_OUT_REG, HostCallResult.WHO);
21619
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- WHO`;
22163
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- WHO`;
21620
22164
  return;
21621
22165
  }
21622
22166
  const result = this.partialState.eject(serviceId, previousCodeHash);
21623
22167
  // All good!
21624
22168
  if (result.isOk) {
21625
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- OK`;
22169
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- OK`;
21626
22170
  regs.set(eject_IN_OUT_REG, HostCallResult.OK);
21627
22171
  return;
21628
22172
  }
21629
22173
  const e = result.error;
21630
22174
  if (e === EjectError.InvalidService) {
21631
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- WHO ${resultToString(result)}`;
22175
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- WHO ${resultToString(result)}`;
21632
22176
  regs.set(eject_IN_OUT_REG, HostCallResult.WHO);
21633
22177
  }
21634
22178
  else if (e === EjectError.InvalidPreimage) {
21635
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- HUH ${resultToString(result)}`;
22179
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- HUH ${resultToString(result)}`;
21636
22180
  regs.set(eject_IN_OUT_REG, HostCallResult.HUH);
21637
22181
  }
21638
22182
  else {
@@ -21651,9 +22195,9 @@ class Eject {
21651
22195
 
21652
22196
  const forget_IN_OUT_REG = 7;
21653
22197
  /**
21654
- * Mark a preimage hash as unavailable.
22198
+ * Delete a preimage hash, or mark it as unavailable if it was available.
21655
22199
  *
21656
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/382d01382d01?v=0.6.7
22200
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/385d01385d01?v=0.7.2
21657
22201
  */
21658
22202
  class Forget {
21659
22203
  currentServiceId;
@@ -21674,11 +22218,11 @@ class Forget {
21674
22218
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
21675
22219
  // error while reading the memory.
21676
22220
  if (memoryReadResult.isError) {
21677
- logger_logger.trace `FORGET(${hash}, ${length}) <- PANIC`;
22221
+ logger_logger.trace `[${this.currentServiceId}] FORGET(${hash}, ${length}) <- PANIC`;
21678
22222
  return PvmExecution.Panic;
21679
22223
  }
21680
22224
  const result = this.partialState.forgetPreimage(hash.asOpaque(), length);
21681
- logger_logger.trace `FORGET(${hash}, ${length}) <- ${resultToString(result)}`;
22225
+ logger_logger.trace `[${this.currentServiceId}] FORGET(${hash}, ${length}) <- ${resultToString(result)}`;
21682
22226
  if (result.isOk) {
21683
22227
  regs.set(forget_IN_OUT_REG, HostCallResult.OK);
21684
22228
  }
@@ -21735,11 +22279,11 @@ class New {
21735
22279
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
21736
22280
  // error while reading the memory.
21737
22281
  if (memoryReadResult.isError) {
21738
- logger_logger.trace `NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- PANIC`;
22282
+ logger_logger.trace `[${this.currentServiceId}] NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- PANIC`;
21739
22283
  return PvmExecution.Panic;
21740
22284
  }
21741
22285
  const assignedId = this.partialState.newService(codeHash.asOpaque(), codeLength, gas, allowance, gratisStorage, requestedServiceId);
21742
- logger_logger.trace `NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- ${resultToString(assignedId)}`;
22286
+ logger_logger.trace `[${this.currentServiceId}] NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- ${resultToString(assignedId)}`;
21743
22287
  if (assignedId.isOk) {
21744
22288
  regs.set(new_IN_OUT_REG, numbers_tryAsU64(assignedId.ok));
21745
22289
  return;
@@ -21799,11 +22343,11 @@ class Provide {
21799
22343
  const preimage = bytes_BytesBlob.blobFrom(safe_alloc_uint8array_safeAllocUint8Array(length));
21800
22344
  const memoryReadResult = memory.loadInto(preimage.raw, preimageStart);
21801
22345
  if (memoryReadResult.isError) {
21802
- logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
22346
+ logger_logger.trace `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
21803
22347
  return PvmExecution.Panic;
21804
22348
  }
21805
22349
  const result = this.partialState.providePreimage(serviceId, preimage);
21806
- logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${resultToString(result)}`;
22350
+ logger_logger.trace `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${resultToString(result)}`;
21807
22351
  if (result.isOk) {
21808
22352
  regs.set(provide_IN_OUT_REG, HostCallResult.OK);
21809
22353
  return;
@@ -21859,35 +22403,35 @@ class Query {
21859
22403
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
21860
22404
  // error while reading the memory.
21861
22405
  if (memoryReadResult.isError) {
21862
- logger_logger.trace `QUERY(${hash}, ${length}) <- PANIC`;
22406
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- PANIC`;
21863
22407
  return PvmExecution.Panic;
21864
22408
  }
21865
22409
  const result = this.partialState.checkPreimageStatus(hash.asOpaque(), length);
21866
22410
  const zero = numbers_tryAsU64(0n);
21867
22411
  if (result === null) {
21868
- logger_logger.trace `QUERY(${hash}, ${length}) <- NONE`;
22412
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- NONE`;
21869
22413
  regs.set(IN_OUT_REG_1, HostCallResult.NONE);
21870
22414
  regs.set(IN_OUT_REG_2, zero);
21871
22415
  return;
21872
22416
  }
21873
22417
  switch (result.status) {
21874
22418
  case PreimageStatusKind.Requested:
21875
- logger_logger.trace `QUERY(${hash}, ${length}) <- REQUESTED`;
22419
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- REQUESTED`;
21876
22420
  regs.set(IN_OUT_REG_1, zero);
21877
22421
  regs.set(IN_OUT_REG_2, zero);
21878
22422
  return;
21879
22423
  case PreimageStatusKind.Available:
21880
- logger_logger.trace `QUERY(${hash}, ${length}) <- AVAILABLE [${result.data}]`;
22424
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- AVAILABLE [${result.data}]`;
21881
22425
  regs.set(IN_OUT_REG_1, numbers_tryAsU64((BigInt(result.data[0]) << UPPER_BITS_SHIFT) + 1n));
21882
22426
  regs.set(IN_OUT_REG_2, zero);
21883
22427
  return;
21884
22428
  case PreimageStatusKind.Unavailable:
21885
- logger_logger.trace `QUERY(${hash}, ${length}) <- UNAVAILABLE [${result.data.join(", ")}]`;
22429
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- UNAVAILABLE [${result.data.join(", ")}]`;
21886
22430
  regs.set(IN_OUT_REG_1, numbers_tryAsU64((BigInt(result.data[0]) << UPPER_BITS_SHIFT) + 2n));
21887
22431
  regs.set(IN_OUT_REG_2, numbers_tryAsU64(result.data[1]));
21888
22432
  return;
21889
22433
  case PreimageStatusKind.Reavailable:
21890
- logger_logger.trace `QUERY(${hash}, ${length}) <- REAVAILABLE [${result.data.join(", ")}]`;
22434
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- REAVAILABLE [${result.data.join(", ")}]`;
21891
22435
  regs.set(IN_OUT_REG_1, numbers_tryAsU64((BigInt(result.data[0]) << UPPER_BITS_SHIFT) + 3n));
21892
22436
  regs.set(IN_OUT_REG_2, numbers_tryAsU64((BigInt(result.data[2]) << UPPER_BITS_SHIFT) + BigInt(result.data[1])));
21893
22437
  return;
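The QUERY result packs the preimage status and its lookup-history slots into two u64 registers. A small sketch of the packing for the AVAILABLE case, assuming `UPPER_BITS_SHIFT` is 32n (the constant is defined elsewhere in the bundle) and a made-up timeslot:

const UPPER_BITS_SHIFT = 32n; // assumption for this sketch
const availableSince = 100n;  // hypothetical timeslot taken from result.data[0]
const reg1 = (availableSince << UPPER_BITS_SHIFT) + 1n; // 0x0000006400000001n
// low 32 bits: status discriminant (1 = AVAILABLE), high 32 bits: the timeslot; reg2 stays 0.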
@@ -21928,11 +22472,11 @@ class Solicit {
21928
22472
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
21929
22473
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
21930
22474
  if (memoryReadResult.isError) {
21931
- logger_logger.trace `SOLICIT(${hash}, ${length}) <- PANIC`;
22475
+ logger_logger.trace `[${this.currentServiceId}] SOLICIT(${hash}, ${length}) <- PANIC`;
21932
22476
  return PvmExecution.Panic;
21933
22477
  }
21934
22478
  const result = this.partialState.requestPreimage(hash.asOpaque(), length);
21935
- logger_logger.trace `SOLICIT(${hash}, ${length}) <- ${resultToString(result)}`;
22479
+ logger_logger.trace `[${this.currentServiceId}] SOLICIT(${hash}, ${length}) <- ${resultToString(result)}`;
21936
22480
  if (result.isOk) {
21937
22481
  regs.set(solicit_IN_OUT_REG, HostCallResult.OK);
21938
22482
  return;
@@ -21990,7 +22534,7 @@ class Transfer {
21990
22534
  */
21991
22535
  basicGasCost = compatibility_Compatibility.isGreaterOrEqual(compatibility_GpVersion.V0_7_2)
21992
22536
  ? gas_tryAsSmallGas(10)
21993
- : (regs) => tryAsGas(10n + regs.get(TRANSFER_GAS_FEE_REG));
22537
+ : (regs) => gas_tryAsGas(10n + regs.get(TRANSFER_GAS_FEE_REG));
21994
22538
  tracedRegisters = traceRegisters(transfer_IN_OUT_REG, AMOUNT_REG, TRANSFER_GAS_FEE_REG, MEMO_START_REG);
21995
22539
  constructor(currentServiceId, partialState) {
21996
22540
  this.currentServiceId = currentServiceId;
@@ -22009,16 +22553,16 @@ class Transfer {
22009
22553
  const memoryReadResult = memory.loadInto(memo.raw, memoStart);
22010
22554
  // page fault while reading the memory.
22011
22555
  if (memoryReadResult.isError) {
22012
- logger_logger.trace `TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- PANIC`;
22556
+ logger_logger.trace `[${this.currentServiceId}] TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- PANIC`;
22013
22557
  return PvmExecution.Panic;
22014
22558
  }
22015
22559
  const transferResult = this.partialState.transfer(destination, amount, transferGasFee, memo);
22016
- logger_logger.trace `TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- ${resultToString(transferResult)}`;
22560
+ logger_logger.trace `[${this.currentServiceId}] TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- ${resultToString(transferResult)}`;
22017
22561
  // All good!
22018
22562
  if (transferResult.isOk) {
22019
22563
  if (compatibility_Compatibility.isGreaterOrEqual(compatibility_GpVersion.V0_7_2)) {
22020
22564
  // substracting value `t`
22021
- const underflow = gas.sub(tryAsGas(transferGasFee));
22565
+ const underflow = gas.sub(gas_tryAsGas(transferGasFee));
22022
22566
  if (underflow) {
22023
22567
  return PvmExecution.OOG;
22024
22568
  }
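For Gray Paper 0.7.2+ the TRANSFER host call also charges the declared gas fee `t` against the remaining gas and terminates with OOG when it does not fit. A minimal sketch of that underflow-checked subtraction (illustrative names, not the bundle's GasCounter API):

// Returns the remaining gas, or null to signal underflow (the caller maps null to OOG).
function chargeTransferFee(remaining: bigint, fee: bigint): bigint | null {
  return fee > remaining ? null : remaining - fee;
}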
@@ -22079,11 +22623,11 @@ class Upgrade {
22079
22623
  const codeHash = bytes_Bytes.zero(hash_HASH_SIZE);
22080
22624
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
22081
22625
  if (memoryReadResult.isError) {
22082
- logger_logger.trace `UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`;
22626
+ logger_logger.trace `[${this.currentServiceId}] UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`;
22083
22627
  return PvmExecution.Panic;
22084
22628
  }
22085
22629
  this.partialState.upgradeService(codeHash.asOpaque(), gas, allowance);
22086
- logger_logger.trace `UPGRADE(${codeHash}, ${gas}, ${allowance})`;
22630
+ logger_logger.trace `[${this.currentServiceId}] UPGRADE(${codeHash}, ${gas}, ${allowance})`;
22087
22631
  regs.set(upgrade_IN_OUT_REG, HostCallResult.OK);
22088
22632
  }
22089
22633
  }
@@ -22117,11 +22661,11 @@ class Yield {
22117
22661
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
22118
22662
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
22119
22663
  if (memoryReadResult.isError) {
22120
- logger_logger.trace `YIELD() <- PANIC`;
22664
+ logger_logger.trace `[${this.currentServiceId}] YIELD() <- PANIC`;
22121
22665
  return PvmExecution.Panic;
22122
22666
  }
22123
22667
  this.partialState.yield(hash);
22124
- logger_logger.trace `YIELD(${hash})`;
22668
+ logger_logger.trace `[${this.currentServiceId}] YIELD(${hash})`;
22125
22669
  regs.set(yield_IN_OUT_REG, HostCallResult.OK);
22126
22670
  }
22127
22671
  }
@@ -22164,10 +22708,10 @@ class Fetch {
22164
22708
  const chunk = value === null ? new Uint8Array() : value.raw.subarray(Number(offset), Number(offset + length));
22165
22709
  const storeResult = memory.storeFrom(output, chunk);
22166
22710
  if (storeResult.isError) {
22167
- logger_logger.trace `FETCH(${kind}) <- PANIC`;
22711
+ logger_logger.trace `[${this.currentServiceId}] FETCH(${kind}) <- PANIC`;
22168
22712
  return PvmExecution.Panic;
22169
22713
  }
22170
- logger_logger.trace `FETCH(${kind}) <- ${value?.toStringTruncated()}`;
22714
+ logger_logger.trace `[${this.currentServiceId}] FETCH(${kind}) <- ${value?.toStringTruncated()}`;
22171
22715
  // write result
22172
22716
  regs.set(fetch_IN_OUT_REG, value === null ? HostCallResult.NONE : valueLength);
22173
22717
  }
@@ -22317,7 +22861,7 @@ class LogHostCall {
22317
22861
  }
22318
22862
  memory.loadInto(message, msgStart);
22319
22863
  const level = clampU64ToU32(lvl);
22320
- logger_logger.trace `LOG(${this.currentServiceId}, ${level < Levels.UNKNOWN ? Levels[level] : Levels[Levels.UNKNOWN]}(${lvl}), ${decoder.decode(target)}, ${decoder.decode(message)})`;
22864
+ logger_logger.trace `[${this.currentServiceId}] LOG(${this.currentServiceId}, ${level < Levels.UNKNOWN ? Levels[level] : Levels[Levels.UNKNOWN]}(${lvl}), ${decoder.decode(target)}, ${decoder.decode(message)})`;
22321
22865
  return Promise.resolve(undefined);
22322
22866
  }
22323
22867
  }
@@ -22358,12 +22902,12 @@ class Lookup {
22358
22902
  const preImageHash = bytes_Bytes.zero(hash_HASH_SIZE);
22359
22903
  const memoryReadResult = memory.loadInto(preImageHash.raw, hashAddress);
22360
22904
  if (memoryReadResult.isError) {
22361
- logger_logger.trace `LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`;
22905
+ logger_logger.trace `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`;
22362
22906
  return PvmExecution.Panic;
22363
22907
  }
22364
22908
  // v
22365
22909
  const preImage = this.account.lookup(serviceId, preImageHash);
22366
- logger_logger.trace `LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated() ?? "<missing>"}...`;
22910
+ logger_logger.trace `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated() ?? "<missing>"}...`;
22367
22911
  const preImageLength = preImage === null ? numbers_tryAsU64(0) : numbers_tryAsU64(preImage.raw.length);
22368
22912
  const preimageBlobOffset = regs.get(10);
22369
22913
  const lengthToWrite = regs.get(11);
@@ -22460,20 +23004,20 @@ class Read {
22460
23004
  const chunk = value === null ? safe_alloc_uint8array_safeAllocUint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
22461
23005
  const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
22462
23006
  if (memoryWriteResult.isError) {
22463
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
23007
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- PANIC`;
22464
23008
  return PvmExecution.Panic;
22465
23009
  }
22466
23010
  if (value === null) {
22467
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- NONE`;
23011
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- NONE`;
22468
23012
  regs.set(read_IN_OUT_REG, HostCallResult.NONE);
22469
23013
  return;
22470
23014
  }
22471
23015
  if (chunk.length > 0) {
22472
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`;
23016
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`;
22473
23017
  }
22474
23018
  else {
22475
23019
  // just a query for length of stored data
22476
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- (${valueLength} ${valueLength === 1n ? "byte" : "bytes"})`;
23020
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- (${valueLength} ${valueLength === 1n ? "byte" : "bytes"})`;
22477
23021
  }
22478
23022
  regs.set(read_IN_OUT_REG, valueLength);
22479
23023
  }
@@ -22517,7 +23061,7 @@ class Write {
22517
23061
  const rawStorageKey = safe_alloc_uint8array_safeAllocUint8Array(storageKeyLengthClamped);
22518
23062
  const keyLoadingResult = memory.loadInto(rawStorageKey, storageKeyStartAddress);
22519
23063
  if (keyLoadingResult.isError) {
22520
- logger_logger.trace `WRITE() <- PANIC`;
23064
+ logger_logger.trace `[${this.currentServiceId}] WRITE() <- PANIC`;
22521
23065
  return PvmExecution.Panic;
22522
23066
  }
22523
23067
  // k
@@ -22527,14 +23071,14 @@ class Write {
22527
23071
  const valueLoadingResult = memory.loadInto(value, valueStart);
22528
23072
  // Note [MaSo] this is ok to return because if valueLength is 0, then this panic won't happen
22529
23073
  if (valueLoadingResult.isError) {
22530
- logger_logger.trace `WRITE(${storageKey}) <- PANIC`;
23074
+ logger_logger.trace `[${this.currentServiceId}] WRITE(${storageKey}) <- PANIC`;
22531
23075
  return PvmExecution.Panic;
22532
23076
  }
22533
23077
  /** https://graypaper.fluffylabs.dev/#/9a08063/33af0133b201?v=0.6.6 */
22534
23078
  const maybeValue = valueLength === 0n ? null : bytes_BytesBlob.blobFrom(value);
22535
23079
  // a
22536
23080
  const result = this.account.write(storageKey, maybeValue);
22537
- logger_logger.trace `WRITE(${storageKey}, ${maybeValue?.toStringTruncated() ?? "remove"}) <- ${resultToString(result)}`;
23081
+ logger_logger.trace `[${this.currentServiceId}] WRITE(${storageKey}, ${maybeValue?.toStringTruncated() ?? "remove"}) <- ${resultToString(result)}`;
22538
23082
  if (result.isError) {
22539
23083
  regs.set(write_IN_OUT_REG, HostCallResult.FULL);
22540
23084
  return;
@@ -22763,7 +23307,7 @@ class Accumulate {
22763
23307
  serviceId,
22764
23308
  argsLength: numbers_tryAsU32(transfers.length + operands.length),
22765
23309
  });
22766
- const result = await executor.run(invocationArgs, tryAsGas(gas));
23310
+ const result = await executor.run(invocationArgs, gas_tryAsGas(gas));
22767
23311
  const [newState, checkpoint] = partialState.getStateUpdates();
22768
23312
  /**
22769
23313
  * PVM invocation returned an error, so we return the checkpoint
@@ -22964,19 +23508,19 @@ class Accumulate {
22964
23508
  for (let serviceIndex = 0; serviceIndex < serviceIdsLength; serviceIndex += 1) {
22965
23509
  const serviceId = serviceIds[serviceIndex];
22966
23510
  const checkpoint = AccumulationStateUpdate.copyFrom(inputStateUpdate);
22967
- const promise = this.accumulateSingleService(serviceId, accumulateData.getTransfers(serviceId), accumulateData.getOperands(serviceId), accumulateData.getGasLimit(serviceId), slot, entropy, AccumulationStateUpdate.copyFrom(inputStateUpdate)).then(({ consumedGas, stateUpdate }) => ({
22968
- consumedGas,
22969
- stateUpdate: stateUpdate === null ? checkpoint : stateUpdate,
22970
- }));
23511
+ const promise = this.accumulateSingleService(serviceId, accumulateData.getTransfers(serviceId), accumulateData.getOperands(serviceId), accumulateData.getGasLimit(serviceId), slot, entropy, AccumulationStateUpdate.copyFrom(inputStateUpdate)).then(({ consumedGas, stateUpdate }) => {
23512
+ const resultEntry = [
23513
+ serviceId,
23514
+ {
23515
+ consumedGas,
23516
+ stateUpdate: stateUpdate === null ? checkpoint : stateUpdate,
23517
+ },
23518
+ ];
23519
+ return resultEntry;
23520
+ });
22971
23521
  resultPromises[serviceIndex] = promise;
22972
23522
  }
22973
- return Promise.all(resultPromises).then((results) => {
22974
- const map = new Map();
22975
- for (let serviceIndex = 0; serviceIndex < serviceIdsLength; serviceIndex += 1) {
22976
- map.set(serviceIds[serviceIndex], results[serviceIndex]);
22977
- }
22978
- return map;
22979
- });
23523
+ return Promise.all(resultPromises).then((results) => new Map(results));
22980
23524
  }
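Each per-service promise now resolves to a `[serviceId, result]` entry, so the result map can be built directly from `Promise.all`. A standalone sketch of the pattern with made-up ids:

async function collectResults(): Promise<Map<number, string>> {
  const serviceIds = [3, 1, 2];
  const entries = serviceIds.map(async (id): Promise<[number, string]> => [id, `result-${id}`]);
  // Promise.all preserves input order, so the Map iterates in the same order as serviceIds.
  return new Map(await Promise.all(entries));
}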
22981
23525
  /**
22982
23526
  * A method that updates `recentlyAccumulated`, `accumulationQueue` and `timeslot` in state
@@ -23065,9 +23609,10 @@ class Accumulate {
23065
23609
  const _gasCost = gasCost;
23066
23610
  assertEmpty(rest);
23067
23611
  const accumulated = accumulatableReports.subview(0, accumulatedReports);
23068
- const { yieldedRoot, services, transfers: _transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
23612
+ const { yieldedRoot, services, transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
23069
23613
  assertEmpty(stateUpdateRest);
23070
- // yielded root is retrieved after each pvm invocation so we can ignore it here
23614
+ // transfers and yielded root are retrieved after each pvm invocation so we can ignore it here
23615
+ const _transfers = transfers;
23071
23616
  const _yieldedRoot = yieldedRoot;
23072
23617
  if (this.hasDuplicatedServiceIdCreated(services.created)) {
23073
23618
  accumulate_logger.trace `Duplicated Service creation detected. Block is invalid.`;
@@ -23166,7 +23711,7 @@ class DeferredTransfers {
23166
23711
  partiallyUpdatedState.updateServiceInfo(serviceId, newInfo);
23167
23712
  const partialState = new AccumulateExternalities(this.chainSpec, this.blake2b, partiallyUpdatedState, serviceId, serviceId, timeslot);
23168
23713
  const fetchExternalities = FetchExternalities.createForOnTransfer({ entropy, transfers }, this.chainSpec);
23169
- let consumedGas = tryAsGas(0);
23714
+ let consumedGas = gas_tryAsGas(0);
23170
23715
  const hasTransfers = transfers.length > 0;
23171
23716
  const isCodeCorrect = code !== null && code.length <= W_C;
23172
23717
  if (!hasTransfers || !isCodeCorrect) {
@@ -23184,7 +23729,7 @@ class DeferredTransfers {
23184
23729
  const executor = await PvmExecutor.createOnTransferExecutor(serviceId, code, { partialState, fetchExternalities }, this.pvm);
23185
23730
  const args = encoder_Encoder.encodeObject(deferred_transfers_ARGS_CODEC, { timeslot, serviceId, transfersLength: numbers_tryAsU32(transfers.length) }, this.chainSpec);
23186
23731
  const gas = transfers.reduce((acc, item) => acc + item.gas, 0n);
23187
- consumedGas = (await executor.run(args, tryAsGas(gas))).consumedGas;
23732
+ consumedGas = (await executor.run(args, gas_tryAsGas(gas))).consumedGas;
23188
23733
  }
23189
23734
  transferStatistics.set(serviceId, { count: numbers_tryAsU32(transfers.length), gasUsed: tryAsServiceGas(consumedGas) });
23190
23735
  const [updatedState] = partialState.getStateUpdates();