@typeberry/jam 0.4.0 → 0.4.1-69ce381

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3550,7 +3550,11 @@ var TestSuite;
3550
3550
  })(TestSuite || (TestSuite = {}));
3551
3551
  const ALL_VERSIONS_IN_ORDER = [compatibility_GpVersion.V0_6_7, compatibility_GpVersion.V0_7_0, compatibility_GpVersion.V0_7_1, compatibility_GpVersion.V0_7_2];
3552
3552
  const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
3553
- const DEFAULT_VERSION = compatibility_GpVersion.V0_7_2;
3553
+ /**
3554
+ * Current version is set to track the jam-conformance testing.
3555
+ * Since we are currently at 0.7.1, not 0.7.2, we set our default version accordingly.
3556
+ */
3557
+ const DEFAULT_VERSION = compatibility_GpVersion.V0_7_1;
3554
3558
  const env = typeof process === "undefined" ? {} : process.env;
3555
3559
  let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
3556
3560
  let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
@@ -3609,8 +3613,8 @@ class compatibility_Compatibility {
3609
3613
  /**
3610
3614
  * Allows selecting different values for different Gray Paper versions from one record.
3611
3615
  *
3612
- * @param fallback The default value to return if no value is found for the current.
3613
- * @param record A record mapping versions to values, checking if the version is greater or equal to the current version.
3616
+ * fallback The default value to return if no value is found for the current.
3617
+ * versions A record mapping versions to values, checking if the version is greater or equal to the current version.
3614
3618
  * @returns The value for the current version, or the default value.
3615
3619
  */
3616
3620
  static selectIfGreaterOrEqual({ fallback, versions, }) {
@@ -3773,7 +3777,7 @@ const workspacePathFix = dev_env.NODE_ENV === "development"
3773
3777
 
3774
3778
  ;// CONCATENATED MODULE: ./packages/core/utils/opaque.ts
3775
3779
  /**
3776
- * @fileoverview `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
3780
+ * `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
3777
3781
  * specified unique token Token. It means that base type cannot be assigned to unique type by accident.
3778
3782
  * Good examples of opaque types include:
3779
3783
  * - JWTs or other tokens - these are special kinds of string used for authorization purposes.
@@ -7100,11 +7104,9 @@ function sequenceViewFixLen(type, { fixedLength }) {
7100
7104
 
7101
7105
  /** Helper function to create most used hashes in the block */
7102
7106
  class TransitionHasher {
7103
- context;
7104
7107
  keccakHasher;
7105
7108
  blake2b;
7106
- constructor(context, keccakHasher, blake2b) {
7107
- this.context = context;
7109
+ constructor(keccakHasher, blake2b) {
7108
7110
  this.keccakHasher = keccakHasher;
7109
7111
  this.blake2b = blake2b;
7110
7112
  }
@@ -7226,9 +7228,438 @@ class ArrayView {
7226
7228
  }
7227
7229
  }
7228
7230
 
7231
+ ;// CONCATENATED MODULE: ./packages/core/collections/blob-dictionary.ts
7232
+
7233
+
7234
+ /** A map which uses byte blobs as keys */
7235
+ class BlobDictionary extends WithDebug {
7236
+ mapNodeThreshold;
7237
+ /**
7238
+ * The root node of the dictionary.
7239
+ *
7240
+ * This is the main internal data structure that organizes entries
7241
+ * in a tree-like fashion (array-based nodes up to `mapNodeThreshold`,
7242
+ * map-based nodes beyond it). All insertions, updates, and deletions
7243
+ * operate through this structure.
7244
+ */
7245
+ root = Node.withList();
7246
+ /**
7247
+ * Auxiliary map that stores references to the original keys and their values.
7248
+ *
7249
+ * - Overriding a value in the main structure does not replace the original key reference.
7250
+ * - Used for efficient iteration over `keys()`, `values()`, `entries()`, and computing `size`.
7251
+ */
7252
+ keyvals = new Map();
7253
+ /**
7254
+ * Protected constructor used internally by `BlobDictionary.new`
7255
+ * and `BlobDictionary.fromEntries`.
7256
+ *
7257
+ * This enforces controlled instantiation — users should create instances
7258
+ * through the provided static factory methods instead of calling the
7259
+ * constructor directly.
7260
+ *
7261
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
7262
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
7263
+ */
7264
+ constructor(mapNodeThreshold) {
7265
+ super();
7266
+ this.mapNodeThreshold = mapNodeThreshold;
7267
+ }
7268
+ /**
7269
+ * Returns the number of entries in the dictionary.
7270
+ *
7271
+ * The count is derived from the auxiliary `keyvals` map, which stores
7272
+ * all original key references and their associated values. This ensures
7273
+ * that the `size` reflects the actual number of entries, independent of
7274
+ * internal overrides in the main `root` structure.
7275
+ *
7276
+ * @returns The total number of entries in the dictionary.
7277
+ */
7278
+ get size() {
7279
+ return this.keyvals.size;
7280
+ }
7281
+ [TEST_COMPARE_USING]() {
7282
+ const vals = Array.from(this);
7283
+ vals.sort((a, b) => a[0].compare(b[0]).value);
7284
+ return vals;
7285
+ }
7286
+ /**
7287
+ * Creates an empty `BlobDictionary`.
7288
+ *
7289
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
7290
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
7291
+ * Defaults to `0`.
7292
+ *
7293
+ * @returns A new, empty `BlobDictionary` instance.
7294
+ */
7295
+ static new(mapNodeThreshold = 0) {
7296
+ return new BlobDictionary(mapNodeThreshold);
7297
+ }
7298
+ /**
7299
+ * Creates a new `BlobDictionary` initialized with the given entries.
7300
+ *
7301
+ * @param entries - An array of `[key, value]` pairs used to populate the dictionary.
7302
+ * @param mapNodeThreshold - The threshold that determines when the dictionary
7303
+ * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
7304
+ * Defaults to `0`.
7305
+ *
7306
+ * @returns A new `BlobDictionary` containing the provided entries.
7307
+ */
7308
+ static fromEntries(entries, mapNodeThreshold) {
7309
+ const dict = BlobDictionary.new(mapNodeThreshold);
7310
+ for (const [key, value] of entries) {
7311
+ dict.set(key, value);
7312
+ }
7313
+ return dict;
7314
+ }
7315
+ /**
7316
+ * Internal helper that inserts, updates or deletes an entry in the dictionary.
7317
+ *
7318
+ * Behaviour details:
7319
+ * - Passing `undefined` as `value` indicates a deletion. (E.g. `delete` uses `internalSet(key, undefined)`.)
7320
+ * - When an add (new entry) or a delete actually changes the structure, the method returns the affected leaf node.
7321
+ * - When the call only overrides an existing value (no structural add/delete), the method returns `null`.
7322
+ *
7323
+ * This method is intended for internal use by the dictionary implementation and allows `undefined` as a
7324
+ * sentinel value to signal removals.
7325
+ *
7326
+ * @param key - The key to insert, update or remove.
7327
+ * @param value - The value to associate with the key, or `undefined` to remove the key.
7328
+ * @returns The leaf node created or removed on add/delete, or `null` if the operation only overwrote an existing value.
7329
+ */
7330
+ internalSet(key, value) {
7331
+ let node = this.root;
7332
+ const keyChunkGenerator = key.chunks(CHUNK_SIZE);
7333
+ let depth = 0;
7334
+ for (;;) {
7335
+ const maybeKeyChunk = keyChunkGenerator.next().value;
7336
+ if (maybeKeyChunk === undefined) {
7337
+ if (value === undefined) {
7338
+ return node.remove(key);
7339
+ }
7340
+ return node.set(key, value);
7341
+ }
7342
+ const keyChunk = opaque_asOpaqueType(maybeKeyChunk);
7343
+ if (node.children instanceof ListChildren) {
7344
+ const subkey = bytes_BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE * depth));
7345
+ const leaf = value !== undefined ? node.children.insert(subkey, { key, value }) : node.children.remove(subkey);
7346
+ if (subkey.length > CHUNK_SIZE && node.children.children.length > this.mapNodeThreshold) {
7347
+ node.convertListChildrenToMap();
7348
+ }
7349
+ return leaf;
7350
+ }
7351
+ depth += 1;
7352
+ const children = node.children;
7353
+ if (children instanceof ListChildren) {
7354
+ throw new Error("We handle list node earlier. If we fall through, we know it's for the `Map` case.");
7355
+ }
7356
+ if (children instanceof MapChildren) {
7357
+ const maybeNode = children.getChild(keyChunk);
7358
+ if (maybeNode !== undefined) {
7359
+ // simply go one level deeper
7360
+ node = maybeNode;
7361
+ }
7362
+ else {
7363
+ // we are trying to remove an item, but it does not exist
7364
+ if (value === undefined) {
7365
+ return null;
7366
+ }
7367
+ // no more child nodes, we insert a new one.
7368
+ const newNode = Node.withList();
7369
+ children.setChild(keyChunk, newNode);
7370
+ node = newNode;
7371
+ }
7372
+ continue;
7373
+ }
7374
+ assertNever(children);
7375
+ }
7376
+ }
7377
+ /**
7378
+ * Adds a new entry to the dictionary or updates the value of an existing key.
7379
+ *
7380
+ * If an entry with the given key already exists, its value is replaced
7381
+ * with the new one.
7382
+ *
7383
+ * @param key - The key to add or update in the dictionary.
7384
+ * @param value - The value to associate with the specified key.
7385
+ * @returns Nothing (`void`).
7386
+ */
7387
+ set(key, value) {
7388
+ const leaf = this.internalSet(key, value);
7389
+ if (leaf !== null) {
7390
+ this.keyvals.set(leaf.key, leaf);
7391
+ }
7392
+ }
7393
+ /**
7394
+ * Retrieves the value associated with the given key from the dictionary.
7395
+ *
7396
+ * If the key does not exist, this method returns `undefined`.
7397
+ *
7398
+ * @param key - The key whose associated value should be retrieved.
7399
+ * @returns The value associated with the specified key, or `undefined` if the key is not present.
7400
+ */
7401
+ get(key) {
7402
+ let node = this.root;
7403
+ const pathChunksGenerator = key.chunks(CHUNK_SIZE);
7404
+ let depth = 0;
7405
+ while (node !== undefined) {
7406
+ const maybePathChunk = pathChunksGenerator.next().value;
7407
+ if (node.children instanceof ListChildren) {
7408
+ const subkey = bytes_BytesBlob.blobFrom(key.raw.subarray(depth * CHUNK_SIZE));
7409
+ const child = node.children.find(subkey);
7410
+ if (child !== null) {
7411
+ return child.value;
7412
+ }
7413
+ }
7414
+ if (maybePathChunk === undefined) {
7415
+ return node.getLeaf()?.value;
7416
+ }
7417
+ if (node.children instanceof MapChildren) {
7418
+ const pathChunk = opaque_asOpaqueType(maybePathChunk);
7419
+ node = node.children.getChild(pathChunk);
7420
+ depth += 1;
7421
+ }
7422
+ }
7423
+ return undefined;
7424
+ }
7425
+ /**
7426
+ * Checks whether the dictionary contains an entry for the given key.
7427
+ *
7428
+ * ⚠️ **Note:** Avoid using `has(...)` together with `get(...)` in a pattern like this:
7429
+ *
7430
+ * ```ts
7431
+ * if (dict.has(key)) {
7432
+ * const value = dict.get(key);
7433
+ * ...
7434
+ * }
7435
+ * ```
7436
+ *
7437
+ * This approach performs two lookups for the same key.
7438
+ *
7439
+ * Instead, prefer the following pattern, which retrieves the value once:
7440
+ *
7441
+ * ```ts
7442
+ * const value = dict.get(key);
7443
+ * if (value !== undefined) {
7444
+ * ...
7445
+ * }
7446
+ * ```
7447
+ *
7448
+ * @param key - The key to check for.
7449
+ * @returns `true` if the dictionary contains an entry for the given key, otherwise `false`.
7450
+ */
7451
+ has(key) {
7452
+ return this.get(key) !== undefined;
7453
+ }
7454
+ /**
7455
+ * Removes an entry with the specified key from the dictionary.
7456
+ *
7457
+ * Internally, this calls {@link internalSet} with `undefined` to mark the entry as deleted.
7458
+ *
7459
+ * @param key - The key of the entry to remove.
7460
+ * @returns `true` if an entry was removed (i.e. the key existed), otherwise `false`.
7461
+ */
7462
+ delete(key) {
7463
+ const leaf = this.internalSet(key, undefined);
7464
+ if (leaf !== null) {
7465
+ this.keyvals.delete(leaf.key);
7466
+ return true;
7467
+ }
7468
+ return false;
7469
+ }
7470
+ /**
7471
+ * Returns an iterator over the keys in the dictionary.
7472
+ *
7473
+ * The iterator yields each key in insertion order.
7474
+ *
7475
+ * @returns An iterator over all keys in the dictionary.
7476
+ */
7477
+ keys() {
7478
+ return this.keyvals.keys();
7479
+ }
7480
+ /**
7481
+ * Returns an iterator over the values in the dictionary.
7482
+ *
7483
+ * The iterator yields each value in insertion order.
7484
+ *
7485
+ * @returns An iterator over all values in the dictionary.
7486
+ */
7487
+ *values() {
7488
+ for (const leaf of this.keyvals.values()) {
7489
+ yield leaf.value;
7490
+ }
7491
+ }
7492
+ /**
7493
+ * Returns an iterator over the `[key, value]` pairs in the dictionary.
7494
+ *
7495
+ * The iterator yields entries in insertion order.
7496
+ *
7497
+ * @returns An iterator over `[key, value]` tuples for each entry in the dictionary.
7498
+ */
7499
+ *entries() {
7500
+ for (const leaf of this.keyvals.values()) {
7501
+ yield [leaf.key, leaf.value];
7502
+ }
7503
+ }
7504
+ /**
7505
+ * Default iterator for the dictionary.
7506
+ *
7507
+ * Equivalent to calling {@link entries}.
7508
+ * Enables iteration with `for...of`:
7509
+ *
7510
+ * ```ts
7511
+ * for (const [key, value] of dict) {
7512
+ * ...
7513
+ * }
7514
+ * ```
7515
+ *
7516
+ * @returns An iterator over `[key, value]` pairs.
7517
+ */
7518
+ [Symbol.iterator]() {
7519
+ return this.entries();
7520
+ }
7521
+ /**
7522
+ * Creates a new sorted array of values, ordered by their corresponding keys.
7523
+ *
7524
+ * Iterates over all entries in the dictionary and sorts them according
7525
+ * to the provided comparator function applied to the keys.
7526
+ *
7527
+ * @param comparator - A comparator function that can compare two keys.
7528
+ *
7529
+ * @returns A new array containing all values from the dictionary,
7530
+ * sorted according to their keys.
7531
+ */
7532
+ toSortedArray(comparator) {
7533
+ const vals = Array.from(this);
7534
+ vals.sort((a, b) => comparator(a[0], b[0]).value);
7535
+ return vals.map((x) => x[1]);
7536
+ }
7537
+ }
7538
+ const CHUNK_SIZE = 6;
7539
+ /**
7540
+ * A function to transform a bytes chunk (up to 6 bytes) into a U48 number.
7541
+ *
7542
+ * Note that it uses 3 additional bits to store the length (`value * 8 + len`),
7543
+ * It is needed to distinguish shorter chunks that have 0s at the end, for example: [1, 2] and [1, 2, 0]
7544
+ * */
7545
+ function bytesAsU48(bytes) {
7546
+ const len = bytes.length;
7547
+ debug_check `${len <= CHUNK_SIZE} Length has to be <= ${CHUNK_SIZE}, got: ${len}`;
7548
+ let value = bytes[3] | (bytes[2] << 8) | (bytes[1] << 16) | (bytes[0] << 24);
7549
+ for (let i = 4; i < bytes.length; i++) {
7550
+ value = value * 256 + bytes[i];
7551
+ }
7552
+ return value * 8 + len;
7553
+ }
7554
+ class Node {
7555
+ leaf;
7556
+ children;
7557
+ convertListChildrenToMap() {
7558
+ if (!(this.children instanceof ListChildren)) {
7559
+ return;
7560
+ }
7561
+ this.children = MapChildren.fromListNode(this.children);
7562
+ }
7563
+ static withList() {
7564
+ return new Node(undefined, ListChildren.new());
7565
+ }
7566
+ static withMap() {
7567
+ return new Node(undefined, MapChildren.new());
7568
+ }
7569
+ constructor(leaf, children) {
7570
+ this.leaf = leaf;
7571
+ this.children = children;
7572
+ }
7573
+ getLeaf() {
7574
+ return this.leaf;
7575
+ }
7576
+ remove(_key) {
7577
+ if (this.leaf === undefined) {
7578
+ return null;
7579
+ }
7580
+ const removedLeaf = this.leaf;
7581
+ this.leaf = undefined;
7582
+ return removedLeaf;
7583
+ }
7584
+ set(key, value) {
7585
+ if (this.leaf === undefined) {
7586
+ this.leaf = { key, value };
7587
+ return this.leaf;
7588
+ }
7589
+ this.leaf.value = value;
7590
+ return null;
7591
+ }
7592
+ }
7593
+ class ListChildren {
7594
+ children = [];
7595
+ constructor() { }
7596
+ find(key) {
7597
+ const result = this.children.find((item) => item[0].isEqualTo(key));
7598
+ if (result !== undefined) {
7599
+ return result[1];
7600
+ }
7601
+ return null;
7602
+ }
7603
+ remove(key) {
7604
+ const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
7605
+ if (existingIndex >= 0) {
7606
+ const ret = this.children.splice(existingIndex, 1);
7607
+ return ret[0][1];
7608
+ }
7609
+ return null;
7610
+ }
7611
+ insert(key, leaf) {
7612
+ const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
7613
+ if (existingIndex >= 0) {
7614
+ const existing = this.children[existingIndex];
7615
+ existing[1].value = leaf.value;
7616
+ return null;
7617
+ }
7618
+ this.children.push([key, leaf]);
7619
+ return leaf;
7620
+ }
7621
+ static new() {
7622
+ return new ListChildren();
7623
+ }
7624
+ }
7625
+ class MapChildren {
7626
+ children = new Map();
7627
+ constructor() { }
7628
+ static new() {
7629
+ return new MapChildren();
7630
+ }
7631
+ static fromListNode(node) {
7632
+ const mapNode = new MapChildren();
7633
+ for (const [key, leaf] of node.children) {
7634
+ const currentKeyChunk = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(key.raw.subarray(0, CHUNK_SIZE)));
7635
+ const subKey = bytes_BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE));
7636
+ let child = mapNode.getChild(currentKeyChunk);
7637
+ if (child === undefined) {
7638
+ child = Node.withList();
7639
+ mapNode.setChild(currentKeyChunk, child);
7640
+ }
7641
+ const children = child.children;
7642
+ children.insert(subKey, leaf);
7643
+ }
7644
+ return mapNode;
7645
+ }
7646
+ getChild(keyChunk) {
7647
+ const chunkAsNumber = bytesAsU48(keyChunk.raw);
7648
+ return this.children.get(chunkAsNumber);
7649
+ }
7650
+ setChild(keyChunk, node) {
7651
+ const chunkAsNumber = bytesAsU48(keyChunk.raw);
7652
+ this.children.set(chunkAsNumber, node);
7653
+ }
7654
+ }
7655
+
7229
7656
  ;// CONCATENATED MODULE: ./packages/core/collections/hash-dictionary.ts
7230
- /** A map which uses hashes as keys. */
7231
- class hash_dictionary_HashDictionary {
7657
+ /**
7658
+ * A map which uses hashes as keys.
7659
+ *
7660
+ * @deprecated
7661
+ * */
7662
+ class StringHashDictionary {
7232
7663
  // TODO [ToDr] [crit] We can't use `TrieHash` directly in the map,
7233
7664
  // because of the way it's being compared. Hence having `string` here.
7234
7665
  // This has to be benchmarked and re-written to a custom map most likely.
@@ -7294,6 +7725,17 @@ class hash_dictionary_HashDictionary {
7294
7725
  }
7295
7726
  }
7296
7727
 
7728
+ /**
7729
+ * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
7730
+ * In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
7731
+ */
7732
+ const BLOB_DICTIONARY_THRESHOLD = 5;
7733
+ class hash_dictionary_HashDictionary extends BlobDictionary {
7734
+ constructor() {
7735
+ super(BLOB_DICTIONARY_THRESHOLD);
7736
+ }
7737
+ }
7738
+
7297
7739
  ;// CONCATENATED MODULE: ./packages/core/collections/hash-set.ts
7298
7740
 
7299
7741
  /** A set specialized for storing hashes. */
@@ -7758,6 +8200,18 @@ class SortedSet extends SortedArray {
7758
8200
 
7759
8201
 
7760
8202
 
8203
+ function getTruncatedKey(key) {
8204
+ // Always return exactly TRUNCATED_HASH_SIZE bytes.
8205
+ if (key.length === TRUNCATED_HASH_SIZE) {
8206
+ return key;
8207
+ }
8208
+ return bytes_Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE);
8209
+ }
8210
+ /**
8211
+ * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
8212
+ * In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
8213
+ */
8214
+ const truncated_hash_dictionary_BLOB_DICTIONARY_THRESHOLD = 5;
7761
8215
  /**
7762
8216
  * A collection of hash-based keys (likely `StateKey`s) which ignores
7763
8217
  * differences on the last byte.
@@ -7770,48 +8224,37 @@ class TruncatedHashDictionary {
7770
8224
  * Each key will be copied and have the last byte replace with a 0.
7771
8225
  */
7772
8226
  static fromEntries(entries) {
7773
- /** Copy key bytes of an entry and replace the last one with 0. */
7774
- const mapped = Array.from(entries).map(([key, value]) => {
7775
- const newKey = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
7776
- newKey.raw.set(key.raw.subarray(0, TRUNCATED_HASH_SIZE));
7777
- return [newKey, value];
7778
- });
7779
- return new TruncatedHashDictionary(hash_dictionary_HashDictionary.fromEntries(mapped));
8227
+ return new TruncatedHashDictionary(BlobDictionary.fromEntries(Array.from(entries).map(([key, value]) => [getTruncatedKey(key), value]), truncated_hash_dictionary_BLOB_DICTIONARY_THRESHOLD));
7780
8228
  }
7781
- /** A truncated key which we re-use to query the dictionary. */
7782
- truncatedKey = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
7783
8229
  constructor(dict) {
7784
8230
  this.dict = dict;
7785
8231
  }
7786
8232
  [TEST_COMPARE_USING]() {
7787
- return this.dict;
8233
+ return Array.from(this.dict);
7788
8234
  }
7789
8235
  /** Return number of items in the dictionary. */
7790
8236
  get size() {
7791
8237
  return this.dict.size;
7792
8238
  }
7793
8239
  /** Retrieve a value that matches the key on `TRUNCATED_HASH_SIZE`. */
7794
- get(fullKey) {
7795
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
7796
- return this.dict.get(this.truncatedKey);
8240
+ get(key) {
8241
+ const truncatedKey = getTruncatedKey(key);
8242
+ return this.dict.get(truncatedKey);
7797
8243
  }
7798
8244
  /** Return true if the key is present in the dictionary */
7799
- has(fullKey) {
7800
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
7801
- return this.dict.has(this.truncatedKey);
8245
+ has(key) {
8246
+ const truncatedKey = getTruncatedKey(key);
8247
+ return this.dict.has(truncatedKey);
7802
8248
  }
7803
8249
  /** Set or update a value that matches the key on `TRUNCATED_HASH_SIZE`. */
7804
- set(fullKey, value) {
7805
- // NOTE we can't use the the shared key here, since the collection will
7806
- // store the key for us, hence the copy.
7807
- const key = bytes_Bytes.zero(hash_HASH_SIZE);
7808
- key.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
7809
- this.dict.set(key.asOpaque(), value);
8250
+ set(key, value) {
8251
+ const truncatedKey = getTruncatedKey(key);
8252
+ this.dict.set(truncatedKey, value);
7810
8253
  }
7811
8254
  /** Remove a value that matches the key on `TRUNCATED_HASH_SIZE`. */
7812
- delete(fullKey) {
7813
- this.truncatedKey.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
7814
- this.dict.delete(this.truncatedKey);
8255
+ delete(key) {
8256
+ const truncatedKey = getTruncatedKey(key);
8257
+ this.dict.delete(truncatedKey);
7815
8258
  }
7816
8259
  /** Iterator over values of the dictionary. */
7817
8260
  values() {
@@ -7819,9 +8262,7 @@ class TruncatedHashDictionary {
7819
8262
  }
7820
8263
  /** Iterator over entries of the dictionary (with truncated keys) */
7821
8264
  *entries() {
7822
- for (const [key, value] of this.dict.entries()) {
7823
- yield [bytes_Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE).asOpaque(), value];
7824
- }
8265
+ yield* this.dict.entries();
7825
8266
  }
7826
8267
  [Symbol.iterator]() {
7827
8268
  return this.entries();
@@ -7838,6 +8279,7 @@ class TruncatedHashDictionary {
7838
8279
 
7839
8280
 
7840
8281
 
8282
+
7841
8283
  ;// CONCATENATED MODULE: ./packages/jam/config/chain-spec.ts
7842
8284
 
7843
8285
 
@@ -10630,11 +11072,32 @@ const ENTROPY_ENTRIES = 4;
10630
11072
 
10631
11073
  var state_update_UpdatePreimageKind;
10632
11074
  (function (UpdatePreimageKind) {
10633
- /** Insert new preimage and optionally update it's lookup history. */
11075
+ /**
11076
+ * Insert a new preimage and optionally update its lookup history.
11077
+ *
11078
+ * Used in: `provide`
11079
+ *
11080
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/383904383904?v=0.7.2
11081
+ */
10634
11082
  UpdatePreimageKind[UpdatePreimageKind["Provide"] = 0] = "Provide";
10635
- /** Remove a preimage and it's lookup history. */
11083
+ /**
11084
+ * Remove a preimage and it's lookup history.
11085
+ *
11086
+ * Used in: `forget` and `eject`
11087
+ *
11088
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/38c701380202?v=0.7.2
11089
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/379102379302?v=0.7.2
11090
+ */
10636
11091
  UpdatePreimageKind[UpdatePreimageKind["Remove"] = 1] = "Remove";
10637
- /** update or add lookup history for preimage hash/len to given value. */
11092
+ /**
11093
+ * Update or add lookup history for preimage hash/len to given value.
11094
+ *
11095
+ * Used in: `solicit` and `forget`
11096
+ *
11097
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/382802382802?v=0.7.2
11098
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/384002384b02?v=0.7.2
11099
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/38c60038ea00?v=0.7.2
11100
+ */
10638
11101
  UpdatePreimageKind[UpdatePreimageKind["UpdateOrAdd"] = 2] = "UpdateOrAdd";
10639
11102
  })(state_update_UpdatePreimageKind || (state_update_UpdatePreimageKind = {}));
10640
11103
  /**
@@ -10642,7 +11105,7 @@ var state_update_UpdatePreimageKind;
10642
11105
  *
10643
11106
  * Can be one of the following cases:
10644
11107
  * 1. Provide a new preimage blob and set the lookup history to available at `slot`.
10645
- * 2. Remove (expunge) a preimage and it's lookup history.
11108
+ * 2. Remove (forget) a preimage and it's lookup history.
10646
11109
  * 3. Update `LookupHistory` with given value.
10647
11110
  */
10648
11111
  class UpdatePreimage {
@@ -11348,7 +11811,7 @@ class BlockVerifier {
11348
11811
  this.hasher = hasher;
11349
11812
  this.blocks = blocks;
11350
11813
  }
11351
- async verifyBlock(block) {
11814
+ async verifyBlock(block, options = { skipParentAndStateRoot: false }) {
11352
11815
  const headerView = block.header.view();
11353
11816
  const headerHash = this.hasher.header(headerView);
11354
11817
  // check if current block is already imported
@@ -11360,7 +11823,7 @@ class BlockVerifier {
11360
11823
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c9d000c9d00?v=0.6.5
11361
11824
  const parentHash = headerView.parentHeaderHash.materialize();
11362
11825
  // importing genesis block
11363
- if (!parentHash.isEqualTo(block_verifier_ZERO_HASH)) {
11826
+ if (!parentHash.isEqualTo(block_verifier_ZERO_HASH) && !options.skipParentAndStateRoot) {
11364
11827
  const parentBlock = this.blocks.getHeader(parentHash);
11365
11828
  if (parentBlock === null) {
11366
11829
  return Result.error(BlockVerifierError.ParentNotFound, () => `Parent ${parentHash.toString()} not found`);
@@ -11380,21 +11843,20 @@ class BlockVerifier {
11380
11843
  if (!extrinsicHash.isEqualTo(extrinsicMerkleCommitment.hash)) {
11381
11844
  return Result.error(BlockVerifierError.InvalidExtrinsic, () => `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
11382
11845
  }
11383
- // Check if the state root is valid.
11384
- // https://graypaper.fluffylabs.dev/#/cc517d7/0c18010c1801?v=0.6.5
11385
- const stateRoot = headerView.priorStateRoot.materialize();
11386
- const posteriorStateRoot = this.blocks.getPostStateRoot(parentHash);
11387
- if (posteriorStateRoot === null) {
11388
- return Result.error(BlockVerifierError.StateRootNotFound, () => `Posterior state root ${parentHash.toString()} not found`);
11389
- }
11390
- if (!stateRoot.isEqualTo(posteriorStateRoot)) {
11391
- return Result.error(BlockVerifierError.InvalidStateRoot, () => `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
11846
+ if (!options.skipParentAndStateRoot) {
11847
+ // Check if the state root is valid.
11848
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/0c73010c7301?v=0.7.2
11849
+ const stateRoot = headerView.priorStateRoot.materialize();
11850
+ const posteriorStateRoot = this.blocks.getPostStateRoot(parentHash);
11851
+ if (posteriorStateRoot === null) {
11852
+ return Result.error(BlockVerifierError.StateRootNotFound, () => `Posterior state root ${parentHash.toString()} not found`);
11853
+ }
11854
+ if (!stateRoot.isEqualTo(posteriorStateRoot)) {
11855
+ return Result.error(BlockVerifierError.InvalidStateRoot, () => `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
11856
+ }
11392
11857
  }
11393
11858
  return Result.ok(headerHash.hash);
11394
11859
  }
11395
- hashHeader(block) {
11396
- return this.hasher.header(block.header.view());
11397
- }
11398
11860
  }
11399
11861
 
11400
11862
  ;// CONCATENATED MODULE: ./packages/jam/transition/disputes/disputes-error-code.ts
@@ -13212,7 +13674,6 @@ class LeafNode {
13212
13674
  /**
13213
13675
  * Get the byte length of embedded value.
13214
13676
  *
13215
- * @remark
13216
13677
  * Note in case this node only contains hash this is going to be 0.
13217
13678
  */
13218
13679
  getValueLength() {
@@ -13223,7 +13684,6 @@ class LeafNode {
13223
13684
  /**
13224
13685
  * Returns the embedded value.
13225
13686
  *
13226
- * @remark
13227
13687
  * Note that this is going to be empty for a regular leaf node (i.e. containing a hash).
13228
13688
  */
13229
13689
  getValue() {
@@ -13233,7 +13693,6 @@ class LeafNode {
13233
13693
  /**
13234
13694
  * Returns contained value hash.
13235
13695
  *
13236
- * @remark
13237
13696
  * Note that for embedded value this is going to be full 0-padded 32 bytes.
13238
13697
  */
13239
13698
  getValueHash() {
@@ -14254,7 +14713,11 @@ class PartiallyUpdatedState {
14254
14713
  const service = this.state.getService(serviceId);
14255
14714
  return service?.getPreimage(hash) ?? null;
14256
14715
  }
14257
- /** Get status of a preimage of current service taking into account any updates. */
14716
+ /**
14717
+ * Get status of a preimage of current service taking into account any updates.
14718
+ *
14719
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/110201110201?v=0.7.2
14720
+ */
14258
14721
  getLookupHistory(currentTimeslot, serviceId, hash, length) {
14259
14722
  const updatedService = this.stateUpdate.services.updated.get(serviceId);
14260
14723
  /** Return lookup history item for newly created service */
@@ -14291,12 +14754,7 @@ class PartiallyUpdatedState {
14291
14754
  return new LookupHistoryItem(hash, updatedPreimage.length, service_tryAsLookupHistorySlots([currentTimeslot]));
14292
14755
  }
14293
14756
  case state_update_UpdatePreimageKind.Remove: {
14294
- const state = stateFallback();
14295
- // kinda impossible, since we know it's there because it's removed.
14296
- if (state === null) {
14297
- return null;
14298
- }
14299
- return new LookupHistoryItem(hash, state.length, service_tryAsLookupHistorySlots([...state.slots, currentTimeslot]));
14757
+ return null;
14300
14758
  }
14301
14759
  case state_update_UpdatePreimageKind.UpdateOrAdd: {
14302
14760
  return action.item;
@@ -14389,7 +14847,7 @@ const gas_tryAsSmallGas = (v) => opaque_asOpaqueType(numbers_tryAsU32(v));
14389
14847
  /** Attempt to convert given number into U64 gas representation. */
14390
14848
  const tryAsBigGas = (v) => opaque_asOpaqueType(numbers_tryAsU64(v));
14391
14849
  /** Attempt to convert given number into gas. */
14392
- const tryAsGas = (v) => typeof v === "number" && v < 2 ** 32 ? gas_tryAsSmallGas(v) : tryAsBigGas(v);
14850
+ const gas_tryAsGas = (v) => typeof v === "number" && v < 2 ** 32 ? gas_tryAsSmallGas(v) : tryAsBigGas(v);
14393
14851
 
14394
14852
  ;// CONCATENATED MODULE: ./packages/core/pvm-interface/memory.ts
14395
14853
 
@@ -14672,7 +15130,7 @@ const tryAsRegisterIndex = (index) => {
14672
15130
  debug_check `${index >= 0 && index < registers_NO_OF_REGISTERS} Incorrect register index: ${index}!`;
14673
15131
  return opaque_asOpaqueType(index);
14674
15132
  };
14675
- class Registers {
15133
+ class registers_Registers {
14676
15134
  bytes;
14677
15135
  asSigned;
14678
15136
  asUnsigned;
@@ -14691,7 +15149,7 @@ class Registers {
14691
15149
  }
14692
15150
  static fromBytes(bytes) {
14693
15151
  debug_check `${bytes.length === registers_NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
14694
- return new Registers(bytes);
15152
+ return new registers_Registers(bytes);
14695
15153
  }
14696
15154
  getBytesAsLittleEndian(index, len) {
14697
15155
  const offset = index << REGISTER_SIZE_SHIFT;
@@ -15005,49 +15463,10 @@ class NoopMissing {
15005
15463
  }
15006
15464
  }
15007
15465
 
15008
- ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/gas.ts
15009
-
15010
-
15011
- /** Create a new gas counter instance depending on the gas value. */
15012
- function gasCounter(gas) {
15013
- return new GasCounterU64(numbers_tryAsU64(gas));
15014
- }
15015
- class GasCounterU64 {
15016
- gas;
15017
- initialGas;
15018
- constructor(gas) {
15019
- this.gas = gas;
15020
- this.initialGas = tryAsGas(gas);
15021
- }
15022
- set(g) {
15023
- this.gas = numbers_tryAsU64(g);
15024
- }
15025
- get() {
15026
- return tryAsGas(this.gas);
15027
- }
15028
- sub(g) {
15029
- const result = this.gas - numbers_tryAsU64(g);
15030
- if (result >= 0n) {
15031
- this.gas = numbers_tryAsU64(result);
15032
- return false;
15033
- }
15034
- this.gas = numbers_tryAsU64(0n);
15035
- return true;
15036
- }
15037
- used() {
15038
- const gasConsumed = numbers_tryAsU64(this.initialGas) - this.gas;
15039
- // In we have less than zero left we assume that all gas has been consumed.
15040
- if (gasConsumed < 0) {
15041
- return this.initialGas;
15042
- }
15043
- return tryAsGas(gasConsumed);
15044
- }
15045
- }
15046
-
15047
15466
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts
15048
15467
 
15049
15468
 
15050
- const tryAsMemoryIndex = (index) => {
15469
+ const memory_index_tryAsMemoryIndex = (index) => {
15051
15470
  debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
15052
15471
  return opaque_asOpaqueType(index);
15053
15472
  };
@@ -15061,25 +15480,25 @@ const tryAsSbrkIndex = (index) => {
15061
15480
 
15062
15481
  const memory_consts_PAGE_SIZE_SHIFT = 12;
15063
15482
  // PAGE_SIZE has to be a power of 2
15064
- const PAGE_SIZE = 1 << memory_consts_PAGE_SIZE_SHIFT;
15483
+ const memory_consts_PAGE_SIZE = 1 << memory_consts_PAGE_SIZE_SHIFT;
15065
15484
  const MIN_ALLOCATION_SHIFT = (() => {
15066
15485
  const MIN_ALLOCATION_SHIFT = 7;
15067
15486
  debug_check `${MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < memory_consts_PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
15068
15487
  return MIN_ALLOCATION_SHIFT;
15069
15488
  })();
15070
- const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
15071
- const LAST_PAGE_NUMBER = (MEMORY_SIZE - PAGE_SIZE) / PAGE_SIZE;
15489
+ const MIN_ALLOCATION_LENGTH = memory_consts_PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
15490
+ const LAST_PAGE_NUMBER = (MEMORY_SIZE - memory_consts_PAGE_SIZE) / memory_consts_PAGE_SIZE;
15072
15491
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/35a60235a602?v=0.6.4 */
15073
15492
  const RESERVED_NUMBER_OF_PAGES = 16;
15074
15493
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/35a60235a602?v=0.6.4 */
15075
- const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE;
15494
+ const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / memory_consts_PAGE_SIZE;
15076
15495
 
15077
15496
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts
15078
15497
 
15079
15498
 
15080
15499
  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
15081
15500
  const tryAsPageIndex = (index) => {
15082
- debug_check `${index >= 0 && index < PAGE_SIZE}, Incorect page index: ${index}!`;
15501
+ debug_check `${index >= 0 && index < memory_consts_PAGE_SIZE}, Incorect page index: ${index}!`;
15083
15502
  return opaque_asOpaqueType(index);
15084
15503
  };
15085
15504
  /** Ensure that given `index` represents an index of one of the pages. */
@@ -15107,17 +15526,17 @@ function getNextPageNumber(pageNumber) {
15107
15526
 
15108
15527
 
15109
15528
  function alignToPageSize(length) {
15110
- return PAGE_SIZE * Math.ceil(length / PAGE_SIZE);
15529
+ return memory_consts_PAGE_SIZE * Math.ceil(length / memory_consts_PAGE_SIZE);
15111
15530
  }
15112
15531
  function getPageNumber(address) {
15113
15532
  return tryAsPageNumber(address >>> memory_consts_PAGE_SIZE_SHIFT);
15114
15533
  }
15115
15534
  function getStartPageIndex(address) {
15116
- return tryAsMemoryIndex((address >>> memory_consts_PAGE_SIZE_SHIFT) << memory_consts_PAGE_SIZE_SHIFT);
15535
+ return memory_index_tryAsMemoryIndex((address >>> memory_consts_PAGE_SIZE_SHIFT) << memory_consts_PAGE_SIZE_SHIFT);
15117
15536
  }
15118
15537
  function getStartPageIndexFromPageNumber(pageNumber) {
15119
15538
  // >>> 0 is needed to avoid changing sign of the number
15120
- return tryAsMemoryIndex((pageNumber << memory_consts_PAGE_SIZE_SHIFT) >>> 0);
15539
+ return memory_index_tryAsMemoryIndex((pageNumber << memory_consts_PAGE_SIZE_SHIFT) >>> 0);
15121
15540
  }
15122
15541
 
15123
15542
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/errors.ts
@@ -15139,7 +15558,7 @@ class PageFault {
15139
15558
  return new PageFault(numbers_tryAsU32(startPageIndex), isAccessFault);
15140
15559
  }
15141
15560
  static fromMemoryIndex(maybeMemoryIndex, isAccessFault = false) {
15142
- const memoryIndex = tryAsMemoryIndex(maybeMemoryIndex % MEMORY_SIZE);
15561
+ const memoryIndex = memory_index_tryAsMemoryIndex(maybeMemoryIndex % MEMORY_SIZE);
15143
15562
  const startPageIndex = getStartPageIndex(memoryIndex);
15144
15563
  return new PageFault(numbers_tryAsU32(startPageIndex), isAccessFault);
15145
15564
  }
@@ -15218,9 +15637,9 @@ class MemoryRange {
15218
15637
  constructor(start, length) {
15219
15638
  this.start = start;
15220
15639
  this.length = length;
15221
- this.end = tryAsMemoryIndex((this.start + this.length) % MEMORY_SIZE);
15640
+ this.end = memory_index_tryAsMemoryIndex((this.start + this.length) % MEMORY_SIZE);
15222
15641
  if (length > 0) {
15223
- this.lastIndex = tryAsMemoryIndex((this.end - 1 + MEMORY_SIZE) % MEMORY_SIZE);
15642
+ this.lastIndex = memory_index_tryAsMemoryIndex((this.end - 1 + MEMORY_SIZE) % MEMORY_SIZE);
15224
15643
  }
15225
15644
  }
15226
15645
  /** Creates a memory range from given starting point and length */
@@ -15263,7 +15682,7 @@ class MemoryRange {
15263
15682
  *
15264
15683
  * it should be in `memory-consts` but it cannot be there because of circular dependency
15265
15684
  */
15266
- const RESERVED_MEMORY_RANGE = MemoryRange.fromStartAndLength(tryAsMemoryIndex(0), RESERVED_NUMBER_OF_PAGES * PAGE_SIZE);
15685
+ const RESERVED_MEMORY_RANGE = MemoryRange.fromStartAndLength(memory_index_tryAsMemoryIndex(0), RESERVED_NUMBER_OF_PAGES * memory_consts_PAGE_SIZE);
15267
15686
 
15268
15687
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/page-range.ts
15269
15688
 
@@ -15301,7 +15720,7 @@ class PageRange {
15301
15720
  // lastIndex is not null because we just ensured that the range is not empty
15302
15721
  const pageWithLastIndex = getPageNumber(range.lastIndex ?? range.end);
15303
15722
  const endPage = getNextPageNumber(pageWithLastIndex);
15304
- if ((startPage === endPage || startPage === pageWithLastIndex) && range.length > PAGE_SIZE) {
15723
+ if ((startPage === endPage || startPage === pageWithLastIndex) && range.length > memory_consts_PAGE_SIZE) {
15305
15724
  // full range
15306
15725
  return new PageRange(startPage, MAX_NUMBER_OF_PAGES);
15307
15726
  }
@@ -15365,8 +15784,8 @@ class ReadablePage extends MemoryPage {
15365
15784
  }
15366
15785
  loadInto(result, startIndex, length) {
15367
15786
  const endIndex = startIndex + length;
15368
- if (endIndex > PAGE_SIZE) {
15369
- return Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
15787
+ if (endIndex > memory_consts_PAGE_SIZE) {
15788
+ return Result.error(PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + memory_consts_PAGE_SIZE}`);
15370
15789
  }
15371
15790
  const bytes = this.data.subarray(startIndex, endIndex);
15372
15791
  // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -15399,8 +15818,8 @@ class WriteablePage extends MemoryPage {
15399
15818
  constructor(pageNumber, initialData) {
15400
15819
  super(pageNumber);
15401
15820
  const dataLength = initialData?.length ?? 0;
15402
- const initialPageLength = Math.min(PAGE_SIZE, Math.max(dataLength, MIN_ALLOCATION_LENGTH));
15403
- this.buffer = new ArrayBuffer(initialPageLength, { maxByteLength: PAGE_SIZE });
15821
+ const initialPageLength = Math.min(memory_consts_PAGE_SIZE, Math.max(dataLength, MIN_ALLOCATION_LENGTH));
15822
+ this.buffer = new ArrayBuffer(initialPageLength, { maxByteLength: memory_consts_PAGE_SIZE });
15404
15823
  this.view = new Uint8Array(this.buffer);
15405
15824
  if (initialData !== undefined) {
15406
15825
  this.view.set(initialData);
@@ -15408,8 +15827,8 @@ class WriteablePage extends MemoryPage {
15408
15827
  }
15409
15828
  loadInto(result, startIndex, length) {
15410
15829
  const endIndex = startIndex + length;
15411
- if (endIndex > PAGE_SIZE) {
15412
- return Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
15830
+ if (endIndex > memory_consts_PAGE_SIZE) {
15831
+ return Result.error(PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + memory_consts_PAGE_SIZE}`);
15413
15832
  }
15414
15833
  const bytes = this.view.subarray(startIndex, endIndex);
15415
15834
  // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -15418,16 +15837,16 @@ class WriteablePage extends MemoryPage {
15418
15837
  return Result.ok(OK);
15419
15838
  }
15420
15839
  storeFrom(startIndex, bytes) {
15421
- if (this.buffer.byteLength < startIndex + bytes.length && this.buffer.byteLength < PAGE_SIZE) {
15422
- const newLength = Math.min(PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, startIndex + bytes.length));
15840
+ if (this.buffer.byteLength < startIndex + bytes.length && this.buffer.byteLength < memory_consts_PAGE_SIZE) {
15841
+ const newLength = Math.min(memory_consts_PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, startIndex + bytes.length));
15423
15842
  this.buffer.resize(newLength);
15424
15843
  }
15425
15844
  this.view.set(bytes, startIndex);
15426
15845
  return Result.ok(OK);
15427
15846
  }
15428
15847
  setData(pageIndex, data) {
15429
- if (this.buffer.byteLength < pageIndex + data.length && this.buffer.byteLength < PAGE_SIZE) {
15430
- const newLength = Math.min(PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, pageIndex + data.length));
15848
+ if (this.buffer.byteLength < pageIndex + data.length && this.buffer.byteLength < memory_consts_PAGE_SIZE) {
15849
+ const newLength = Math.min(memory_consts_PAGE_SIZE, Math.max(MIN_ALLOCATION_LENGTH, pageIndex + data.length));
15431
15850
  this.buffer.resize(newLength);
15432
15851
  }
15433
15852
  this.view.set(data, pageIndex);
@@ -15478,10 +15897,10 @@ class Memory {
15478
15897
  this.memory = memory;
15479
15898
  }
15480
15899
  store(address, bytes) {
15481
- return this.storeFrom(tryAsMemoryIndex(address), bytes);
15900
+ return this.storeFrom(memory_index_tryAsMemoryIndex(address), bytes);
15482
15901
  }
15483
15902
  read(address, output) {
15484
- return this.loadInto(output, tryAsMemoryIndex(address));
15903
+ return this.loadInto(output, memory_index_tryAsMemoryIndex(address));
15485
15904
  }
15486
15905
  reset() {
15487
15906
  this.sbrkIndex = tryAsSbrkIndex(RESERVED_MEMORY_RANGE.end);
@@ -15508,8 +15927,8 @@ class Memory {
15508
15927
  let currentPosition = address;
15509
15928
  let bytesLeft = bytes.length;
15510
15929
  for (const page of pages) {
15511
- const pageStartIndex = tryAsPageIndex(currentPosition % PAGE_SIZE);
15512
- const bytesToWrite = Math.min(PAGE_SIZE - pageStartIndex, bytesLeft);
15930
+ const pageStartIndex = tryAsPageIndex(currentPosition % memory_consts_PAGE_SIZE);
15931
+ const bytesToWrite = Math.min(memory_consts_PAGE_SIZE - pageStartIndex, bytesLeft);
15513
15932
  const sourceStartIndex = currentPosition - address;
15514
15933
  const source = bytes.subarray(sourceStartIndex, sourceStartIndex + bytesToWrite);
15515
15934
  page.storeFrom(pageStartIndex, source);
@@ -15558,8 +15977,8 @@ class Memory {
15558
15977
  let currentPosition = startAddress;
15559
15978
  let bytesLeft = result.length;
15560
15979
  for (const page of pages) {
15561
- const pageStartIndex = tryAsPageIndex(currentPosition % PAGE_SIZE);
15562
- const bytesToRead = Math.min(PAGE_SIZE - pageStartIndex, bytesLeft);
15980
+ const pageStartIndex = tryAsPageIndex(currentPosition % memory_consts_PAGE_SIZE);
15981
+ const bytesToRead = Math.min(memory_consts_PAGE_SIZE - pageStartIndex, bytesLeft);
15563
15982
  const destinationStartIndex = currentPosition - startAddress;
15564
15983
  const destination = result.subarray(destinationStartIndex);
15565
15984
  page.loadInto(destination, pageStartIndex, bytesToRead);
@@ -15586,7 +16005,7 @@ class Memory {
15586
16005
  const newSbrkIndex = tryAsSbrkIndex(alignToPageSize(newVirtualSbrkIndex));
15587
16006
  // TODO [MaSi]: `getPageNumber` works incorrectly for SbrkIndex. Sbrk index should be changed to MemoryIndex
15588
16007
  const firstPageNumber = getPageNumber(currentSbrkIndex);
15589
- const pagesToAllocate = (newSbrkIndex - currentSbrkIndex) / PAGE_SIZE;
16008
+ const pagesToAllocate = (newSbrkIndex - currentSbrkIndex) / memory_consts_PAGE_SIZE;
15590
16009
  const rangeToAllocate = PageRange.fromStartAndLength(firstPageNumber, pagesToAllocate);
15591
16010
  for (const pageNumber of rangeToAllocate) {
15592
16011
  const page = new WriteablePage(pageNumber);
@@ -15641,8 +16060,8 @@ class MemoryBuilder {
15641
16060
  setReadablePages(start, end, data = new Uint8Array()) {
15642
16061
  this.ensureNotFinalized();
15643
16062
  debug_check `${start < end} end has to be bigger than start`;
15644
- debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
15645
- debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
16063
+ debug_check `${start % memory_consts_PAGE_SIZE === 0} start needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
16064
+ debug_check `${end % memory_consts_PAGE_SIZE === 0} end needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
15646
16065
  debug_check `${data.length <= end - start} the initial data is longer than address range`;
15647
16066
  const length = end - start;
15648
16067
  const range = MemoryRange.fromStartAndLength(start, length);
@@ -15651,7 +16070,7 @@ class MemoryBuilder {
15651
16070
  const noOfPages = pages.length;
15652
16071
  for (let i = 0; i < noOfPages; i++) {
15653
16072
  const pageNumber = pages[i];
15654
- const dataChunk = data.subarray(i * PAGE_SIZE, (i + 1) * PAGE_SIZE);
16073
+ const dataChunk = data.subarray(i * memory_consts_PAGE_SIZE, (i + 1) * memory_consts_PAGE_SIZE);
15655
16074
  const page = new ReadablePage(pageNumber, dataChunk);
15656
16075
  this.initialMemory.set(pageNumber, page);
15657
16076
  }
@@ -15669,8 +16088,8 @@ class MemoryBuilder {
15669
16088
  setWriteablePages(start, end, data = new Uint8Array()) {
15670
16089
  this.ensureNotFinalized();
15671
16090
  debug_check `${start < end} end has to be bigger than start`;
15672
- debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
15673
- debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
16091
+ debug_check `${start % memory_consts_PAGE_SIZE === 0} start needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
16092
+ debug_check `${end % memory_consts_PAGE_SIZE === 0} end needs to be a multiple of page size (${memory_consts_PAGE_SIZE})`;
15674
16093
  debug_check `${data.length <= end - start} the initial data is longer than address range`;
15675
16094
  const length = end - start;
15676
16095
  const range = MemoryRange.fromStartAndLength(start, length);
@@ -15679,7 +16098,7 @@ class MemoryBuilder {
15679
16098
  const noOfPages = pages.length;
15680
16099
  for (let i = 0; i < noOfPages; i++) {
15681
16100
  const pageNumber = pages[i];
15682
- const dataChunk = data.subarray(i * PAGE_SIZE, (i + 1) * PAGE_SIZE);
16101
+ const dataChunk = data.subarray(i * memory_consts_PAGE_SIZE, (i + 1) * memory_consts_PAGE_SIZE);
15683
16102
  const page = new WriteablePage(pageNumber, dataChunk);
15684
16103
  this.initialMemory.set(pageNumber, page);
15685
16104
  }
@@ -15691,8 +16110,8 @@ class MemoryBuilder {
15691
16110
  */
15692
16111
  setData(start, data) {
15693
16112
  this.ensureNotFinalized();
15694
- const pageOffset = start % PAGE_SIZE;
15695
- const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
16113
+ const pageOffset = start % memory_consts_PAGE_SIZE;
16114
+ const remainingSpaceOnPage = memory_consts_PAGE_SIZE - pageOffset;
15696
16115
  debug_check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
15697
16116
  const length = data.length;
15698
16117
  const range = MemoryRange.fromStartAndLength(start, length);
@@ -15883,27 +16302,27 @@ class Program {
15883
16302
  static fromSpi(blob, args, hasMetadata) {
15884
16303
  const { code: spiCode, metadata } = hasMetadata ? extractCodeAndMetadata(blob) : { code: blob };
15885
16304
  const { code, memory: rawMemory, registers } = decodeStandardProgram(spiCode, args);
15886
- const regs = new Registers();
16305
+ const regs = new registers_Registers();
15887
16306
  regs.copyFrom(registers);
15888
16307
  const memoryBuilder = new MemoryBuilder();
15889
16308
  for (const { start, end, data } of rawMemory.readable) {
15890
- const startIndex = tryAsMemoryIndex(start);
15891
- const endIndex = tryAsMemoryIndex(end);
16309
+ const startIndex = memory_index_tryAsMemoryIndex(start);
16310
+ const endIndex = memory_index_tryAsMemoryIndex(end);
15892
16311
  memoryBuilder.setReadablePages(startIndex, endIndex, data ?? new Uint8Array());
15893
16312
  }
15894
16313
  for (const { start, end, data } of rawMemory.writeable) {
15895
- const startIndex = tryAsMemoryIndex(start);
15896
- const endIndex = tryAsMemoryIndex(end);
16314
+ const startIndex = memory_index_tryAsMemoryIndex(start);
16315
+ const endIndex = memory_index_tryAsMemoryIndex(end);
15897
16316
  memoryBuilder.setWriteablePages(startIndex, endIndex, data ?? new Uint8Array());
15898
16317
  }
15899
- const heapStart = tryAsMemoryIndex(rawMemory.sbrkIndex);
16318
+ const heapStart = memory_index_tryAsMemoryIndex(rawMemory.sbrkIndex);
15900
16319
  const heapEnd = tryAsSbrkIndex(rawMemory.heapEnd);
15901
16320
  const memory = memoryBuilder.finalize(heapStart, heapEnd);
15902
16321
  return new Program(code, regs, memory, metadata);
15903
16322
  }
15904
16323
  static fromGeneric(blob, hasMetadata) {
15905
16324
  const { code, metadata } = hasMetadata ? extractCodeAndMetadata(blob) : { code: blob };
15906
- const regs = new Registers();
16325
+ const regs = new registers_Registers();
15907
16326
  const memory = new Memory();
15908
16327
  return new Program(code, regs, memory, metadata);
15909
16328
  }
@@ -16918,6 +17337,45 @@ class BasicBlocks {
16918
17337
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/basic-blocks/index.ts
16919
17338
 
16920
17339
 
17340
+ ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/gas.ts
17341
+
17342
+
17343
+ /** Create a new gas counter instance depending on the gas value. */
17344
+ function gasCounter(gas) {
17345
+ return new GasCounterU64(numbers_tryAsU64(gas));
17346
+ }
17347
+ class GasCounterU64 {
17348
+ gas;
17349
+ initialGas;
17350
+ constructor(gas) {
17351
+ this.gas = gas;
17352
+ this.initialGas = gas_tryAsGas(gas);
17353
+ }
17354
+ set(g) {
17355
+ this.gas = numbers_tryAsU64(g);
17356
+ }
17357
+ get() {
17358
+ return gas_tryAsGas(this.gas);
17359
+ }
17360
+ sub(g) {
17361
+ const result = this.gas - numbers_tryAsU64(g);
17362
+ if (result >= 0n) {
17363
+ this.gas = numbers_tryAsU64(result);
17364
+ return false;
17365
+ }
17366
+ this.gas = numbers_tryAsU64(0n);
17367
+ return true;
17368
+ }
17369
+ used() {
17370
+ const gasConsumed = numbers_tryAsU64(this.initialGas) - this.gas;
17371
+ // In we have less than zero left we assume that all gas has been consumed.
17372
+ if (gasConsumed < 0) {
17373
+ return this.initialGas;
17374
+ }
17375
+ return gas_tryAsGas(gasConsumed);
17376
+ }
17377
+ }
17378
+
16921
17379
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/instruction-gas-map.ts
16922
17380
 
16923
17381
 
@@ -17494,7 +17952,7 @@ class LoadOps {
17494
17952
  }
17495
17953
  loadNumber(address, registerIndex, numberLength) {
17496
17954
  const registerBytes = this.regs.getBytesAsLittleEndian(registerIndex, REG_SIZE_BYTES);
17497
- const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), tryAsMemoryIndex(address));
17955
+ const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), memory_index_tryAsMemoryIndex(address));
17498
17956
  if (loadResult.isError) {
17499
17957
  if (loadResult.error.isAccessFault) {
17500
17958
  this.instructionResult.status = result_Result.FAULT_ACCESS;
@@ -17510,7 +17968,7 @@ class LoadOps {
17510
17968
  loadSignedNumber(address, registerIndex, numberLength) {
17511
17969
  // load all bytes from register to correctly handle the sign.
17512
17970
  const registerBytes = this.regs.getBytesAsLittleEndian(registerIndex, REG_SIZE_BYTES);
17513
- const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), tryAsMemoryIndex(address));
17971
+ const loadResult = this.memory.loadInto(registerBytes.subarray(0, numberLength), memory_index_tryAsMemoryIndex(address));
17514
17972
  if (loadResult.isError) {
17515
17973
  if (loadResult.error.isAccessFault) {
17516
17974
  this.instructionResult.status = result_Result.FAULT_ACCESS;
@@ -17932,7 +18390,7 @@ class StoreOps {
17932
18390
  this.store(address, secondImmediateDecoder.getExtendedBytesAsLittleEndian());
17933
18391
  }
17934
18392
  store(address, bytes) {
17935
- const storeResult = this.memory.storeFrom(tryAsMemoryIndex(address), bytes);
18393
+ const storeResult = this.memory.storeFrom(memory_index_tryAsMemoryIndex(address), bytes);
17936
18394
  if (storeResult.isOk) {
17937
18395
  return;
17938
18396
  }
@@ -17941,7 +18399,7 @@ class StoreOps {
17941
18399
  }
17942
18400
  else {
17943
18401
  this.instructionResult.status = result_Result.FAULT;
17944
- this.instructionResult.exitParam = getStartPageIndex(tryAsMemoryIndex(storeResult.error.address));
18402
+ this.instructionResult.exitParam = getStartPageIndex(memory_index_tryAsMemoryIndex(storeResult.error.address));
17945
18403
  }
17946
18404
  }
17947
18405
  }
@@ -18740,11 +19198,11 @@ class ProgramDecoder {
18740
19198
 
18741
19199
 
18742
19200
  const interpreter_logger = Logger.new(import.meta.filename, "pvm");
18743
- class Interpreter {
19201
+ class interpreter_Interpreter {
18744
19202
  useSbrkGas;
18745
- registers = new Registers();
19203
+ registers = new registers_Registers();
18746
19204
  memory = new Memory();
18747
- gas = gasCounter(tryAsGas(0));
19205
+ gas = gasCounter(gas_tryAsGas(0));
18748
19206
  code = new Uint8Array();
18749
19207
  mask = Mask.empty();
18750
19208
  pc = 0;
@@ -18878,8 +19336,8 @@ class Interpreter {
18878
19336
  break;
18879
19337
  case ArgumentType.TWO_REGISTERS:
18880
19338
  if (this.useSbrkGas && currentInstruction === Instruction.SBRK) {
18881
- const calculateSbrkCost = (length) => (alignToPageSize(length) / PAGE_SIZE) * 16;
18882
- const underflow = this.gas.sub(tryAsGas(calculateSbrkCost(this.registers.getLowerU32(argsResult.firstRegisterIndex))));
19339
+ const calculateSbrkCost = (length) => (alignToPageSize(length) / memory_consts_PAGE_SIZE) * 16;
19340
+ const underflow = this.gas.sub(gas_tryAsGas(calculateSbrkCost(this.registers.getLowerU32(argsResult.firstRegisterIndex))));
18883
19341
  if (underflow) {
18884
19342
  this.status = status_Status.OOG;
18885
19343
  return this.status;
@@ -18974,12 +19432,88 @@ class Interpreter {
18974
19432
  }
18975
19433
  }
18976
19434
 
19435
+ ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/debugger-adapter.ts
19436
+
19437
+
19438
+
19439
+
19440
+
19441
+
19442
+ class DebuggerAdapter {
19443
+ pvm;
19444
+ constructor(useSbrkGas = false) {
19445
+ this.pvm = new Interpreter({ useSbrkGas });
19446
+ }
19447
+ resetGeneric(rawProgram, flatRegisters, initialGas) {
19448
+ this.pvm.resetGeneric(rawProgram, 0, tryAsGas(initialGas), new Registers(flatRegisters));
19449
+ }
19450
+ reset(rawProgram, pc, gas, maybeRegisters, maybeMemory) {
19451
+ this.pvm.resetGeneric(rawProgram, pc, tryAsGas(gas), maybeRegisters, maybeMemory);
19452
+ }
19453
+ getPageDump(pageNumber) {
19454
+ const page = this.pvm.getMemoryPage(pageNumber);
19455
+ if (page === null) {
19456
+ // page wasn't allocated so we return an empty page
19457
+ return safeAllocUint8Array(PAGE_SIZE);
19458
+ }
19459
+ if (page.length === PAGE_SIZE) {
19460
+ // page was allocated and has a proper size so we can simply return it
19461
+ return page;
19462
+ }
19463
+ // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
19464
+ const fullPage = safeAllocUint8Array(PAGE_SIZE);
19465
+ fullPage.set(page);
19466
+ return fullPage;
19467
+ }
19468
+ setMemory(address, value) {
19469
+ this.pvm.memory.storeFrom(tryAsMemoryIndex(address), value);
19470
+ }
19471
+ getExitArg() {
19472
+ return this.pvm.getExitParam() ?? 0;
19473
+ }
19474
+ getStatus() {
19475
+ return this.pvm.getStatus();
19476
+ }
19477
+ nextStep() {
19478
+ return this.pvm.nextStep() === Status.OK;
19479
+ }
19480
+ nSteps(steps) {
19481
+ check `${steps >>> 0 > 0} Expected a positive integer got ${steps}`;
19482
+ for (let i = 0; i < steps; i++) {
19483
+ const isOk = this.nextStep();
19484
+ if (!isOk) {
19485
+ return false;
19486
+ }
19487
+ }
19488
+ return true;
19489
+ }
19490
+ getRegisters() {
19491
+ return this.pvm.registers.getAllU64();
19492
+ }
19493
+ setRegisters(registers) {
19494
+ this.pvm.registers.copyFrom(new Registers(registers));
19495
+ }
19496
+ getProgramCounter() {
19497
+ return this.pvm.getPC();
19498
+ }
19499
+ setNextProgramCounter(nextPc) {
19500
+ this.pvm.setNextPC(nextPc);
19501
+ }
19502
+ getGasLeft() {
19503
+ return BigInt(this.pvm.gas.get());
19504
+ }
19505
+ setGasLeft(gas) {
19506
+ this.pvm.gas.set(tryAsGas(gas));
19507
+ }
19508
+ }
19509
+
18977
19510
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/index.ts
18978
19511
 
18979
19512
 
18980
19513
 
18981
19514
 
18982
19515
 
19516
+
18983
19517
  ;// CONCATENATED MODULE: ./node_modules/@fluffylabs/anan-as/build/debug-raw.js
18984
19518
  async function instantiate(module, imports = {}) {
18985
19519
  const adaptedImports = {
@@ -19433,12 +19967,12 @@ class AnanasMemory {
19433
19967
  }
19434
19968
  class AnanasGasCounter {
19435
19969
  instance;
19436
- initialGas = tryAsGas(0n);
19970
+ initialGas = gas_tryAsGas(0n);
19437
19971
  constructor(instance) {
19438
19972
  this.instance = instance;
19439
19973
  }
19440
19974
  get() {
19441
- return tryAsGas(this.instance.getGasLeft());
19975
+ return gas_tryAsGas(this.instance.getGasLeft());
19442
19976
  }
19443
19977
  set(g) {
19444
19978
  this.instance.setGasLeft(BigInt(g));
@@ -19543,7 +20077,7 @@ class InterpreterInstanceManager {
19543
20077
  const instances = [];
19544
20078
  switch (interpreter) {
19545
20079
  case PvmBackend.BuiltIn:
19546
- instances.push(new Interpreter({
20080
+ instances.push(new interpreter_Interpreter({
19547
20081
  useSbrkGas: false,
19548
20082
  }));
19549
20083
  break;
@@ -19717,10 +20251,10 @@ class Info {
19717
20251
  const chunk = encodedInfo.raw.subarray(Number(offset), Number(offset + length));
19718
20252
  const writeResult = memory.storeFrom(outputStart, chunk);
19719
20253
  if (writeResult.isError) {
19720
- logger_logger.trace `INFO(${serviceId}, off: ${offset}, len: ${length}) <- PANIC`;
20254
+ logger_logger.trace `[${this.currentServiceId}] INFO(${serviceId}, off: ${offset}, len: ${length}) <- PANIC`;
19721
20255
  return PvmExecution.Panic;
19722
20256
  }
19723
- logger_logger.trace `INFO(${serviceId}, off: ${offset}, len: ${length}) <- ${bytes_BytesBlob.blobFrom(chunk)}`;
20257
+ logger_logger.trace `[${this.currentServiceId}] INFO(${serviceId}, off: ${offset}, len: ${length}) <- ${bytes_BytesBlob.blobFrom(chunk)}`;
19724
20258
  if (accountInfo === null) {
19725
20259
  regs.set(IN_OUT_REG, HostCallResult.NONE);
19726
20260
  return;
@@ -19944,7 +20478,7 @@ class AccumulateExternalities {
19944
20478
  const bytes = serviceInfo.storageUtilisationBytes - length - LOOKUP_HISTORY_ENTRY_BYTES;
19945
20479
  return this.updatedState.updateServiceStorageUtilisation(this.currentServiceId, items, bytes, serviceInfo);
19946
20480
  };
19947
- // https://graypaper.fluffylabs.dev/#/9a08063/389501389501?v=0.6.6
20481
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/380802380802?v=0.7.2
19948
20482
  if (s.status === PreimageStatusKind.Requested) {
19949
20483
  const res = updateStorageUtilisation();
19950
20484
  if (res.isError) {
@@ -19957,7 +20491,7 @@ class AccumulateExternalities {
19957
20491
  return Result.ok(OK);
19958
20492
  }
19959
20493
  const t = this.currentTimeslot;
19960
- // https://graypaper.fluffylabs.dev/#/9a08063/378102378102?v=0.6.6
20494
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/380802380802?v=0.7.2
19961
20495
  if (s.status === PreimageStatusKind.Unavailable) {
19962
20496
  const y = s.data[1];
19963
20497
  if (y < t - this.chainSpec.preimageExpungePeriod) {
@@ -19973,14 +20507,14 @@ class AccumulateExternalities {
19973
20507
  }
19974
20508
  return Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
19975
20509
  }
19976
- // https://graypaper.fluffylabs.dev/#/9a08063/38c80138c801?v=0.6.6
20510
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/382802383302?v=0.7.2
19977
20511
  if (s.status === PreimageStatusKind.Available) {
19978
20512
  this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
19979
20513
  lookupHistory: new LookupHistoryItem(status.hash, status.length, service_tryAsLookupHistorySlots([s.data[0], t])),
19980
20514
  }));
19981
20515
  return Result.ok(OK);
19982
20516
  }
19983
- // https://graypaper.fluffylabs.dev/#/9a08063/38d00138d001?v=0.6.6
20517
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/384002384c02?v=0.7.2
19984
20518
  if (s.status === PreimageStatusKind.Reavailable) {
19985
20519
  const y = s.data[1];
19986
20520
  if (y < t - this.chainSpec.preimageExpungePeriod) {
@@ -20816,13 +21350,11 @@ class AccumulateDataItem {
20816
21350
  * - gas cost and reports length for each service (statistics)
20817
21351
  */
20818
21352
  class AccumulateData {
20819
- autoAccumulateServicesByServiceId;
20820
21353
  reportsDataByServiceId;
20821
21354
  transfersByServiceId;
20822
21355
  serviceIds;
20823
21356
  gasLimitByServiceId;
20824
21357
  constructor(reports, transfers, autoAccumulateServicesByServiceId) {
20825
- this.autoAccumulateServicesByServiceId = autoAccumulateServicesByServiceId;
20826
21358
  const serviceIdsFromAutoAccumulate = new Set(autoAccumulateServicesByServiceId.keys());
20827
21359
  const { reportsDataByServiceId, serviceIds: serviceIdsFromReports, gasLimitByServiceId: reportsGasLimitByServiceId, } = this.transformReports(reports);
20828
21360
  this.reportsDataByServiceId = reportsDataByServiceId;
@@ -21142,12 +21674,12 @@ function createMergeContext(chainSpec, state, inputState, results) {
21142
21674
  }
21143
21675
  function updatePrivilegedService(currentServiceId, serviceIdUpdatedByManager, selfUpdatedServiceId) {
21144
21676
  if (currentServiceId === serviceIdUpdatedByManager) {
21145
- return serviceIdUpdatedByManager;
21677
+ return selfUpdatedServiceId;
21146
21678
  }
21147
- return selfUpdatedServiceId;
21679
+ return serviceIdUpdatedByManager;
21148
21680
  }
21149
21681
  function mergePrivilegedServices(mergeContext, [serviceId, { stateUpdate }]) {
21150
- const { outputState, currentPrivilegedServices, chainSpec } = mergeContext;
21682
+ const { outputState, currentPrivilegedServices, chainSpec, privilegedServicesUpdatedByManager } = mergeContext;
21151
21683
  const currentManager = currentPrivilegedServices.manager;
21152
21684
  const currentRegistrar = currentPrivilegedServices.registrar;
21153
21685
  const currentDelegator = currentPrivilegedServices.delegator;
@@ -21165,28 +21697,35 @@ function mergePrivilegedServices(mergeContext, [serviceId, { stateUpdate }]) {
21165
21697
  });
21166
21698
  }
21167
21699
  if (serviceId === currentRegistrar) {
21168
- const newRegistrar = updatePrivilegedService(currentPrivilegedServices.registrar, outputState.privilegedServices.registrar, privilegedServices.registrar);
21700
+ const newRegistrar = updatePrivilegedService(currentPrivilegedServices.registrar, privilegedServicesUpdatedByManager.registrar, privilegedServices.registrar);
21169
21701
  outputState.privilegedServices = PrivilegedServices.create({
21170
21702
  ...outputState.privilegedServices,
21171
21703
  registrar: newRegistrar,
21172
21704
  });
21173
21705
  }
21174
21706
  if (serviceId === currentDelegator) {
21175
- const newDelegator = updatePrivilegedService(currentPrivilegedServices.delegator, outputState.privilegedServices.delegator, privilegedServices.delegator);
21707
+ const newDelegator = updatePrivilegedService(currentPrivilegedServices.delegator, privilegedServicesUpdatedByManager.delegator, privilegedServices.delegator);
21176
21708
  outputState.privilegedServices = PrivilegedServices.create({
21177
21709
  ...outputState.privilegedServices,
21178
21710
  delegator: newDelegator,
21179
21711
  });
21180
21712
  }
21181
- const assignersFromOutputState = outputState.privilegedServices;
21182
- const newAssigners = currentAssigners.map((currentAssigner, coreIndex) => serviceId === currentAssigner
21183
- ? updatePrivilegedService(currentPrivilegedServices.assigners[coreIndex], assignersFromOutputState.assigners[coreIndex], privilegedServices.assigners[coreIndex])
21184
- : currentAssigner);
21185
- const newAssignersPerCore = tryAsPerCore(newAssigners, chainSpec);
21186
- outputState.privilegedServices = PrivilegedServices.create({
21187
- ...outputState.privilegedServices,
21188
- assigners: newAssignersPerCore,
21713
+ let shouldUpdateAssigners = false;
21714
+ const newAssigners = currentAssigners.map((currentAssigner, coreIndex) => {
21715
+ if (serviceId === currentAssigner) {
21716
+ const newAssigner = updatePrivilegedService(currentPrivilegedServices.assigners[coreIndex], privilegedServicesUpdatedByManager.assigners[coreIndex], privilegedServices.assigners[coreIndex]);
21717
+ shouldUpdateAssigners = shouldUpdateAssigners || newAssigner !== currentAssigner;
21718
+ return newAssigner;
21719
+ }
21720
+ return currentAssigner;
21189
21721
  });
21722
+ if (shouldUpdateAssigners) {
21723
+ const newAssignersPerCore = tryAsPerCore(newAssigners, chainSpec);
21724
+ outputState.privilegedServices = PrivilegedServices.create({
21725
+ ...outputState.privilegedServices,
21726
+ assigners: newAssignersPerCore,
21727
+ });
21728
+ }
21190
21729
  }
21191
21730
  }
21192
21731
  function mergeValidatorsData(mergeContext, [serviceId, { stateUpdate }]) {
@@ -21331,7 +21870,7 @@ class Assign {
21331
21870
  const memoryReadResult = memory.loadInto(res, authorizationQueueStart);
21332
21871
  // error while reading the memory.
21333
21872
  if (memoryReadResult.isError) {
21334
- logger_logger.trace `ASSIGN() <- PANIC`;
21873
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN() <- PANIC`;
21335
21874
  return PvmExecution.Panic;
21336
21875
  }
21337
21876
  if (maybeCoreIndex >= this.chainSpec.coresCount) {
@@ -21346,18 +21885,18 @@ class Assign {
21346
21885
  const result = this.partialState.updateAuthorizationQueue(coreIndex, fixedSizeAuthQueue, assigners);
21347
21886
  if (result.isOk) {
21348
21887
  regs.set(assign_IN_OUT_REG, HostCallResult.OK);
21349
- logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`;
21888
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`;
21350
21889
  return;
21351
21890
  }
21352
21891
  const e = result.error;
21353
21892
  if (e === UpdatePrivilegesError.UnprivilegedService) {
21354
21893
  regs.set(assign_IN_OUT_REG, HostCallResult.HUH);
21355
- logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
21894
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
21356
21895
  return;
21357
21896
  }
21358
21897
  if (e === UpdatePrivilegesError.InvalidServiceId) {
21359
21898
  regs.set(assign_IN_OUT_REG, HostCallResult.WHO);
21360
- logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
21899
+ logger_logger.trace `[${this.currentServiceId}] ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
21361
21900
  return;
21362
21901
  }
21363
21902
  assertNever(e);
@@ -21428,7 +21967,7 @@ class Bless {
21428
21967
  decoder.resetTo(0);
21429
21968
  const memoryReadResult = memory.loadInto(result, memIndex);
21430
21969
  if (memoryReadResult.isError) {
21431
- logger_logger.trace `BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}) <- PANIC`;
21970
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}) <- PANIC`;
21432
21971
  return PvmExecution.Panic;
21433
21972
  }
21434
21973
  const { serviceId, gas } = decoder.object(serviceIdAndGasCodec);
@@ -21441,26 +21980,26 @@ class Bless {
21441
21980
  const authorizersDecoder = decoder_Decoder.fromBlob(res);
21442
21981
  const memoryReadResult = memory.loadInto(res, authorization);
21443
21982
  if (memoryReadResult.isError) {
21444
- logger_logger.trace `BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- PANIC`;
21983
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- PANIC`;
21445
21984
  return PvmExecution.Panic;
21446
21985
  }
21447
21986
  // `a`
21448
21987
  const authorizers = tryAsPerCore(authorizersDecoder.sequenceFixLen(descriptors_codec.u32.asOpaque(), this.chainSpec.coresCount), this.chainSpec);
21449
21988
  const updateResult = this.partialState.updatePrivilegedServices(manager, authorizers, delegator, registrar, autoAccumulate);
21450
21989
  if (updateResult.isOk) {
21451
- logger_logger.trace `BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- OK`;
21990
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- OK`;
21452
21991
  regs.set(bless_IN_OUT_REG, HostCallResult.OK);
21453
21992
  return;
21454
21993
  }
21455
21994
  const e = updateResult.error;
21456
21995
  // NOTE: `UpdatePrivilegesError.UnprivilegedService` won't happen in 0.7.1+
21457
21996
  if (e === UpdatePrivilegesError.UnprivilegedService) {
21458
- logger_logger.trace `BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- HUH`;
21997
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- HUH`;
21459
21998
  regs.set(bless_IN_OUT_REG, HostCallResult.HUH);
21460
21999
  return;
21461
22000
  }
21462
22001
  if (e === UpdatePrivilegesError.InvalidServiceId) {
21463
- logger_logger.trace `BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- WHO`;
22002
+ logger_logger.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- WHO`;
21464
22003
  regs.set(bless_IN_OUT_REG, HostCallResult.WHO);
21465
22004
  return;
21466
22005
  }
@@ -21490,7 +22029,7 @@ class GasHostCall {
21490
22029
  }
21491
22030
  execute(gas, regs) {
21492
22031
  const gasValue = gas.get();
21493
- logger_logger.trace `GAS <- ${gasValue}`;
22032
+ logger_logger.trace `[${this.currentServiceId}] GAS <- ${gasValue}`;
21494
22033
  regs.set(7, numbers_tryAsU64(gasValue));
21495
22034
  return Promise.resolve(undefined);
21496
22035
  }
@@ -21522,7 +22061,7 @@ class Checkpoint {
21522
22061
  async execute(gas, regs) {
21523
22062
  await this.gasHostCall.execute(gas, regs);
21524
22063
  this.partialState.checkpoint();
21525
- logger_logger.trace `CHECKPOINT()`;
22064
+ logger_logger.trace `[${this.currentServiceId}] CHECKPOINT()`;
21526
22065
  return;
21527
22066
  }
21528
22067
  }
@@ -21562,18 +22101,18 @@ class Designate {
21562
22101
  const memoryReadResult = memory.loadInto(res, validatorsStart);
21563
22102
  // error while reading the memory.
21564
22103
  if (memoryReadResult.isError) {
21565
- logger_logger.trace `DESIGNATE() <- PANIC`;
22104
+ logger_logger.trace `[${this.currentServiceId}] DESIGNATE() <- PANIC`;
21566
22105
  return PvmExecution.Panic;
21567
22106
  }
21568
22107
  const decoder = decoder_Decoder.fromBlob(res);
21569
22108
  const validatorsData = decoder.sequenceFixLen(ValidatorData.Codec, this.chainSpec.validatorsCount);
21570
22109
  const result = this.partialState.updateValidatorsData(tryAsPerValidator(validatorsData, this.chainSpec));
21571
22110
  if (result.isError) {
21572
- logger_logger.trace `DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`;
22111
+ logger_logger.trace `[${this.currentServiceId}] DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`;
21573
22112
  regs.set(designate_IN_OUT_REG, HostCallResult.HUH);
21574
22113
  }
21575
22114
  else {
21576
- logger_logger.trace `DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`;
22115
+ logger_logger.trace `[${this.currentServiceId}] DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`;
21577
22116
  regs.set(designate_IN_OUT_REG, HostCallResult.OK);
21578
22117
  }
21579
22118
  }
@@ -21614,29 +22153,29 @@ class Eject {
21614
22153
  const previousCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
21615
22154
  const memoryReadResult = memory.loadInto(previousCodeHash.raw, preimageHashStart);
21616
22155
  if (memoryReadResult.isError) {
21617
- logger_logger.trace `EJECT(${serviceId}) <- PANIC`;
22156
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}) <- PANIC`;
21618
22157
  return PvmExecution.Panic;
21619
22158
  }
21620
22159
  // cannot eject self
21621
22160
  if (serviceId === this.currentServiceId) {
21622
22161
  regs.set(eject_IN_OUT_REG, HostCallResult.WHO);
21623
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- WHO`;
22162
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- WHO`;
21624
22163
  return;
21625
22164
  }
21626
22165
  const result = this.partialState.eject(serviceId, previousCodeHash);
21627
22166
  // All good!
21628
22167
  if (result.isOk) {
21629
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- OK`;
22168
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- OK`;
21630
22169
  regs.set(eject_IN_OUT_REG, HostCallResult.OK);
21631
22170
  return;
21632
22171
  }
21633
22172
  const e = result.error;
21634
22173
  if (e === EjectError.InvalidService) {
21635
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- WHO ${resultToString(result)}`;
22174
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- WHO ${resultToString(result)}`;
21636
22175
  regs.set(eject_IN_OUT_REG, HostCallResult.WHO);
21637
22176
  }
21638
22177
  else if (e === EjectError.InvalidPreimage) {
21639
- logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- HUH ${resultToString(result)}`;
22178
+ logger_logger.trace `[${this.currentServiceId}] EJECT(${serviceId}, ${previousCodeHash}) <- HUH ${resultToString(result)}`;
21640
22179
  regs.set(eject_IN_OUT_REG, HostCallResult.HUH);
21641
22180
  }
21642
22181
  else {
@@ -21655,9 +22194,9 @@ class Eject {
21655
22194
 
21656
22195
  const forget_IN_OUT_REG = 7;
21657
22196
  /**
21658
- * Mark a preimage hash as unavailable.
22197
+ * Delete preimage hash or mark as unavailable if it was available.
21659
22198
  *
21660
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/382d01382d01?v=0.6.7
22199
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/385d01385d01?v=0.7.2
21661
22200
  */
21662
22201
  class Forget {
21663
22202
  currentServiceId;
@@ -21678,11 +22217,11 @@ class Forget {
21678
22217
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
21679
22218
  // error while reading the memory.
21680
22219
  if (memoryReadResult.isError) {
21681
- logger_logger.trace `FORGET(${hash}, ${length}) <- PANIC`;
22220
+ logger_logger.trace `[${this.currentServiceId}] FORGET(${hash}, ${length}) <- PANIC`;
21682
22221
  return PvmExecution.Panic;
21683
22222
  }
21684
22223
  const result = this.partialState.forgetPreimage(hash.asOpaque(), length);
21685
- logger_logger.trace `FORGET(${hash}, ${length}) <- ${resultToString(result)}`;
22224
+ logger_logger.trace `[${this.currentServiceId}] FORGET(${hash}, ${length}) <- ${resultToString(result)}`;
21686
22225
  if (result.isOk) {
21687
22226
  regs.set(forget_IN_OUT_REG, HostCallResult.OK);
21688
22227
  }
@@ -21739,11 +22278,11 @@ class New {
21739
22278
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
21740
22279
  // error while reading the memory.
21741
22280
  if (memoryReadResult.isError) {
21742
- logger_logger.trace `NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- PANIC`;
22281
+ logger_logger.trace `[${this.currentServiceId}] NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- PANIC`;
21743
22282
  return PvmExecution.Panic;
21744
22283
  }
21745
22284
  const assignedId = this.partialState.newService(codeHash.asOpaque(), codeLength, gas, allowance, gratisStorage, requestedServiceId);
21746
- logger_logger.trace `NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- ${resultToString(assignedId)}`;
22285
+ logger_logger.trace `[${this.currentServiceId}] NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}, ${requestedServiceId}) <- ${resultToString(assignedId)}`;
21747
22286
  if (assignedId.isOk) {
21748
22287
  regs.set(new_IN_OUT_REG, numbers_tryAsU64(assignedId.ok));
21749
22288
  return;
@@ -21803,11 +22342,11 @@ class Provide {
21803
22342
  const preimage = bytes_BytesBlob.blobFrom(safe_alloc_uint8array_safeAllocUint8Array(length));
21804
22343
  const memoryReadResult = memory.loadInto(preimage.raw, preimageStart);
21805
22344
  if (memoryReadResult.isError) {
21806
- logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
22345
+ logger_logger.trace `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
21807
22346
  return PvmExecution.Panic;
21808
22347
  }
21809
22348
  const result = this.partialState.providePreimage(serviceId, preimage);
21810
- logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${resultToString(result)}`;
22349
+ logger_logger.trace `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${resultToString(result)}`;
21811
22350
  if (result.isOk) {
21812
22351
  regs.set(provide_IN_OUT_REG, HostCallResult.OK);
21813
22352
  return;
@@ -21863,35 +22402,35 @@ class Query {
21863
22402
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
21864
22403
  // error while reading the memory.
21865
22404
  if (memoryReadResult.isError) {
21866
- logger_logger.trace `QUERY(${hash}, ${length}) <- PANIC`;
22405
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- PANIC`;
21867
22406
  return PvmExecution.Panic;
21868
22407
  }
21869
22408
  const result = this.partialState.checkPreimageStatus(hash.asOpaque(), length);
21870
22409
  const zero = numbers_tryAsU64(0n);
21871
22410
  if (result === null) {
21872
- logger_logger.trace `QUERY(${hash}, ${length}) <- NONE`;
22411
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- NONE`;
21873
22412
  regs.set(IN_OUT_REG_1, HostCallResult.NONE);
21874
22413
  regs.set(IN_OUT_REG_2, zero);
21875
22414
  return;
21876
22415
  }
21877
22416
  switch (result.status) {
21878
22417
  case PreimageStatusKind.Requested:
21879
- logger_logger.trace `QUERY(${hash}, ${length}) <- REQUESTED`;
22418
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- REQUESTED`;
21880
22419
  regs.set(IN_OUT_REG_1, zero);
21881
22420
  regs.set(IN_OUT_REG_2, zero);
21882
22421
  return;
21883
22422
  case PreimageStatusKind.Available:
21884
- logger_logger.trace `QUERY(${hash}, ${length}) <- AVAILABLE [${result.data}]`;
22423
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- AVAILABLE [${result.data}]`;
21885
22424
  regs.set(IN_OUT_REG_1, numbers_tryAsU64((BigInt(result.data[0]) << UPPER_BITS_SHIFT) + 1n));
21886
22425
  regs.set(IN_OUT_REG_2, zero);
21887
22426
  return;
21888
22427
  case PreimageStatusKind.Unavailable:
21889
- logger_logger.trace `QUERY(${hash}, ${length}) <- UNAVAILABLE [${result.data.join(", ")}]`;
22428
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- UNAVAILABLE [${result.data.join(", ")}]`;
21890
22429
  regs.set(IN_OUT_REG_1, numbers_tryAsU64((BigInt(result.data[0]) << UPPER_BITS_SHIFT) + 2n));
21891
22430
  regs.set(IN_OUT_REG_2, numbers_tryAsU64(result.data[1]));
21892
22431
  return;
21893
22432
  case PreimageStatusKind.Reavailable:
21894
- logger_logger.trace `QUERY(${hash}, ${length}) <- REAVAILABLE [${result.data.join(", ")}]`;
22433
+ logger_logger.trace `[${this.currentServiceId}] QUERY(${hash}, ${length}) <- REAVAILABLE [${result.data.join(", ")}]`;
21895
22434
  regs.set(IN_OUT_REG_1, numbers_tryAsU64((BigInt(result.data[0]) << UPPER_BITS_SHIFT) + 3n));
21896
22435
  regs.set(IN_OUT_REG_2, numbers_tryAsU64((BigInt(result.data[2]) << UPPER_BITS_SHIFT) + BigInt(result.data[1])));
21897
22436
  return;
@@ -21932,11 +22471,11 @@ class Solicit {
21932
22471
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
21933
22472
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
21934
22473
  if (memoryReadResult.isError) {
21935
- logger_logger.trace `SOLICIT(${hash}, ${length}) <- PANIC`;
22474
+ logger_logger.trace `[${this.currentServiceId}] SOLICIT(${hash}, ${length}) <- PANIC`;
21936
22475
  return PvmExecution.Panic;
21937
22476
  }
21938
22477
  const result = this.partialState.requestPreimage(hash.asOpaque(), length);
21939
- logger_logger.trace `SOLICIT(${hash}, ${length}) <- ${resultToString(result)}`;
22478
+ logger_logger.trace `[${this.currentServiceId}] SOLICIT(${hash}, ${length}) <- ${resultToString(result)}`;
21940
22479
  if (result.isOk) {
21941
22480
  regs.set(solicit_IN_OUT_REG, HostCallResult.OK);
21942
22481
  return;
@@ -21994,7 +22533,7 @@ class Transfer {
21994
22533
  */
21995
22534
  basicGasCost = compatibility_Compatibility.isGreaterOrEqual(compatibility_GpVersion.V0_7_2)
21996
22535
  ? gas_tryAsSmallGas(10)
21997
- : (regs) => tryAsGas(10n + regs.get(TRANSFER_GAS_FEE_REG));
22536
+ : (regs) => gas_tryAsGas(10n + regs.get(TRANSFER_GAS_FEE_REG));
21998
22537
  tracedRegisters = traceRegisters(transfer_IN_OUT_REG, AMOUNT_REG, TRANSFER_GAS_FEE_REG, MEMO_START_REG);
21999
22538
  constructor(currentServiceId, partialState) {
22000
22539
  this.currentServiceId = currentServiceId;
@@ -22013,16 +22552,16 @@ class Transfer {
22013
22552
  const memoryReadResult = memory.loadInto(memo.raw, memoStart);
22014
22553
  // page fault while reading the memory.
22015
22554
  if (memoryReadResult.isError) {
22016
- logger_logger.trace `TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- PANIC`;
22555
+ logger_logger.trace `[${this.currentServiceId}] TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- PANIC`;
22017
22556
  return PvmExecution.Panic;
22018
22557
  }
22019
22558
  const transferResult = this.partialState.transfer(destination, amount, transferGasFee, memo);
22020
- logger_logger.trace `TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- ${resultToString(transferResult)}`;
22559
+ logger_logger.trace `[${this.currentServiceId}] TRANSFER(${destination}, ${amount}, ${transferGasFee}, ${memo}) <- ${resultToString(transferResult)}`;
22021
22560
  // All good!
22022
22561
  if (transferResult.isOk) {
22023
22562
  if (compatibility_Compatibility.isGreaterOrEqual(compatibility_GpVersion.V0_7_2)) {
22024
22563
  // substracting value `t`
22025
- const underflow = gas.sub(tryAsGas(transferGasFee));
22564
+ const underflow = gas.sub(gas_tryAsGas(transferGasFee));
22026
22565
  if (underflow) {
22027
22566
  return PvmExecution.OOG;
22028
22567
  }
@@ -22083,11 +22622,11 @@ class Upgrade {
22083
22622
  const codeHash = bytes_Bytes.zero(hash_HASH_SIZE);
22084
22623
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
22085
22624
  if (memoryReadResult.isError) {
22086
- logger_logger.trace `UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`;
22625
+ logger_logger.trace `[${this.currentServiceId}] UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`;
22087
22626
  return PvmExecution.Panic;
22088
22627
  }
22089
22628
  this.partialState.upgradeService(codeHash.asOpaque(), gas, allowance);
22090
- logger_logger.trace `UPGRADE(${codeHash}, ${gas}, ${allowance})`;
22629
+ logger_logger.trace `[${this.currentServiceId}] UPGRADE(${codeHash}, ${gas}, ${allowance})`;
22091
22630
  regs.set(upgrade_IN_OUT_REG, HostCallResult.OK);
22092
22631
  }
22093
22632
  }
@@ -22121,11 +22660,11 @@ class Yield {
22121
22660
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
22122
22661
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
22123
22662
  if (memoryReadResult.isError) {
22124
- logger_logger.trace `YIELD() <- PANIC`;
22663
+ logger_logger.trace `[${this.currentServiceId}] YIELD() <- PANIC`;
22125
22664
  return PvmExecution.Panic;
22126
22665
  }
22127
22666
  this.partialState.yield(hash);
22128
- logger_logger.trace `YIELD(${hash})`;
22667
+ logger_logger.trace `[${this.currentServiceId}] YIELD(${hash})`;
22129
22668
  regs.set(yield_IN_OUT_REG, HostCallResult.OK);
22130
22669
  }
22131
22670
  }
@@ -22168,10 +22707,10 @@ class Fetch {
22168
22707
  const chunk = value === null ? new Uint8Array() : value.raw.subarray(Number(offset), Number(offset + length));
22169
22708
  const storeResult = memory.storeFrom(output, chunk);
22170
22709
  if (storeResult.isError) {
22171
- logger_logger.trace `FETCH(${kind}) <- PANIC`;
22710
+ logger_logger.trace `[${this.currentServiceId}] FETCH(${kind}) <- PANIC`;
22172
22711
  return PvmExecution.Panic;
22173
22712
  }
22174
- logger_logger.trace `FETCH(${kind}) <- ${value?.toStringTruncated()}`;
22713
+ logger_logger.trace `[${this.currentServiceId}] FETCH(${kind}) <- ${value?.toStringTruncated()}`;
22175
22714
  // write result
22176
22715
  regs.set(fetch_IN_OUT_REG, value === null ? HostCallResult.NONE : valueLength);
22177
22716
  }
@@ -22321,7 +22860,7 @@ class LogHostCall {
22321
22860
  }
22322
22861
  memory.loadInto(message, msgStart);
22323
22862
  const level = clampU64ToU32(lvl);
22324
- logger_logger.trace `LOG(${this.currentServiceId}, ${level < Levels.UNKNOWN ? Levels[level] : Levels[Levels.UNKNOWN]}(${lvl}), ${decoder.decode(target)}, ${decoder.decode(message)})`;
22863
+ logger_logger.trace `[${this.currentServiceId}] LOG(${this.currentServiceId}, ${level < Levels.UNKNOWN ? Levels[level] : Levels[Levels.UNKNOWN]}(${lvl}), ${decoder.decode(target)}, ${decoder.decode(message)})`;
22325
22864
  return Promise.resolve(undefined);
22326
22865
  }
22327
22866
  }
@@ -22362,12 +22901,12 @@ class Lookup {
22362
22901
  const preImageHash = bytes_Bytes.zero(hash_HASH_SIZE);
22363
22902
  const memoryReadResult = memory.loadInto(preImageHash.raw, hashAddress);
22364
22903
  if (memoryReadResult.isError) {
22365
- logger_logger.trace `LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`;
22904
+ logger_logger.trace `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`;
22366
22905
  return PvmExecution.Panic;
22367
22906
  }
22368
22907
  // v
22369
22908
  const preImage = this.account.lookup(serviceId, preImageHash);
22370
- logger_logger.trace `LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated() ?? "<missing>"}...`;
22909
+ logger_logger.trace `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated() ?? "<missing>"}...`;
22371
22910
  const preImageLength = preImage === null ? numbers_tryAsU64(0) : numbers_tryAsU64(preImage.raw.length);
22372
22911
  const preimageBlobOffset = regs.get(10);
22373
22912
  const lengthToWrite = regs.get(11);
@@ -22464,20 +23003,20 @@ class Read {
22464
23003
  const chunk = value === null ? safe_alloc_uint8array_safeAllocUint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
22465
23004
  const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
22466
23005
  if (memoryWriteResult.isError) {
22467
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
23006
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- PANIC`;
22468
23007
  return PvmExecution.Panic;
22469
23008
  }
22470
23009
  if (value === null) {
22471
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- NONE`;
23010
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- NONE`;
22472
23011
  regs.set(read_IN_OUT_REG, HostCallResult.NONE);
22473
23012
  return;
22474
23013
  }
22475
23014
  if (chunk.length > 0) {
22476
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`;
23015
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`;
22477
23016
  }
22478
23017
  else {
22479
23018
  // just a query for length of stored data
22480
- logger_logger.trace `READ(${serviceId}, ${rawKey}) <- (${valueLength} ${valueLength === 1n ? "byte" : "bytes"})`;
23019
+ logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- (${valueLength} ${valueLength === 1n ? "byte" : "bytes"})`;
22481
23020
  }
22482
23021
  regs.set(read_IN_OUT_REG, valueLength);
22483
23022
  }
@@ -22521,7 +23060,7 @@ class Write {
22521
23060
  const rawStorageKey = safe_alloc_uint8array_safeAllocUint8Array(storageKeyLengthClamped);
22522
23061
  const keyLoadingResult = memory.loadInto(rawStorageKey, storageKeyStartAddress);
22523
23062
  if (keyLoadingResult.isError) {
22524
- logger_logger.trace `WRITE() <- PANIC`;
23063
+ logger_logger.trace `[${this.currentServiceId}] WRITE() <- PANIC`;
22525
23064
  return PvmExecution.Panic;
22526
23065
  }
22527
23066
  // k
@@ -22531,14 +23070,14 @@ class Write {
22531
23070
  const valueLoadingResult = memory.loadInto(value, valueStart);
22532
23071
  // Note [MaSo] this is ok to return bcs if valueLength is 0, then this panic won't happen
22533
23072
  if (valueLoadingResult.isError) {
22534
- logger_logger.trace `WRITE(${storageKey}) <- PANIC`;
23073
+ logger_logger.trace `[${this.currentServiceId}] WRITE(${storageKey}) <- PANIC`;
22535
23074
  return PvmExecution.Panic;
22536
23075
  }
22537
23076
  /** https://graypaper.fluffylabs.dev/#/9a08063/33af0133b201?v=0.6.6 */
22538
23077
  const maybeValue = valueLength === 0n ? null : bytes_BytesBlob.blobFrom(value);
22539
23078
  // a
22540
23079
  const result = this.account.write(storageKey, maybeValue);
22541
- logger_logger.trace `WRITE(${storageKey}, ${maybeValue?.toStringTruncated() ?? "remove"}) <- ${resultToString(result)}`;
23080
+ logger_logger.trace `[${this.currentServiceId}] WRITE(${storageKey}, ${maybeValue?.toStringTruncated() ?? "remove"}) <- ${resultToString(result)}`;
22542
23081
  if (result.isError) {
22543
23082
  regs.set(write_IN_OUT_REG, HostCallResult.FULL);
22544
23083
  return;
@@ -22767,7 +23306,7 @@ class Accumulate {
22767
23306
  serviceId,
22768
23307
  argsLength: numbers_tryAsU32(transfers.length + operands.length),
22769
23308
  });
22770
- const result = await executor.run(invocationArgs, tryAsGas(gas));
23309
+ const result = await executor.run(invocationArgs, gas_tryAsGas(gas));
22771
23310
  const [newState, checkpoint] = partialState.getStateUpdates();
22772
23311
  /**
22773
23312
  * PVM invocation returned and error so we return the checkpoint
@@ -22968,19 +23507,19 @@ class Accumulate {
22968
23507
  for (let serviceIndex = 0; serviceIndex < serviceIdsLength; serviceIndex += 1) {
22969
23508
  const serviceId = serviceIds[serviceIndex];
22970
23509
  const checkpoint = AccumulationStateUpdate.copyFrom(inputStateUpdate);
22971
- const promise = this.accumulateSingleService(serviceId, accumulateData.getTransfers(serviceId), accumulateData.getOperands(serviceId), accumulateData.getGasLimit(serviceId), slot, entropy, AccumulationStateUpdate.copyFrom(inputStateUpdate)).then(({ consumedGas, stateUpdate }) => ({
22972
- consumedGas,
22973
- stateUpdate: stateUpdate === null ? checkpoint : stateUpdate,
22974
- }));
23510
+ const promise = this.accumulateSingleService(serviceId, accumulateData.getTransfers(serviceId), accumulateData.getOperands(serviceId), accumulateData.getGasLimit(serviceId), slot, entropy, AccumulationStateUpdate.copyFrom(inputStateUpdate)).then(({ consumedGas, stateUpdate }) => {
23511
+ const resultEntry = [
23512
+ serviceId,
23513
+ {
23514
+ consumedGas,
23515
+ stateUpdate: stateUpdate === null ? checkpoint : stateUpdate,
23516
+ },
23517
+ ];
23518
+ return resultEntry;
23519
+ });
22975
23520
  resultPromises[serviceIndex] = promise;
22976
23521
  }
22977
- return Promise.all(resultPromises).then((results) => {
22978
- const map = new Map();
22979
- for (let serviceIndex = 0; serviceIndex < serviceIdsLength; serviceIndex += 1) {
22980
- map.set(serviceIds[serviceIndex], results[serviceIndex]);
22981
- }
22982
- return map;
22983
- });
23522
+ return Promise.all(resultPromises).then((results) => new Map(results));
22984
23523
  }
22985
23524
  /**
22986
23525
  * A method that updates `recentlyAccumulated`, `accumulationQueue` and `timeslot` in state
@@ -23069,9 +23608,10 @@ class Accumulate {
23069
23608
  const _gasCost = gasCost;
23070
23609
  assertEmpty(rest);
23071
23610
  const accumulated = accumulatableReports.subview(0, accumulatedReports);
23072
- const { yieldedRoot, services, transfers: _transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
23611
+ const { yieldedRoot, services, transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
23073
23612
  assertEmpty(stateUpdateRest);
23074
- // yielded root is retrieved after each pvm invocation so we can ignore it here
23613
+ // transfers and yielded root are retrieved after each pvm invocation so we can ignore it here
23614
+ const _transfers = transfers;
23075
23615
  const _yieldedRoot = yieldedRoot;
23076
23616
  if (this.hasDuplicatedServiceIdCreated(services.created)) {
23077
23617
  accumulate_logger.trace `Duplicated Service creation detected. Block is invalid.`;
@@ -23170,7 +23710,7 @@ class DeferredTransfers {
23170
23710
  partiallyUpdatedState.updateServiceInfo(serviceId, newInfo);
23171
23711
  const partialState = new AccumulateExternalities(this.chainSpec, this.blake2b, partiallyUpdatedState, serviceId, serviceId, timeslot);
23172
23712
  const fetchExternalities = FetchExternalities.createForOnTransfer({ entropy, transfers }, this.chainSpec);
23173
- let consumedGas = tryAsGas(0);
23713
+ let consumedGas = gas_tryAsGas(0);
23174
23714
  const hasTransfers = transfers.length > 0;
23175
23715
  const isCodeCorrect = code !== null && code.length <= W_C;
23176
23716
  if (!hasTransfers || !isCodeCorrect) {
@@ -23188,7 +23728,7 @@ class DeferredTransfers {
23188
23728
  const executor = await PvmExecutor.createOnTransferExecutor(serviceId, code, { partialState, fetchExternalities }, this.pvm);
23189
23729
  const args = encoder_Encoder.encodeObject(deferred_transfers_ARGS_CODEC, { timeslot, serviceId, transfersLength: numbers_tryAsU32(transfers.length) }, this.chainSpec);
23190
23730
  const gas = transfers.reduce((acc, item) => acc + item.gas, 0n);
23191
- consumedGas = (await executor.run(args, tryAsGas(gas))).consumedGas;
23731
+ consumedGas = (await executor.run(args, gas_tryAsGas(gas))).consumedGas;
23192
23732
  }
23193
23733
  transferStatistics.set(serviceId, { count: numbers_tryAsU32(transfers.length), gasUsed: tryAsServiceGas(consumedGas) });
23194
23734
  const [updatedState] = partialState.getStateUpdates();
@@ -25002,7 +25542,7 @@ async function createImporter(config) {
25002
25542
  const interpreter = config.workerParams.pvm;
25003
25543
  const blocks = db.getBlocksDb();
25004
25544
  const states = db.getStatesDb();
25005
- const hasher = new TransitionHasher(chainSpec, await keccakHasher, await blake2b);
25545
+ const hasher = new TransitionHasher(await keccakHasher, await blake2b);
25006
25546
  const importer = new Importer(chainSpec, interpreter, hasher, main_logger, blocks, states);
25007
25547
  return {
25008
25548
  importer,