@typeberry/jam 0.1.0-08a9db1 → 0.1.0-b2d0b72

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -28332,29 +28332,17 @@ function isBrowser() {
  * We avoid using `node:assert` to keep compatibility with a browser environment.
  * Note the checks should not have any side effects, since we might decide
  * to remove all of them in a post-processing step.
- */
- function debug_check(condition, message) {
- if (!condition) {
- throw new Error(`Assertion failure: ${message ?? ""}`);
- }
- }
- function cast(_a, condition) {
- return condition;
- }
- /**
- * Yet another function to perform runtime assertions.
- * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
  *
- * In the post-processing step all usages of this functions should be replaced with simple casting. An example:
- * const x = checkAndType<number, CheckedNumber>(y);
- * should be replaced with:
- * const x = y as CheckedNumber;
+ * NOTE the function is intended to be used as tagged template string for the performance
+ * reasons.
  */
- function ensure(a, condition, message) {
- if (cast(a, condition)) {
- return a;
+ function debug_check(strings, condition, ...data) {
+ if (!condition) {
+ // add an empty value so that `data.length === strings.length`
+ data.unshift("");
+ const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
+ throw new Error(`Assertion failure:${message.join("")}`);
  }
- throw new Error(`Assertion failure: ${message ?? ""}`);
  }
  /**
  * The function can be used to make sure that a particular type is `never`
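The rewritten debug_check is invoked as a tagged template: the first interpolation is the condition and the remaining text becomes the failure message, which is only assembled when the check actually fails. A minimal usage sketch (illustrative only, values are hypothetical; not part of the diff):

    const x = -1;
    // old call style (removed):
    //   debug_check(x > 0, `expected a positive number, got ${x}`);
    // new tagged-template style (added):
    debug_check `${x > 0} expected a positive number, got ${x}`;
    // -> throws: Error: Assertion failure: expected a positive number, got -1
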
@@ -28524,7 +28512,7 @@ function resultToString(res) {
  const result_Result = {
  /** Create new [`Result`] with `Ok` status. */
  ok: (ok) => {
- debug_check(ok !== undefined, "`Ok` type cannot be undefined.");
+ debug_check `${ok !== undefined} 'ok' type cannot be undefined.`;
  return {
  isOk: true,
  isError: false,
@@ -28533,7 +28521,7 @@ const result_Result = {
  },
  /** Create new [`Result`] with `Error` status. */
  error: (error, details = "") => {
- debug_check(error !== undefined, "`Error` type cannot be undefined.");
+ debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
  return {
  isOk: false,
  isError: true,
@@ -28817,7 +28805,10 @@ class BitVec {
  constructor(data, bitLength) {
  this.data = data;
  this.bitLength = bitLength;
- debug_check(data.length * 8 >= bitLength, `Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.`);
+ debug_check `
+ ${data.length * 8 >= bitLength}
+ Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
+ `;
  this.byteLength = Math.ceil(bitLength / 8);
  }
  /** Return a raw in-memory representation of this [`BitVec`]. */
@@ -28826,7 +28817,10 @@ class BitVec {
  }
  /** Perform OR operation on all bits in place. */
  sumWith(other) {
- debug_check(other.bitLength === this.bitLength, `Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}`);
+ debug_check `
+ ${other.bitLength === this.bitLength}
+ Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
+ `;
  const otherRaw = other.raw;
  for (let i = 0; i < this.byteLength; i++) {
  this.data[i] |= otherRaw[i];
@@ -28836,7 +28830,7 @@ class BitVec {
  * Set the bit at index `idx` to value `val`.
  */
  setBit(idx, val) {
- debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
+ debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
@@ -28851,7 +28845,7 @@ class BitVec {
  * Return `true` if the bit at index `idx` is set.
  */
  isSet(idx) {
- debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
+ debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
@@ -29018,7 +29012,7 @@ class bytes_BytesBlob {
  }
  /** Create a new [`BytesBlob`] from an array of bytes. */
  static blobFromNumbers(v) {
- debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "BytesBlob.blobFromNumbers used with non-byte number array.");
+ debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} BytesBlob.blobFromNumbers used with non-byte number array.`;
  const arr = new Uint8Array(v);
  return new bytes_BytesBlob(arr);
  }
@@ -29062,7 +29056,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  length;
  constructor(raw, len) {
  super(raw);
- debug_check(raw.byteLength === len, `Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`);
+ debug_check `${raw.byteLength === len} Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`;
  this.length = len;
  }
  /** Create new [`Bytes<X>`] given a backing buffer and it's length. */
@@ -29071,7 +29065,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Create new [`Bytes<X>`] given an array of bytes and it's length. */
  static fromNumbers(v, len) {
- debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "Bytes.fromNumbers used with non-byte number array.");
+ debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} Bytes.fromNumbers used with non-byte number array.`;
  const x = new Uint8Array(v);
  return new bytes_Bytes(x, len);
  }
@@ -29082,7 +29076,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
  /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
  static fill(len, input) {
- debug_check((input & 0xff) === input, "Input has to be a byte.");
+ debug_check `${(input & 0xff) === input} Input has to be a byte.`;
  const bytes = bytes_Bytes.zero(len);
  bytes.raw.fill(input, 0, len);
  return bytes;
@@ -29105,7 +29099,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Compare the sequence to another one. */
  isEqualTo(other) {
- debug_check(this.length === other.length, "Comparing incorrectly typed bytes!");
+ debug_check `${this.length === other.length} Comparing incorrectly typed bytes!`;
  return u8ArraySameLengthEqual(this.raw, other.raw);
  }
  /** Converts current type into some opaque extension. */
@@ -29114,7 +29108,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  }
  function byteFromString(s) {
- debug_check(s.length === 2, "Two-character string expected");
+ debug_check `${s.length === 2} Two-character string expected`;
  const a = numberFromCharCode(s.charCodeAt(0));
  const b = numberFromCharCode(s.charCodeAt(1));
  return (a << 4) | b;
@@ -29168,42 +29162,53 @@ const bytesBlobComparator = (a, b) => a.compare(b);

  ;// CONCATENATED MODULE: ./packages/core/numbers/index.ts

- const asWithBytesRepresentation = (v) => v;
+ const asTypedNumber = (v) => v;
  const MAX_VALUE_U8 = 0xff;
  const MAX_VALUE_U16 = 0xffff;
  const MAX_VALUE_U32 = 0xffff_ffff;
  const MAX_VALUE_U64 = 0xffffffffffffffffn;
  /** Attempt to cast an input number into U8. */
- const tryAsU8 = (v) => ensure(v, isU8(v), `input must have one-byte representation, got ${v}`);
+ const tryAsU8 = (v) => {
+ debug_check `${isU8(v)} input must have one-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U8 number. */
  const isU8 = (v) => (v & MAX_VALUE_U8) === v;
  /** Attempt to cast an input number into U16. */
- const numbers_tryAsU16 = (v) => ensure(v, isU16(v), `input must have two-byte representation, got ${v}`);
+ const numbers_tryAsU16 = (v) => {
+ debug_check `${isU16(v)} input must have two-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U16 number. */
  const isU16 = (v) => (v & MAX_VALUE_U16) === v;
  /** Attempt to cast an input number into U32. */
- const numbers_tryAsU32 = (v) => ensure(v, isU32(v), `input must have four-byte representation, got ${v}`);
+ const numbers_tryAsU32 = (v) => {
+ debug_check `${isU32(v)} input must have four-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U32 number. */
  const isU32 = (v) => (v & MAX_VALUE_U32) >>> 0 === v;
  /** Attempt to cast an input number into U64. */
  const numbers_tryAsU64 = (x) => {
  const v = BigInt(x);
- return ensure(v, isU64(v), `input must have eight-byte representation, got ${x}`);
+ debug_check `${isU64(v)} input must have eight-byte representation, got ${x}`;
+ return asTypedNumber(v);
  };
  /** Check if given number is a valid U64 number. */
  const isU64 = (v) => (v & MAX_VALUE_U64) === v;
  /** Collate two U32 parts into one U64. */
  const u64FromParts = ({ lower, upper }) => {
  const val = (BigInt(upper) << 32n) + BigInt(lower);
- return asWithBytesRepresentation(val);
+ return asTypedNumber(val);
  };
  /** Split U64 into lower & upper parts. */
  const u64IntoParts = (v) => {
- const lower = v & (2n ** 32n - 1n);
- const upper = v >> 32n;
+ // Number(...) safe: both parts are <= 0xffffffff
+ const lower = Number(v & (2n ** 32n - 1n));
+ const upper = Number(v >> 32n);
  return {
- lower: asWithBytesRepresentation(Number(lower)),
- upper: asWithBytesRepresentation(Number(upper)),
+ lower: asTypedNumber(lower),
+ upper: asTypedNumber(upper),
  };
  };
  /**
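As the new comment notes, each half of a U64 fits in 32 bits, so the Number(...) conversion is lossless and the split/collate pair round-trips. A quick sketch mirroring the functions shown above (illustrative values only):

    const v = 0x0123456789abcdefn;
    const { lower, upper } = u64IntoParts(v);
    // lower === 0x89abcdef, upper === 0x01234567 (both plain Numbers <= 0xffffffff)
    const back = u64FromParts({ lower, upper });
    // back === 0x0123456789abcdefn
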
@@ -29243,8 +29248,8 @@ function numbers_u32AsLeBytes(value) {
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
  */
  function leBytesAsU32(uint8Array) {
- debug_check(uint8Array.length === 4, "Input must be a Uint8Array of length 4");
- return asWithBytesRepresentation(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
+ debug_check `${uint8Array.length === 4} Input must be a Uint8Array of length 4`;
+ return asTypedNumber(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
  }
  /** Get the smallest value between U64 a and values given as input parameters. */
  const minU64 = (a, ...values) => values.reduce((min, value) => (value > min ? min : value), a);
@@ -29535,7 +29540,7 @@ class decoder_Decoder {
  this.skip(newOffset - this.offset);
  }
  else {
- debug_check(newOffset >= 0, "The offset has to be positive");
+ debug_check `${newOffset >= 0} The offset has to be positive`;
  this.offset = newOffset;
  }
  }
@@ -29563,7 +29568,7 @@ class decoder_Decoder {
  return num;
  }
  ensureHasBytes(bytes) {
- debug_check(bytes >= 0, "Negative number of bytes given.");
+ debug_check `${bytes >= 0} Negative number of bytes given.`;
  if (this.offset + bytes > this.source.length) {
  throw new Error(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
  }
@@ -29571,7 +29576,7 @@ class decoder_Decoder {
  }
  const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
  function decodeVariableLengthExtraBytes(firstByte) {
- debug_check(firstByte >= 0 && firstByte < 256, `Incorrect byte value: ${firstByte}`);
+ debug_check `${firstByte >= 0 && firstByte < 256} Incorrect byte value: ${firstByte}`;
  for (let i = 0; i < MASKS.length; i++) {
  if (firstByte >= MASKS[i]) {
  return 8 - i;
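For context, the loop above maps the leading-ones prefix of the first byte to the number of extra bytes that follow it. A few illustrative inputs (the fall-through for bytes below 0x80 is outside this hunk and presumably yields 0):

    decodeVariableLengthExtraBytes(0xff); // >= 0xff -> 8 extra bytes
    decodeVariableLengthExtraBytes(0xfe); // >= 0xfe -> 7 extra bytes
    decodeVariableLengthExtraBytes(0xc5); // >= 0xc0 -> 2 extra bytes
    decodeVariableLengthExtraBytes(0x81); // >= 0x80 -> 1 extra byte
    decodeVariableLengthExtraBytes(0x42); // below 0x80 -> presumably 0 (not shown in this hunk)
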
@@ -29726,7 +29731,7 @@ class descriptor_Descriptor {


  function tryAsExactBytes(a) {
- debug_check(a.isExact, "The value is not exact size estimation!");
+ debug_check `${a.isExact} The value is not exact size estimation!`;
  return a.bytes;
  }
  function addSizeHints(a, b) {
@@ -29833,8 +29838,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- debug_check(num < maxNum, "Only for numbers up to 2**64 - 1");
- debug_check(-num <= maxNum / 2n, "Only for numbers down to -2**63");
+ debug_check `${num < maxNum} Only for numbers up to 2**64 - 1`;
+ debug_check `${-num <= maxNum / 2n} Only for numbers down to -2**63`;
  this.ensureBigEnough(8);
  this.dataView.setBigInt64(this.offset, num, true);
  this.offset += 8;
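The comment above argues that a negative value and its already-two's-complemented unsigned form encode to the same bytes via setBigInt64. A standalone sanity check (illustrative, plain DataView API):

    const view = new DataView(new ArrayBuffer(8));
    view.setBigInt64(0, -1n, true);                  // signed -1
    const a = new Uint8Array(view.buffer.slice(0));
    view.setBigInt64(0, 0xffffffffffffffffn, true);  // same value given as two's complement (wraps to -1)
    const b = new Uint8Array(view.buffer.slice(0));
    // a and b are both [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]
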
@@ -29898,8 +29903,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- debug_check(num < maxNum, `Only for numbers up to 2**${BITS * bytesToEncode} - 1`);
- debug_check(-num <= maxNum / 2, `Only for numbers down to -2**${BITS * bytesToEncode - 1}`);
+ debug_check `${num < maxNum} Only for numbers up to 2**${BITS * bytesToEncode} - 1`;
+ debug_check `${-num <= maxNum / 2} Only for numbers down to -2**${BITS * bytesToEncode - 1}`;
  this.ensureBigEnough(bytesToEncode);
  }
  /**
@@ -29910,8 +29915,8 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
  */
  varU32(num) {
- debug_check(num >= 0, "Only for natural numbers.");
- debug_check(num < 2 ** 32, "Only for numbers up to 2**32");
+ debug_check `${num >= 0} Only for natural numbers.`;
+ debug_check `${num < 2 ** 32} Only for numbers up to 2**32`;
  this.varU64(BigInt(num));
  }
  /**
@@ -30062,7 +30067,7 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
  */
  sequenceVarLen(encode, elements) {
- debug_check(elements.length <= 2 ** 32, "Wow, that's a nice long sequence you've got here.");
+ debug_check `${elements.length <= 2 ** 32} Wow, that's a nice long sequence you've got here.`;
  this.varU32(numbers_tryAsU32(elements.length));
  this.sequenceFixLen(encode, elements);
  }
@@ -30083,7 +30088,7 @@ class encoder_Encoder {
  * anyway, so if we really should throw we will.
  */
  ensureBigEnough(length, options = { silent: false }) {
- debug_check(length >= 0, "Negative length given");
+ debug_check `${length >= 0} Negative length given`;
  const newLength = this.offset + length;
  if (newLength > MAX_LENGTH) {
  if (options.silent) {
@@ -30219,10 +30224,12 @@ class ObjectView {
  decodeUpTo(field) {
  const index = this.descriptorsKeys.indexOf(field);
  const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
- debug_check(this.lastDecodedFieldIdx < index, `Unjustified call to 'decodeUpTo' -
+ debug_check `
+ ${this.lastDecodedFieldIdx < index}
+ Unjustified call to 'decodeUpTo' -
  the index ($Blobindex}, ${String(field)})
  is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
- `);
+ `;
  let lastItem = this.cache.get(lastField);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -30238,8 +30245,10 @@ class ObjectView {
  this.cache.set(field, lastItem);
  this.lastDecodedFieldIdx = i;
  }
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
- return last;
+ if (lastItem === undefined) {
+ throw new Error("Last item must be set, since the loop turns at least once.");
+ }
+ return lastItem;
  }
  }
  /**
@@ -30272,8 +30281,10 @@ class SequenceView {
  *[Symbol.iterator]() {
  for (let i = 0; i < this.length; i++) {
  const val = this.get(i);
- const v = ensure(val, val !== undefined, "We are within 0..this.length so all items are defined.");
- yield v;
+ if (val === undefined) {
+ throw new Error("We are within 0..this.length so all items are defined.");
+ }
+ yield val;
  }
  }
  /** Create an array of all views mapped to some particular value. */
@@ -30316,7 +30327,10 @@ class SequenceView {
  return bytes_BytesBlob.blobFrom(this.decoder.source.subarray(this.initialDecoderOffset, this.decoder.bytesRead()));
  }
  decodeUpTo(index) {
- debug_check(this.lastDecodedIdx < index, `Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).`);
+ debug_check `
+ ${this.lastDecodedIdx < index}
+ Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).
+ `;
  let lastItem = this.cache.get(this.lastDecodedIdx);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -30331,8 +30345,10 @@ class SequenceView {
  this.cache.set(i, lastItem);
  this.lastDecodedIdx = i;
  }
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
- return last;
+ if (lastItem === undefined) {
+ throw new Error("Last item must be set, since the loop turns at least once.");
+ }
+ return lastItem;
  }
  }

@@ -30365,7 +30381,10 @@ const TYPICAL_DICTIONARY_LENGTH = 32;
  */
  function readonlyArray(desc) {
  return desc.convert((x) => {
- debug_check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
+ debug_check `
+ ${Array.isArray(x)}
+ Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
+ `;
  // NOTE [ToDr] This assumption is incorrect in general, but it's documented
  // in the general note. We avoid `.slice()` the array for performance reasons.
  return x;
@@ -31352,7 +31371,7 @@ const BLS_KEY_BYTES = 144;
  /** Derive a Bandersnatch public key from a seed. */
  function bandersnatch_publicKey(seed) {
  const key = bandersnatch.derive_public_key(seed);
- check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
+ check `${key[0] === 0} Invalid Bandersnatch public key derived from seed`;
  return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
  }

@@ -31840,7 +31859,7 @@ async function ed25519_verify(input) {
  data.set(signature.raw, offset);
  offset += ED25519_SIGNATURE_BYTES;
  const messageLength = message.length;
- debug_check(messageLength < 256, `Message needs to be shorter than 256 bytes. Got: ${messageLength}`);
+ debug_check `${messageLength < 256} Message needs to be shorter than 256 bytes. Got: ${messageLength}`;
  data[offset] = messageLength;
  offset += 1;
  data.set(message.raw, offset);
@@ -31924,7 +31943,7 @@ class PageAllocator {
  // TODO [ToDr] Benchmark the performance!
  constructor(hashesPerPage) {
  this.hashesPerPage = hashesPerPage;
- check(hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage, "Expected a non-zero integer.");
+ check `${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
  this.resetPage();
  }
  resetPage() {
@@ -32223,8 +32242,8 @@ class MultiMap {
  * if needed.
  */
  constructor(keysLength, keyMappers) {
- check(keysLength > 0, "Keys cannot be empty.");
- check(keyMappers === undefined || keyMappers.length === keysLength, "Incorrect number of key mappers given!");
+ check `${keysLength > 0} Keys cannot be empty.`;
+ check `${keyMappers === undefined || keyMappers.length === keysLength} Incorrect number of key mappers given!`;
  this.data = new Map();
  this.keyMappers = keyMappers === undefined ? Array(keysLength).fill(null) : keyMappers;
  }
@@ -32325,7 +32344,7 @@ class sized_array_FixedSizeArray extends Array {
  this.fixedLength = this.length;
  }
  static new(data, len) {
- debug_check(data.length === len, `Expected an array of size: ${len}, got: ${data.length}`);
+ debug_check `${data.length === len} Expected an array of size: ${len}, got: ${data.length}`;
  const arr = new sized_array_FixedSizeArray(len);
  for (let i = 0; i < len; i++) {
  arr[i] = data[i];
@@ -32459,7 +32478,7 @@ class SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
+ debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
  const comparator = first.comparator;
  const arr1 = first.array;
  const arr1Length = arr1.length;
@@ -32579,7 +32598,7 @@ class SortedSet extends SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
+ debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
  const comparator = first.comparator;
  if (first.length === 0) {
  return SortedSet.fromSortedArray(comparator, second.array);
@@ -33600,7 +33619,10 @@ const common_tryAsCoreIndex = (v) => opaque_asOpaqueType(numbers_tryAsU16(v));
  /** Attempt to convert a number into `Epoch`. */
  const tryAsEpoch = (v) => asOpaqueType(tryAsU32(v));
  function tryAsPerValidator(array, spec) {
- debug_check(array.length === spec.validatorsCount, `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`);
+ debug_check `
+ ${array.length === spec.validatorsCount}
+ Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
+ `;
  return sized_array_asKnownSize(array);
  }
  const codecPerValidator = (val) => codecWithContext((context) => {
@@ -33609,7 +33631,10 @@ const codecPerValidator = (val) => codecWithContext((context) => {
  });
  });
  function tryAsPerEpochBlock(array, spec) {
- debug_check(array.length === spec.epochLength, `Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}`);
+ debug_check `
+ ${array.length === spec.epochLength}
+ Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
+ `;
  return sized_array_asKnownSize(array);
  }
  const codecPerEpochBlock = (val) => codecWithContext((context) => {
@@ -33880,9 +33905,14 @@ class WorkItem extends WithDebug {



+
  /** Verify the value is within the `WorkItemsCount` bounds. */
  function work_package_tryAsWorkItemsCount(len) {
- return ensure(len, len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS, `WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}`);
+ debug_check `
+ ${len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS}
+ WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}
+ `;
+ return tryAsU8(len);
  }
  /** Minimal number of work items in the work package or results in work report. */
  const MIN_NUMBER_OF_WORK_ITEMS = 1;
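With the ensure helper gone, the rewritten tryAsWorkItemsCount performs the bounds assertion and then returns the value cast via tryAsU8. A hypothetical usage sketch (counts are illustrative; the upper bound is defined elsewhere in the package):

    work_package_tryAsWorkItemsCount(4); // within bounds -> returned as a U8-typed number
    work_package_tryAsWorkItemsCount(0); // below MIN_NUMBER_OF_WORK_ITEMS (1) -> Assertion failure
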
@@ -35856,7 +35886,10 @@ class AvailabilityAssignment extends WithDebug {

  /** Check if given array has correct length before casting to the opaque type. */
  function tryAsPerCore(array, spec) {
- debug_check(array.length === spec.coresCount, `Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}`);
+ debug_check `
+ ${array.length === spec.coresCount}
+ Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
+ `;
  return opaque_asOpaqueType(array);
  }
  const codecPerCore = (val) => codecWithContext((context) => {
@@ -37107,7 +37140,7 @@ class InMemoryState extends WithDebug {
  }
  removeServices(servicesRemoved) {
  for (const serviceId of servicesRemoved ?? []) {
- debug_check(this.services.has(serviceId), `Attempting to remove non-existing service: ${serviceId}`);
+ debug_check `${this.services.has(serviceId)} Attempting to remove non-existing service: ${serviceId}`;
  this.services.delete(serviceId);
  }
  }
@@ -37124,7 +37157,10 @@ class InMemoryState extends WithDebug {
  }
  else if (kind === UpdateStorageKind.Remove) {
  const { key } = action;
- debug_check(service.data.storage.has(key.toString()), `Attempting to remove non-existing storage item at ${serviceId}: ${action.key}`);
+ debug_check `
+ ${service.data.storage.has(key.toString())}
+ Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+ `;
  service.data.storage.delete(key.toString());
  }
  else {
@@ -37807,12 +37843,12 @@ class TrieNode {
  }
  /** View this node as a branch node */
  asBranchNode() {
- debug_check(this.getNodeType() === NodeType.Branch);
+ debug_check `${this.getNodeType() === NodeType.Branch} not a branch!`;
  return new BranchNode(this);
  }
  /** View this node as a leaf node */
  asLeafNode() {
- debug_check(this.getNodeType() !== NodeType.Branch);
+ debug_check `${this.getNodeType() !== NodeType.Branch} not a leaf!`;
  return new LeafNode(this);
  }
  toString() {
@@ -38300,7 +38336,7 @@ function createSubtreeForBothLeaves(traversedPath, nodes, leafToReplace, leaf) {
  * Return a single bit from `key` located at `bitIndex`.
  */
  function getBit(key, bitIndex) {
- debug_check(bitIndex < TRUNCATED_KEY_BITS);
+ debug_check `${bitIndex < TRUNCATED_KEY_BITS} invalid bit index passed ${bitIndex}`;
  const byte = bitIndex >>> 3;
  const bit = bitIndex - (byte << 3);
  const mask = 0b10_00_00_00 >>> bit;
@@ -39460,7 +39496,7 @@ class TypedPort {
  * Send a response given the worker that has previously requested something.
  */
  respond(localState, request, data, transferList) {
- debug_check(request.kind === "request");
+ debug_check `${request.kind === "request"}`;
  this.postMessage({
  kind: "response",
  id: request.id,
@@ -39491,10 +39527,11 @@ class TypedPort {
  throw new Error(`Invalid message: ${JSON.stringify(msg)}.`);
  }
  switch (msg.kind) {
- case "response":
- debug_check(this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1);
+ case "response": {
+ debug_check `${this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1}`;
  this.responseListeners.emit(reqEvent(msg.id), null, msg.data, msg.name, msg.localState, msg);
  break;
+ }
  case "signal":
  this.listeners.emit("signal", msg.name, msg.data, msg.localState, msg);
  break;
@@ -39709,9 +39746,9 @@ class MessageChannelStateMachine {
  const promise = new Promise((resolve, reject) => {
  parentPort.once("message", (value) => {
  try {
- debug_check(value.kind === "request", "The initial message should be a request with channel.");
- debug_check(value.name === CHANNEL_MESSAGE);
- debug_check(value.data instanceof external_node_worker_threads_namespaceObject.MessagePort);
+ debug_check `${value.kind === "request"} The initial message should be a request with channel.`;
+ debug_check `${value.name === CHANNEL_MESSAGE}`;
+ debug_check `${value.data instanceof external_node_worker_threads_namespaceObject.MessagePort}`;
  const port = new TypedPort(value.data);
  port.respond(machine.currentState().stateName, value, Ok);
  resolve(port);
@@ -39791,7 +39828,7 @@ class StateMachine {
  /** Get state object by name. */
  getState(name) {
  const state = this.allStates.get(name);
- debug_check(state !== undefined, `Unable to retrieve state object for ${name}.`);
+ debug_check `${state !== undefined} Unable to retrieve state object for ${name}.`;
  return state;
  }
  /** Get the currently active state object. */
@@ -40524,19 +40561,22 @@ class Preimages {

  const NO_OF_REGISTERS = 13;
  const REGISTER_SIZE_SHIFT = 3;
- const tryAsRegisterIndex = (index) => ensure(index, index >= 0 && index <= NO_OF_REGISTERS, `Incorrect register index: ${index}!`);
+ const tryAsRegisterIndex = (index) => {
+ debug_check `${index >= 0 && index < NO_OF_REGISTERS} Incorrect register index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
  class Registers {
  bytes;
  asSigned;
  asUnsigned;
  constructor(bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
  this.bytes = bytes;
- debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
+ debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
  }
  static fromBytes(bytes) {
- debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
+ debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  return new Registers(bytes);
  }
  getBytesAsLittleEndian(index, len) {
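The Registers class keeps a single 13 x 8-byte buffer and overlays signed/unsigned BigInt views on it, so a write through either view is immediately visible in the raw bytes. A standalone sketch of that layout (illustrative, not part of the diff; byte order follows the host platform, little endian on typical Node.js targets):

    const NO_OF_REGISTERS = 13;
    const bytes = new Uint8Array(NO_OF_REGISTERS << 3); // 104 bytes, 8 per register
    const asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
    asUnsigned[0] = 0x0102030405060708n;
    // on a little-endian host: bytes[0..8] === [0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01]
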
@@ -40682,7 +40722,7 @@ class Mask {
  return this.lookupTableForward[index] === 0;
  }
  getNoOfBytesToNextInstruction(index) {
- debug_check(index >= 0, `index (${index}) cannot be a negative number`);
+ debug_check `${index >= 0} index (${index}) cannot be a negative number`;
  return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
  }
  buildLookupTableForward(mask) {
@@ -41682,7 +41722,7 @@ const PAGE_SIZE_SHIFT = 12;
  const PAGE_SIZE = 1 << PAGE_SIZE_SHIFT;
  const MIN_ALLOCATION_SHIFT = (() => {
  const MIN_ALLOCATION_SHIFT = 7;
- debug_check(MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT, "incorrect minimal allocation shift");
+ debug_check `${MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
  return MIN_ALLOCATION_SHIFT;
  })();
  const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
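For reference, the constants in this hunk work out as follows (a quick check of the arithmetic shown above):

    const PAGE_SIZE = 1 << 12;                    // 4096
    const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> 7; // 4096 / 128 = 32
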
@@ -41695,16 +41735,28 @@ const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE;
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts


- const tryAsMemoryIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX, `Incorrect memory index: ${index}!`);
- const tryAsSbrkIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX + 1, `Incorrect sbrk index: ${index}!`);
+ const tryAsMemoryIndex = (index) => {
+ debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
+ const tryAsSbrkIndex = (index) => {
+ debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX + 1} Incorrect sbrk index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };

  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts


  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
- const tryAsPageIndex = (index) => ensure(index, index >= 0 && index < PAGE_SIZE, `Incorect page index: ${index}!`);
+ const tryAsPageIndex = (index) => {
+ debug_check `${index >= 0 && index < PAGE_SIZE}, Incorect page index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
  /** Ensure that given `index` represents an index of one of the pages. */
- const tryAsPageNumber = (index) => ensure(index, index >= 0 && index <= LAST_PAGE_NUMBER, `Incorrect page number: ${index}!`);
+ const tryAsPageNumber = (index) => {
+ debug_check `${index >= 0 && index <= LAST_PAGE_NUMBER}, Incorect page number: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
  /**
  * Get the next page number and wrap the result if it is bigger than LAST_PAGE_NUMBER
  *
@@ -42236,10 +42288,10 @@ class MemoryBuilder {
  */
  setReadablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- debug_check(start < end, "end has to be bigger than start");
- debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(data.length <= end - start, "the initial data is longer than address range");
+ debug_check `${start < end} end has to be bigger than start`;
+ debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${data.length <= end - start} the initial data is longer than address range`;
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -42264,10 +42316,10 @@ class MemoryBuilder {
  */
  setWriteablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- debug_check(start < end, "end has to be bigger than start");
- debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(data.length <= end - start, "the initial data is longer than address range");
+ debug_check `${start < end} end has to be bigger than start`;
+ debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${data.length <= end - start} the initial data is longer than address range`;
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -42289,7 +42341,7 @@ class MemoryBuilder {
  this.ensureNotFinalized();
  const pageOffset = start % PAGE_SIZE;
  const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
- debug_check(data.length <= remainingSpaceOnPage, "The data has to fit into a single page.");
+ debug_check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
  const length = data.length;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -42303,7 +42355,10 @@ class MemoryBuilder {
  return this;
  }
  finalize(startHeapIndex, endHeapIndex) {
- debug_check(startHeapIndex <= endHeapIndex, `startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})`);
+ debug_check `
+ ${startHeapIndex <= endHeapIndex}
+ startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
+ `;
  this.ensureNotFinalized();
  const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
  const pages = PageRange.fromMemoryRange(range);
@@ -42541,7 +42596,7 @@ function mulU64(a, b) {
  *
  * The result of multiplication is a 64-bits number and we are only interested in the part that lands in the upper 32-bits.
  * For example if we multiply `0xffffffff * 0xffffffff`, we get:
-
+
  * | 64-bits | 64-bits |
  * +--------------------+--------------------+
  * | upper | lower |
@@ -42577,7 +42632,7 @@ function mulUpperSS(a, b) {
  return interpretAsSigned(resultLimitedTo64Bits);
  }
  function unsignedRightShiftBigInt(value, shift) {
- debug_check(shift >= 0, "Shift count must be non-negative");
+ debug_check `${shift >= 0} Shift count must be non-negative`;
  const fillBit = value < 0 ? "1" : "0";
  // Convert the BigInt to its binary representation
  const binaryRepresentation = value.toString(2).padStart(64, fillBit);
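As a concrete check of the worked example cited in the mulU64 comment above (0xffffffff * 0xffffffff), the split into upper and lower halves can be reproduced directly with BigInt arithmetic (illustrative, standalone):

    const product = 0xffffffffn * 0xffffffffn; // 0xfffffffe00000001n
    const upper = product >> 32n;              // 0xfffffffen (the part landing in the upper bits)
    const lower = product & 0xffffffffn;       // 0x00000001n
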
@@ -43993,7 +44048,10 @@ class TwoRegsTwoImmsDispatcher {
  class JumpTable {
  indices;
  constructor(itemByteLength, bytes) {
- debug_check(itemByteLength === 0 || bytes.length % itemByteLength === 0, `Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!`);
+ debug_check `
+ ${itemByteLength === 0 || bytes.length % itemByteLength === 0}
+ Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!
+ `;
  const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;
  this.indices = new Uint32Array(length);
  for (let i = 0; i < length; i++) {
@@ -44437,7 +44495,10 @@ class ReturnValue {
  this.consumedGas = consumedGas;
  this.status = status;
  this.memorySlice = memorySlice;
- debug_check((status === null && memorySlice !== null) || (status !== null && memorySlice === null), "`status` and `memorySlice` must not both be null or both be non-null — exactly one must be provided");
+ debug_check `
+ ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
+ 'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
+ `;
  }
  static fromStatus(consumedGas, status) {
  return new ReturnValue(consumedGas, status, null);
@@ -44486,7 +44547,10 @@ class HostCalls {
  if (status !== status_Status.HOST) {
  return this.getReturnValue(status, pvmInstance);
  }
- debug_check(pvmInstance.getExitParam() !== null, "We know that the exit param is not null, because the status is `Status.HOST`");
+ debug_check `
+ ${pvmInstance.getExitParam() !== null}
+ "We know that the exit param is not null, because the status is 'Status.HOST'
+ `;
  const hostCallIndex = pvmInstance.getExitParam() ?? -1;
  const gas = pvmInstance.getGasCounter();
  const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -44546,7 +44610,7 @@ class host_calls_manager_HostCallsManager {
  constructor({ missing, handlers = [], }) {
  this.missing = missing;
  for (const handler of handlers) {
- debug_check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
+ debug_check `${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
  this.hostCalls.set(handler.index, handler);
  }
  }
@@ -44669,7 +44733,7 @@ function getServiceId(serviceId) {
  return null;
  }
  function writeServiceIdAsLeBytes(serviceId, destination) {
- debug_check(destination.length >= SERVICE_ID_BYTES, "Not enough space in the destination.");
+ debug_check `${destination.length >= SERVICE_ID_BYTES} Not enough space in the destination.`;
  destination.set(numbers_u32AsLeBytes(serviceId));
  }
  /** Clamp a U64 to the maximum value of a 32-bit unsigned integer. */
@@ -44758,13 +44822,27 @@ class SpiProgram extends WithDebug {
  this.registers = registers;
  }
  }
+ /**
+ * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
+ *
+ * E_n - little endian encoding, n - length
+ * o - initial read only data
+ * w - initial heap
+ * z - heap pages filled with zeros
+ * s - stack size
+ * c - program code
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
+ */
  function decodeStandardProgram(program, args) {
  const decoder = decoder_Decoder.fromBlob(program);
  const oLength = decoder.u24();
  const wLength = decoder.u24();
- const argsLength = ensure(args.length, args.length <= DATA_LEGNTH, "Incorrect arguments length");
- const readOnlyLength = ensure(oLength, oLength <= DATA_LEGNTH, "Incorrect readonly segment length");
- const heapLength = ensure(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
+ debug_check `${args.length <= DATA_LEGNTH} Incorrect arguments length`;
+ debug_check `${oLength <= DATA_LEGNTH} Incorrect readonly segment length`;
+ const readOnlyLength = oLength;
+ debug_check `${wLength <= DATA_LEGNTH} Incorrect heap segment length`;
+ const heapLength = wLength;
  const noOfHeapZerosPages = decoder.u16();
  const stackSize = decoder.u24();
  const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
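To make the layout in the new doc comment concrete: the fixed header is 3 + 3 + 2 + 3 bytes (|o|, |w|, z, s, all little endian), followed by o, then w, then a 4-byte |c| and the code itself. A hypothetical reader for just the header fields (illustrative only; the package uses its own Decoder, not this helper):

    function readSpiHeader(bytes) {
      // E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s), all little endian
      const u24 = (o) => bytes[o] | (bytes[o + 1] << 8) | (bytes[o + 2] << 16);
      const u16 = (o) => bytes[o] | (bytes[o + 1] << 8);
      return {
        readOnlyLength: u24(0), // |o|
        heapLength: u24(3),     // |w|
        heapZeroPages: u16(6),  // z
        stackSize: u24(8),      // s
      };                        // the read-only segment o starts at offset 11
    }
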
@@ -44780,14 +44858,14 @@ function decodeStandardProgram(program, args) {
  const stackStart = STACK_SEGMENT - memory_utils_alignToPageSize(stackSize);
  const stackEnd = STACK_SEGMENT;
  const argsStart = ARGS_SEGMENT;
- const argsEnd = argsStart + memory_utils_alignToPageSize(argsLength);
- const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(argsLength);
+ const argsEnd = argsStart + memory_utils_alignToPageSize(args.length);
+ const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(args.length);
  function nonEmpty(s) {
  return s !== false;
  }
  const readableMemory = [
  readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
- argsLength > 0 && getMemorySegment(argsStart, argsEnd, args),
+ args.length > 0 && getMemorySegment(argsStart, argsEnd, args),
  argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
  ].filter(nonEmpty);
  const writeableMemory = [
@@ -58875,8 +58953,8 @@ class PartiallyUpdatedState {
  this.stateUpdate.services.preimages.push(newUpdate);
  }
  updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
- debug_check(items >= 0, `storageUtilisationCount has to be a positive number, got: ${items}`);
- debug_check(bytes >= 0, `storageUtilisationBytes has to be a positive number, got: ${bytes}`);
+ debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
+ debug_check `${bytes >= 0} storageUtilisationBytes has to be a positive number, got: ${bytes}`;
  const overflowItems = !isU32(items);
  const overflowBytes = !isU64(bytes);
  // TODO [ToDr] this is not specified in GP, but it seems sensible.
@@ -59301,7 +59379,7 @@ class AccumulateExternalities {
  }
  // TODO [ToDr] Not sure if we should update the service info in that case,
  // but for now we let that case fall-through.
- debug_check(len === PreimageStatusKind.Unavailable);
+ debug_check `${len === PreimageStatusKind.Unavailable} preimage is not unavailable`;
  }
  // make sure we have enough balance for this update
  // https://graypaper.fluffylabs.dev/#/9a08063/381201381601?v=0.6.6
@@ -59797,7 +59875,7 @@ class Assurances {
  return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
  }
  prevValidatorIndex = assurance.validatorIndex;
- debug_check(bitfield.bitLength === coresCount, `Invalid bitfield length of ${bitfield.bitLength}`);
+ debug_check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
  const setBits = bitfield.indicesOfSetBits();
  for (const idx of setBits) {
  perCoreAssurances[idx] += 1;
@@ -62121,7 +62199,7 @@ class DeferredTransfers {
  transferStatistics.set(serviceId, { count: numbers_tryAsU32(transfers.length), gasUsed: common_tryAsServiceGas(consumedGas) });
  const [updatedState, checkpointedState] = partialState.getStateUpdates();
  currentStateUpdate = updatedState;
- debug_check(checkpointedState === null, "On transfer cannot invoke checkpoint.");
+ debug_check `${checkpointedState === null} On transfer cannot invoke checkpoint.`;
  }
  return result_Result.ok({
  // NOTE: we return only services, since it's impossible to update
@@ -62459,7 +62537,7 @@ const ENTROPY_BYTES = 32;
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
  function fisherYatesShuffle(arr, entropy) {
- debug_check(entropy.length === ENTROPY_BYTES, `Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`);
+ debug_check `${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
  const n = arr.length;
  const randomNumbers = hashToNumberSequence(entropy, arr.length);
  const result = new Array(n);
@@ -63306,7 +63384,7 @@ class Statistics {
  /** get statistics for the current epoch */
  const statistics = this.getStatistics(slot);
  const { current, cores, services } = statistics;
- debug_check(current[authorIndex] !== undefined, "authorIndex is out of bounds");
+ debug_check `${current[authorIndex] !== undefined} authorIndex is out of bounds`;
  /** One validator can produce maximal one block per timeslot */
  const newBlocksCount = current[authorIndex].blocks + 1;
  current[authorIndex].blocks = numbers_tryAsU32(newBlocksCount);
  current[authorIndex].blocks = numbers_tryAsU32(newBlocksCount);