@typeberry/jam 0.1.0-3c30204 → 0.1.0-b2d0b72

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
package/index.js CHANGED
@@ -28332,29 +28332,17 @@ function isBrowser() {
28332
28332
  * We avoid using `node:assert` to keep compatibility with a browser environment.
28333
28333
  * Note the checks should not have any side effects, since we might decide
28334
28334
  * to remove all of them in a post-processing step.
28335
- */
28336
- function debug_check(condition, message) {
28337
- if (!condition) {
28338
- throw new Error(`Assertion failure: ${message ?? ""}`);
28339
- }
28340
- }
28341
- function cast(_a, condition) {
28342
- return condition;
28343
- }
28344
- /**
28345
- * Yet another function to perform runtime assertions.
28346
- * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
28347
28335
  *
28348
- * In the post-processing step all usages of this functions should be replaced with simple casting. An example:
28349
- * const x = checkAndType<number, CheckedNumber>(y);
28350
- * should be replaced with:
28351
- * const x = y as CheckedNumber;
28336
+ * NOTE the function is intended to be used as tagged template string for the performance
28337
+ * reasons.
28352
28338
  */
28353
- function ensure(a, condition, message) {
28354
- if (cast(a, condition)) {
28355
- return a;
28339
+ function debug_check(strings, condition, ...data) {
28340
+ if (!condition) {
28341
+ // add an empty value so that `data.length === strings.length`
28342
+ data.unshift("");
28343
+ const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
28344
+ throw new Error(`Assertion failure:${message.join("")}`);
28356
28345
  }
28357
- throw new Error(`Assertion failure: ${message ?? ""}`);
28358
28346
  }
28359
28347
  /**
28360
28348
  * The function can be used to make sure that a particular type is `never`
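For reference, the assertion helper is now invoked as a tagged template rather than a regular function call. The sketch below copies the debug_check implementation from this diff and adds an illustrative call site (the idx and bitLength values are made up); it shows how the tag splits the template into literal pieces, the condition, and message data.

// Tagged-template assertion, copied from the diff above.
// `strings` holds the literal text pieces, the first interpolation is the
// condition, and any further interpolations are data to embed in the message.
function debug_check(strings, condition, ...data) {
  if (!condition) {
    // add an empty value so that `data.length === strings.length`
    data.unshift("");
    const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
    throw new Error(`Assertion failure:${message.join("")}`);
  }
}

// Illustrative call site. The interpolated expressions are evaluated eagerly,
// but the message string is only assembled when the condition is falsy, which
// is presumably the "performance reasons" mentioned in the new doc comment.
const idx = -1;
const bitLength = 8;
try {
  debug_check `${idx >= 0 && idx < bitLength} Index out of bounds. Need ${idx} has ${bitLength}.`;
} catch (e) {
  // The condition's own value is swallowed by the empty placeholder, so only
  // the literal text and the remaining interpolations end up in the message.
  console.log(e.message); // "Assertion failure: Index out of bounds. Need -1 has 8."
}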
@@ -28524,7 +28512,7 @@ function resultToString(res) {
28524
28512
  const result_Result = {
28525
28513
  /** Create new [`Result`] with `Ok` status. */
28526
28514
  ok: (ok) => {
28527
- debug_check(ok !== undefined, "`Ok` type cannot be undefined.");
28515
+ debug_check `${ok !== undefined} 'ok' type cannot be undefined.`;
28528
28516
  return {
28529
28517
  isOk: true,
28530
28518
  isError: false,
@@ -28533,7 +28521,7 @@ const result_Result = {
28533
28521
  },
28534
28522
  /** Create new [`Result`] with `Error` status. */
28535
28523
  error: (error, details = "") => {
28536
- debug_check(error !== undefined, "`Error` type cannot be undefined.");
28524
+ debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
28537
28525
  return {
28538
28526
  isOk: false,
28539
28527
  isError: true,
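Call sites migrate from a positional (condition, message) pair to a single template, as in the Result constructors above:

// before
debug_check(ok !== undefined, "`Ok` type cannot be undefined.");

// after: the first interpolation is the condition, the rest is message text
debug_check `${ok !== undefined} 'ok' type cannot be undefined.`;

Note that the tag only works with backticks: a leftover regular call such as debug_check("some message") would bind the string to strings and leave condition undefined, so it would throw a TypeError (strings.map is not a function) the first time it executes.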
@@ -28817,7 +28805,10 @@ class BitVec {
28817
28805
  constructor(data, bitLength) {
28818
28806
  this.data = data;
28819
28807
  this.bitLength = bitLength;
28820
- debug_check(data.length * 8 >= bitLength, `Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.`);
28808
+ debug_check `
28809
+ ${data.length * 8 >= bitLength}
28810
+ Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
28811
+ `;
28821
28812
  this.byteLength = Math.ceil(bitLength / 8);
28822
28813
  }
28823
28814
  /** Return a raw in-memory representation of this [`BitVec`]. */
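One side effect visible in multi-line call sites such as the BitVec constructor above: the template's newlines and indentation become part of the thrown message, because the tag simply concatenates the literal pieces. A small illustration, reusing the debug_check implementation shown after the first hunk (the data and bitLength values are made up):

const data = new Uint8Array(1); // room for 8 bits
const bitLength = 16;
try {
  debug_check `
  ${data.length * 8 >= bitLength}
  Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
  `;
} catch (e) {
  console.log(JSON.stringify(e.message));
  // "Assertion failure:\n  \n  Not enough bytes in the data array. Need 8 has 16.\n  "
}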
@@ -28826,7 +28817,10 @@ class BitVec {
28826
28817
  }
28827
28818
  /** Perform OR operation on all bits in place. */
28828
28819
  sumWith(other) {
28829
- debug_check(other.bitLength === this.bitLength, `Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}`);
28820
+ debug_check `
28821
+ ${other.bitLength === this.bitLength}
28822
+ Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
28823
+ `;
28830
28824
  const otherRaw = other.raw;
28831
28825
  for (let i = 0; i < this.byteLength; i++) {
28832
28826
  this.data[i] |= otherRaw[i];
@@ -28836,7 +28830,7 @@ class BitVec {
28836
28830
  * Set the bit at index `idx` to value `val`.
28837
28831
  */
28838
28832
  setBit(idx, val) {
28839
- debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
28833
+ debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
28840
28834
  const byteIndex = Math.floor(idx / 8);
28841
28835
  const bitIndexInByte = idx % 8;
28842
28836
  const mask = 1 << bitIndexInByte;
@@ -28851,7 +28845,7 @@ class BitVec {
28851
28845
  * Return `true` if the bit at index `idx` is set.
28852
28846
  */
28853
28847
  isSet(idx) {
28854
- debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
28848
+ debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
28855
28849
  const byteIndex = Math.floor(idx / 8);
28856
28850
  const bitIndexInByte = idx % 8;
28857
28851
  const mask = 1 << bitIndexInByte;
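For orientation, the bit addressing used by setBit and isSet above maps bit idx to byte floor(idx / 8) and mask 1 << (idx % 8); the new checks additionally reject negative indices. A standalone sketch of the same arithmetic (not the bundle's class; the set/clear and the final comparison are assumed, since the hunks cut off right after the mask is computed):

function setBit(data, idx, val) {
  if (!(idx >= 0 && idx < data.length * 8)) {
    throw new Error(`Index out of bounds. Need ${idx} has ${data.length * 8}.`);
  }
  const byteIndex = Math.floor(idx / 8);
  const mask = 1 << (idx % 8);
  // assumed continuation: set or clear the bit in place
  if (val) {
    data[byteIndex] |= mask;
  } else {
    data[byteIndex] &= ~mask;
  }
}

function isSet(data, idx) {
  const byteIndex = Math.floor(idx / 8);
  // assumed continuation: test the bit
  return (data[byteIndex] & (1 << (idx % 8))) !== 0;
}

const bits = new Uint8Array(2); // 16 bits
setBit(bits, 9, true);
console.log(bits[1]);        // 2, i.e. bit 1 of the second byte
console.log(isSet(bits, 9)); // true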
@@ -29018,7 +29012,7 @@ class bytes_BytesBlob {
29018
29012
  }
29019
29013
  /** Create a new [`BytesBlob`] from an array of bytes. */
29020
29014
  static blobFromNumbers(v) {
29021
- debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "BytesBlob.blobFromNumbers used with non-byte number array.");
29015
+ debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} BytesBlob.blobFromNumbers used with non-byte number array.`;
29022
29016
  const arr = new Uint8Array(v);
29023
29017
  return new bytes_BytesBlob(arr);
29024
29018
  }
@@ -29062,7 +29056,7 @@ class bytes_Bytes extends bytes_BytesBlob {
29062
29056
  length;
29063
29057
  constructor(raw, len) {
29064
29058
  super(raw);
29065
- debug_check(raw.byteLength === len, `Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`);
29059
+ debug_check `${raw.byteLength === len} Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`;
29066
29060
  this.length = len;
29067
29061
  }
29068
29062
  /** Create new [`Bytes<X>`] given a backing buffer and it's length. */
@@ -29071,7 +29065,7 @@ class bytes_Bytes extends bytes_BytesBlob {
29071
29065
  }
29072
29066
  /** Create new [`Bytes<X>`] given an array of bytes and it's length. */
29073
29067
  static fromNumbers(v, len) {
29074
- debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "Bytes.fromNumbers used with non-byte number array.");
29068
+ debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} Bytes.fromNumbers used with non-byte number array.`;
29075
29069
  const x = new Uint8Array(v);
29076
29070
  return new bytes_Bytes(x, len);
29077
29071
  }
@@ -29082,7 +29076,7 @@ class bytes_Bytes extends bytes_BytesBlob {
29082
29076
  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
29083
29077
  /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
29084
29078
  static fill(len, input) {
29085
- debug_check((input & 0xff) === input, "Input has to be a byte.");
29079
+ debug_check `${(input & 0xff) === input} Input has to be a byte.`;
29086
29080
  const bytes = bytes_Bytes.zero(len);
29087
29081
  bytes.raw.fill(input, 0, len);
29088
29082
  return bytes;
@@ -29105,7 +29099,7 @@ class bytes_Bytes extends bytes_BytesBlob {
29105
29099
  }
29106
29100
  /** Compare the sequence to another one. */
29107
29101
  isEqualTo(other) {
29108
- debug_check(this.length === other.length, "Comparing incorrectly typed bytes!");
29102
+ debug_check `${this.length === other.length} Comparing incorrectly typed bytes!`;
29109
29103
  return u8ArraySameLengthEqual(this.raw, other.raw);
29110
29104
  }
29111
29105
  /** Converts current type into some opaque extension. */
@@ -29114,7 +29108,7 @@ class bytes_Bytes extends bytes_BytesBlob {
29114
29108
  }
29115
29109
  }
29116
29110
  function byteFromString(s) {
29117
- debug_check(s.length === 2, "Two-character string expected");
29111
+ debug_check `${s.length === 2} Two-character string expected`;
29118
29112
  const a = numberFromCharCode(s.charCodeAt(0));
29119
29113
  const b = numberFromCharCode(s.charCodeAt(1));
29120
29114
  return (a << 4) | b;
@@ -29168,42 +29162,53 @@ const bytesBlobComparator = (a, b) => a.compare(b);
29168
29162
 
29169
29163
  ;// CONCATENATED MODULE: ./packages/core/numbers/index.ts
29170
29164
 
29171
- const asWithBytesRepresentation = (v) => v;
29165
+ const asTypedNumber = (v) => v;
29172
29166
  const MAX_VALUE_U8 = 0xff;
29173
29167
  const MAX_VALUE_U16 = 0xffff;
29174
29168
  const MAX_VALUE_U32 = 0xffff_ffff;
29175
29169
  const MAX_VALUE_U64 = 0xffffffffffffffffn;
29176
29170
  /** Attempt to cast an input number into U8. */
29177
- const tryAsU8 = (v) => ensure(v, isU8(v), `input must have one-byte representation, got ${v}`);
29171
+ const tryAsU8 = (v) => {
29172
+ debug_check `${isU8(v)} input must have one-byte representation, got ${v}`;
29173
+ return asTypedNumber(v);
29174
+ };
29178
29175
  /** Check if given number is a valid U8 number. */
29179
29176
  const isU8 = (v) => (v & MAX_VALUE_U8) === v;
29180
29177
  /** Attempt to cast an input number into U16. */
29181
- const numbers_tryAsU16 = (v) => ensure(v, isU16(v), `input must have two-byte representation, got ${v}`);
29178
+ const numbers_tryAsU16 = (v) => {
29179
+ debug_check `${isU16(v)} input must have two-byte representation, got ${v}`;
29180
+ return asTypedNumber(v);
29181
+ };
29182
29182
  /** Check if given number is a valid U16 number. */
29183
29183
  const isU16 = (v) => (v & MAX_VALUE_U16) === v;
29184
29184
  /** Attempt to cast an input number into U32. */
29185
- const numbers_tryAsU32 = (v) => ensure(v, isU32(v), `input must have four-byte representation, got ${v}`);
29185
+ const numbers_tryAsU32 = (v) => {
29186
+ debug_check `${isU32(v)} input must have four-byte representation, got ${v}`;
29187
+ return asTypedNumber(v);
29188
+ };
29186
29189
  /** Check if given number is a valid U32 number. */
29187
29190
  const isU32 = (v) => (v & MAX_VALUE_U32) >>> 0 === v;
29188
29191
  /** Attempt to cast an input number into U64. */
29189
29192
  const numbers_tryAsU64 = (x) => {
29190
29193
  const v = BigInt(x);
29191
- return ensure(v, isU64(v), `input must have eight-byte representation, got ${x}`);
29194
+ debug_check `${isU64(v)} input must have eight-byte representation, got ${x}`;
29195
+ return asTypedNumber(v);
29192
29196
  };
29193
29197
  /** Check if given number is a valid U64 number. */
29194
29198
  const isU64 = (v) => (v & MAX_VALUE_U64) === v;
29195
29199
  /** Collate two U32 parts into one U64. */
29196
29200
  const u64FromParts = ({ lower, upper }) => {
29197
29201
  const val = (BigInt(upper) << 32n) + BigInt(lower);
29198
- return asWithBytesRepresentation(val);
29202
+ return asTypedNumber(val);
29199
29203
  };
29200
29204
  /** Split U64 into lower & upper parts. */
29201
29205
  const u64IntoParts = (v) => {
29202
- const lower = v & (2n ** 32n - 1n);
29203
- const upper = v >> 32n;
29206
+ // Number(...) safe: both parts are <= 0xffffffff
29207
+ const lower = Number(v & (2n ** 32n - 1n));
29208
+ const upper = Number(v >> 32n);
29204
29209
  return {
29205
- lower: asWithBytesRepresentation(Number(lower)),
29206
- upper: asWithBytesRepresentation(Number(upper)),
29210
+ lower: asTypedNumber(lower),
29211
+ upper: asTypedNumber(upper),
29207
29212
  };
29208
29213
  };
29209
29214
  /**
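The numbers module now brands values via asTypedNumber after an inline debug_check instead of going through the removed ensure helper, and u64IntoParts converts both halves to Number up front. A self-contained round-trip sketch of the split/collate logic and of the U32 validity mask, without the opaque typing:

// Split a u64 BigInt into two 32-bit halves and put it back together,
// mirroring u64IntoParts / u64FromParts above. Number(...) is safe because
// each half is at most 0xffffffff.
function u64IntoParts(v) {
  const lower = Number(v & (2n ** 32n - 1n));
  const upper = Number(v >> 32n);
  return { lower, upper };
}

function u64FromParts({ lower, upper }) {
  return (BigInt(upper) << 32n) + BigInt(lower);
}

const value = 0x0123_4567_89ab_cdefn;
const parts = u64IntoParts(value);
console.log(parts.lower.toString(16));      // "89abcdef"
console.log(parts.upper.toString(16));      // "1234567"
console.log(u64FromParts(parts) === value); // true

// The U32 validity check used by tryAsU32: mask, force unsigned, compare.
const isU32 = (v) => (v & 0xffff_ffff) >>> 0 === v;
console.log(isU32(parts.lower), isU32(-1), isU32(2 ** 32)); // true false false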
@@ -29243,8 +29248,8 @@ function numbers_u32AsLeBytes(value) {
29243
29248
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
29244
29249
  */
29245
29250
  function leBytesAsU32(uint8Array) {
29246
- debug_check(uint8Array.length === 4, "Input must be a Uint8Array of length 4");
29247
- return asWithBytesRepresentation(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
29251
+ debug_check `${uint8Array.length === 4} Input must be a Uint8Array of length 4`;
29252
+ return asTypedNumber(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
29248
29253
  }
29249
29254
  /** Get the smallest value between U64 a and values given as input parameters. */
29250
29255
  const minU64 = (a, ...values) => values.reduce((min, value) => (value > min ? min : value), a);
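A quick worked example of the little-endian interpretation performed by leBytesAsU32 above (standalone, without the typed-number branding):

// Byte 0 is least significant; bytes 1..3 are shifted into place.
const leBytesAsU32 = (b) => b[0] | (b[1] << 8) | (b[2] << 16) | (b[3] << 24);

console.log(leBytesAsU32(Uint8Array.of(0x78, 0x56, 0x34, 0x12)).toString(16)); // "12345678"

Because << operates on signed 32-bit integers in JavaScript, inputs whose last byte is 0x80 or higher come back negative (for example [0, 0, 0, 0xff] yields -16777216); that behaviour is unchanged by this release.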
@@ -29535,7 +29540,7 @@ class decoder_Decoder {
29535
29540
  this.skip(newOffset - this.offset);
29536
29541
  }
29537
29542
  else {
29538
- debug_check(newOffset >= 0, "The offset has to be positive");
29543
+ debug_check `${newOffset >= 0} The offset has to be positive`;
29539
29544
  this.offset = newOffset;
29540
29545
  }
29541
29546
  }
@@ -29563,7 +29568,7 @@ class decoder_Decoder {
29563
29568
  return num;
29564
29569
  }
29565
29570
  ensureHasBytes(bytes) {
29566
- debug_check(bytes >= 0, "Negative number of bytes given.");
29571
+ debug_check `${bytes >= 0} Negative number of bytes given.`;
29567
29572
  if (this.offset + bytes > this.source.length) {
29568
29573
  throw new Error(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
29569
29574
  }
@@ -29571,7 +29576,7 @@ class decoder_Decoder {
29571
29576
  }
29572
29577
  const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
29573
29578
  function decodeVariableLengthExtraBytes(firstByte) {
29574
- debug_check(firstByte >= 0 && firstByte < 256, `Incorrect byte value: ${firstByte}`);
29579
+ debug_check `${firstByte >= 0 && firstByte < 256} Incorrect byte value: ${firstByte}`;
29575
29580
  for (let i = 0; i < MASKS.length; i++) {
29576
29581
  if (firstByte >= MASKS[i]) {
29577
29582
  return 8 - i;
@@ -29726,7 +29731,7 @@ class descriptor_Descriptor {
29726
29731
 
29727
29732
 
29728
29733
  function tryAsExactBytes(a) {
29729
- debug_check(a.isExact, "The value is not exact size estimation!");
29734
+ debug_check `${a.isExact} The value is not exact size estimation!`;
29730
29735
  return a.bytes;
29731
29736
  }
29732
29737
  function addSizeHints(a, b) {
@@ -29833,8 +29838,8 @@ class encoder_Encoder {
29833
29838
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
29834
29839
  // So it does not matter if the argument is a negative value,
29835
29840
  // OR if someone just gave us two-complement already.
29836
- debug_check(num < maxNum, "Only for numbers up to 2**64 - 1");
29837
- debug_check(-num <= maxNum / 2n, "Only for numbers down to -2**63");
29841
+ debug_check `${num < maxNum} Only for numbers up to 2**64 - 1`;
29842
+ debug_check `${-num <= maxNum / 2n} Only for numbers down to -2**63`;
29838
29843
  this.ensureBigEnough(8);
29839
29844
  this.dataView.setBigInt64(this.offset, num, true);
29840
29845
  this.offset += 8;
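The comment above explains why both a negative value and its already-two's-complemented unsigned spelling are accepted. DataView.setBigInt64 stores the value modulo 2 ** 64, so the two spellings produce identical bytes; a small sketch assuming standard DataView semantics (the encoder's range checks are omitted here):

const encode = (num) => {
  const view = new DataView(new ArrayBuffer(8));
  view.setBigInt64(0, num, true); // little endian, wraps modulo 2 ** 64
  return [...new Uint8Array(view.buffer)];
};

console.log(encode(-1n));            // [255, 255, 255, 255, 255, 255, 255, 255]
console.log(encode(2n ** 64n - 1n)); // same bytes: the two's complement of -1
console.log(encode(-(2n ** 63n)));   // [0, 0, 0, 0, 0, 0, 0, 128], the lower bound -2**63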
@@ -29898,8 +29903,8 @@ class encoder_Encoder {
29898
29903
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
29899
29904
  // So it does not matter if the argument is a negative value,
29900
29905
  // OR if someone just gave us two-complement already.
29901
- debug_check(num < maxNum, `Only for numbers up to 2**${BITS * bytesToEncode} - 1`);
29902
- debug_check(-num <= maxNum / 2, `Only for numbers down to -2**${BITS * bytesToEncode - 1}`);
29906
+ debug_check `${num < maxNum} Only for numbers up to 2**${BITS * bytesToEncode} - 1`;
29907
+ debug_check `${-num <= maxNum / 2} Only for numbers down to -2**${BITS * bytesToEncode - 1}`;
29903
29908
  this.ensureBigEnough(bytesToEncode);
29904
29909
  }
29905
29910
  /**
@@ -29910,8 +29915,8 @@ class encoder_Encoder {
29910
29915
  * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
29911
29916
  */
29912
29917
  varU32(num) {
29913
- debug_check(num >= 0, "Only for natural numbers.");
29914
- debug_check(num < 2 ** 32, "Only for numbers up to 2**32");
29918
+ debug_check `${num >= 0} Only for natural numbers.`;
29919
+ debug_check `${num < 2 ** 32} Only for numbers up to 2**32`;
29915
29920
  this.varU64(BigInt(num));
29916
29921
  }
29917
29922
  /**
@@ -30062,7 +30067,7 @@ class encoder_Encoder {
30062
30067
  * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
30063
30068
  */
30064
30069
  sequenceVarLen(encode, elements) {
30065
- debug_check(elements.length <= 2 ** 32, "Wow, that's a nice long sequence you've got here.");
30070
+ debug_check `${elements.length <= 2 ** 32} Wow, that's a nice long sequence you've got here.`;
30066
30071
  this.varU32(numbers_tryAsU32(elements.length));
30067
30072
  this.sequenceFixLen(encode, elements);
30068
30073
  }
@@ -30083,7 +30088,7 @@ class encoder_Encoder {
30083
30088
  * anyway, so if we really should throw we will.
30084
30089
  */
30085
30090
  ensureBigEnough(length, options = { silent: false }) {
30086
- debug_check(length >= 0, "Negative length given");
30091
+ debug_check `${length >= 0} Negative length given`;
30087
30092
  const newLength = this.offset + length;
30088
30093
  if (newLength > MAX_LENGTH) {
30089
30094
  if (options.silent) {
@@ -30219,10 +30224,12 @@ class ObjectView {
30219
30224
  decodeUpTo(field) {
30220
30225
  const index = this.descriptorsKeys.indexOf(field);
30221
30226
  const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
30222
- debug_check(this.lastDecodedFieldIdx < index, `Unjustified call to 'decodeUpTo' -
30227
+ debug_check `
30228
+ ${this.lastDecodedFieldIdx < index}
30229
+ Unjustified call to 'decodeUpTo' -
30223
30230
  the index ($Blobindex}, ${String(field)})
30224
30231
  is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
30225
- `);
30232
+ `;
30226
30233
  let lastItem = this.cache.get(lastField);
30227
30234
  const skipper = new Skipper(this.decoder);
30228
30235
  // now skip all of the fields and further populate the cache.
@@ -30238,8 +30245,10 @@ class ObjectView {
30238
30245
  this.cache.set(field, lastItem);
30239
30246
  this.lastDecodedFieldIdx = i;
30240
30247
  }
30241
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
30242
- return last;
30248
+ if (lastItem === undefined) {
30249
+ throw new Error("Last item must be set, since the loop turns at least once.");
30250
+ }
30251
+ return lastItem;
30243
30252
  }
30244
30253
  }
30245
30254
  /**
@@ -30272,8 +30281,10 @@ class SequenceView {
30272
30281
  *[Symbol.iterator]() {
30273
30282
  for (let i = 0; i < this.length; i++) {
30274
30283
  const val = this.get(i);
30275
- const v = ensure(val, val !== undefined, "We are within 0..this.length so all items are defined.");
30276
- yield v;
30284
+ if (val === undefined) {
30285
+ throw new Error("We are within 0..this.length so all items are defined.");
30286
+ }
30287
+ yield val;
30277
30288
  }
30278
30289
  }
30279
30290
  /** Create an array of all views mapped to some particular value. */
@@ -30316,7 +30327,10 @@ class SequenceView {
30316
30327
  return bytes_BytesBlob.blobFrom(this.decoder.source.subarray(this.initialDecoderOffset, this.decoder.bytesRead()));
30317
30328
  }
30318
30329
  decodeUpTo(index) {
30319
- debug_check(this.lastDecodedIdx < index, `Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).`);
30330
+ debug_check `
30331
+ ${this.lastDecodedIdx < index}
30332
+ Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).
30333
+ `;
30320
30334
  let lastItem = this.cache.get(this.lastDecodedIdx);
30321
30335
  const skipper = new Skipper(this.decoder);
30322
30336
  // now skip all of the fields and further populate the cache.
@@ -30331,8 +30345,10 @@ class SequenceView {
30331
30345
  this.cache.set(i, lastItem);
30332
30346
  this.lastDecodedIdx = i;
30333
30347
  }
30334
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
30335
- return last;
30348
+ if (lastItem === undefined) {
30349
+ throw new Error("Last item must be set, since the loop turns at least once.");
30350
+ }
30351
+ return lastItem;
30336
30352
  }
30337
30353
  }
30338
30354
 
@@ -30365,7 +30381,10 @@ const TYPICAL_DICTIONARY_LENGTH = 32;
30365
30381
  */
30366
30382
  function readonlyArray(desc) {
30367
30383
  return desc.convert((x) => {
30368
- debug_check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
30384
+ debug_check `
30385
+ ${Array.isArray(x)}
30386
+ Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
30387
+ `;
30369
30388
  // NOTE [ToDr] This assumption is incorrect in general, but it's documented
30370
30389
  // in the general note. We avoid `.slice()` the array for performance reasons.
30371
30390
  return x;
@@ -31320,10 +31339,17 @@ async function initAll() {
31320
31339
  await init.ed25519();
31321
31340
  await init.reedSolomon();
31322
31341
  }
31342
+ function initOnce(doInit) {
31343
+ let ready = null;
31344
+ return async () => {
31345
+ if (ready === null) ready = doInit();
31346
+ return await ready;
31347
+ };
31348
+ }
31323
31349
  const init = {
31324
- bandersnatch: async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() }),
31325
- ed25519: async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() }),
31326
- reedSolomon: async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() })
31350
+ bandersnatch: initOnce(async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() })),
31351
+ ed25519: initOnce(async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() })),
31352
+ reedSolomon: initOnce(async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() }))
31327
31353
  };
31328
31354
 
31329
31355
  //#endregion
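The new initOnce wrapper memoizes the first call's promise, so each WASM module is initialized at most once and concurrent callers share the same in-flight promise. Below is the wrapper as defined above, exercised with a dummy initializer (the counter and the string result are illustrative stand-ins for the WASM loaders):

function initOnce(doInit) {
  let ready = null;
  return async () => {
    if (ready === null) ready = doInit();
    return await ready;
  };
}

let calls = 0;
const initFake = initOnce(async () => {
  calls += 1;
  return "module instance";
});

const [a, b] = await Promise.all([initFake(), initFake()]);
console.log(calls, a === b); // 1 true, one underlying init, shared result
await initFake();
console.log(calls);          // still 1

Note that a rejected init promise is cached as well, since ready is never reset on failure; presumably that is acceptable for these one-shot loaders.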
@@ -31345,7 +31371,7 @@ const BLS_KEY_BYTES = 144;
31345
31371
  /** Derive a Bandersnatch public key from a seed. */
31346
31372
  function bandersnatch_publicKey(seed) {
31347
31373
  const key = bandersnatch.derive_public_key(seed);
31348
- check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
31374
+ check `${key[0] === 0} Invalid Bandersnatch public key derived from seed`;
31349
31375
  return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
31350
31376
  }
31351
31377
 
@@ -31833,7 +31859,7 @@ async function ed25519_verify(input) {
31833
31859
  data.set(signature.raw, offset);
31834
31860
  offset += ED25519_SIGNATURE_BYTES;
31835
31861
  const messageLength = message.length;
31836
- debug_check(messageLength < 256, `Message needs to be shorter than 256 bytes. Got: ${messageLength}`);
31862
+ debug_check `${messageLength < 256} Message needs to be shorter than 256 bytes. Got: ${messageLength}`;
31837
31863
  data[offset] = messageLength;
31838
31864
  offset += 1;
31839
31865
  data.set(message.raw, offset);
@@ -31917,7 +31943,7 @@ class PageAllocator {
31917
31943
  // TODO [ToDr] Benchmark the performance!
31918
31944
  constructor(hashesPerPage) {
31919
31945
  this.hashesPerPage = hashesPerPage;
31920
- check(hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage, "Expected a non-zero integer.");
31946
+ check `${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
31921
31947
  this.resetPage();
31922
31948
  }
31923
31949
  resetPage() {
@@ -32216,8 +32242,8 @@ class MultiMap {
32216
32242
  * if needed.
32217
32243
  */
32218
32244
  constructor(keysLength, keyMappers) {
32219
- check(keysLength > 0, "Keys cannot be empty.");
32220
- check(keyMappers === undefined || keyMappers.length === keysLength, "Incorrect number of key mappers given!");
32245
+ check `${keysLength > 0} Keys cannot be empty.`;
32246
+ check `${keyMappers === undefined || keyMappers.length === keysLength} Incorrect number of key mappers given!`;
32221
32247
  this.data = new Map();
32222
32248
  this.keyMappers = keyMappers === undefined ? Array(keysLength).fill(null) : keyMappers;
32223
32249
  }
@@ -32318,7 +32344,7 @@ class sized_array_FixedSizeArray extends Array {
32318
32344
  this.fixedLength = this.length;
32319
32345
  }
32320
32346
  static new(data, len) {
32321
- debug_check(data.length === len, `Expected an array of size: ${len}, got: ${data.length}`);
32347
+ debug_check `${data.length === len} Expected an array of size: ${len}, got: ${data.length}`;
32322
32348
  const arr = new sized_array_FixedSizeArray(len);
32323
32349
  for (let i = 0; i < len; i++) {
32324
32350
  arr[i] = data[i];
@@ -32452,7 +32478,7 @@ class SortedArray {
32452
32478
  }
32453
32479
  /** Create a new SortedSet from two sorted collections. */
32454
32480
  static fromTwoSortedCollections(first, second) {
32455
- debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
32481
+ debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
32456
32482
  const comparator = first.comparator;
32457
32483
  const arr1 = first.array;
32458
32484
  const arr1Length = arr1.length;
@@ -32572,7 +32598,7 @@ class SortedSet extends SortedArray {
32572
32598
  }
32573
32599
  /** Create a new SortedSet from two sorted collections. */
32574
32600
  static fromTwoSortedCollections(first, second) {
32575
- debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
32601
+ debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
32576
32602
  const comparator = first.comparator;
32577
32603
  if (first.length === 0) {
32578
32604
  return SortedSet.fromSortedArray(comparator, second.array);
@@ -33593,7 +33619,10 @@ const common_tryAsCoreIndex = (v) => opaque_asOpaqueType(numbers_tryAsU16(v));
33593
33619
  /** Attempt to convert a number into `Epoch`. */
33594
33620
  const tryAsEpoch = (v) => asOpaqueType(tryAsU32(v));
33595
33621
  function tryAsPerValidator(array, spec) {
33596
- debug_check(array.length === spec.validatorsCount, `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`);
33622
+ debug_check `
33623
+ ${array.length === spec.validatorsCount}
33624
+ Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
33625
+ `;
33597
33626
  return sized_array_asKnownSize(array);
33598
33627
  }
33599
33628
  const codecPerValidator = (val) => codecWithContext((context) => {
@@ -33602,7 +33631,10 @@ const codecPerValidator = (val) => codecWithContext((context) => {
33602
33631
  });
33603
33632
  });
33604
33633
  function tryAsPerEpochBlock(array, spec) {
33605
- debug_check(array.length === spec.epochLength, `Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}`);
33634
+ debug_check `
33635
+ ${array.length === spec.epochLength}
33636
+ Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
33637
+ `;
33606
33638
  return sized_array_asKnownSize(array);
33607
33639
  }
33608
33640
  const codecPerEpochBlock = (val) => codecWithContext((context) => {
@@ -33873,9 +33905,14 @@ class WorkItem extends WithDebug {
33873
33905
 
33874
33906
 
33875
33907
 
33908
+
33876
33909
  /** Verify the value is within the `WorkItemsCount` bounds. */
33877
33910
  function work_package_tryAsWorkItemsCount(len) {
33878
- return ensure(len, len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS, `WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}`);
33911
+ debug_check `
33912
+ ${len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS}
33913
+ WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}
33914
+ `;
33915
+ return tryAsU8(len);
33879
33916
  }
33880
33917
  /** Minimal number of work items in the work package or results in work report. */
33881
33918
  const MIN_NUMBER_OF_WORK_ITEMS = 1;
@@ -35849,7 +35886,10 @@ class AvailabilityAssignment extends WithDebug {
35849
35886
 
35850
35887
  /** Check if given array has correct length before casting to the opaque type. */
35851
35888
  function tryAsPerCore(array, spec) {
35852
- debug_check(array.length === spec.coresCount, `Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}`);
35889
+ debug_check `
35890
+ ${array.length === spec.coresCount}
35891
+ Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
35892
+ `;
35853
35893
  return opaque_asOpaqueType(array);
35854
35894
  }
35855
35895
  const codecPerCore = (val) => codecWithContext((context) => {
@@ -37100,7 +37140,7 @@ class InMemoryState extends WithDebug {
37100
37140
  }
37101
37141
  removeServices(servicesRemoved) {
37102
37142
  for (const serviceId of servicesRemoved ?? []) {
37103
- debug_check(this.services.has(serviceId), `Attempting to remove non-existing service: ${serviceId}`);
37143
+ debug_check `${this.services.has(serviceId)} Attempting to remove non-existing service: ${serviceId}`;
37104
37144
  this.services.delete(serviceId);
37105
37145
  }
37106
37146
  }
@@ -37117,7 +37157,10 @@ class InMemoryState extends WithDebug {
37117
37157
  }
37118
37158
  else if (kind === UpdateStorageKind.Remove) {
37119
37159
  const { key } = action;
37120
- debug_check(service.data.storage.has(key.toString()), `Attempting to remove non-existing storage item at ${serviceId}: ${action.key}`);
37160
+ debug_check `
37161
+ ${service.data.storage.has(key.toString())}
37162
+ Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
37163
+ `;
37121
37164
  service.data.storage.delete(key.toString());
37122
37165
  }
37123
37166
  else {
@@ -37800,12 +37843,12 @@ class TrieNode {
37800
37843
  }
37801
37844
  /** View this node as a branch node */
37802
37845
  asBranchNode() {
37803
- debug_check(this.getNodeType() === NodeType.Branch);
37846
+ debug_check `${this.getNodeType() === NodeType.Branch} not a branch!`;
37804
37847
  return new BranchNode(this);
37805
37848
  }
37806
37849
  /** View this node as a leaf node */
37807
37850
  asLeafNode() {
37808
- debug_check(this.getNodeType() !== NodeType.Branch);
37851
+ debug_check `${this.getNodeType() !== NodeType.Branch} not a leaf!`;
37809
37852
  return new LeafNode(this);
37810
37853
  }
37811
37854
  toString() {
@@ -38293,7 +38336,7 @@ function createSubtreeForBothLeaves(traversedPath, nodes, leafToReplace, leaf) {
38293
38336
  * Return a single bit from `key` located at `bitIndex`.
38294
38337
  */
38295
38338
  function getBit(key, bitIndex) {
38296
- debug_check(bitIndex < TRUNCATED_KEY_BITS);
38339
+ debug_check `${bitIndex < TRUNCATED_KEY_BITS} invalid bit index passed ${bitIndex}`;
38297
38340
  const byte = bitIndex >>> 3;
38298
38341
  const bit = bitIndex - (byte << 3);
38299
38342
  const mask = 0b10_00_00_00 >>> bit;
@@ -39453,7 +39496,7 @@ class TypedPort {
39453
39496
  * Send a response given the worker that has previously requested something.
39454
39497
  */
39455
39498
  respond(localState, request, data, transferList) {
39456
- debug_check(request.kind === "request");
39499
+ debug_check `${request.kind === "request"}`;
39457
39500
  this.postMessage({
39458
39501
  kind: "response",
39459
39502
  id: request.id,
@@ -39484,10 +39527,11 @@ class TypedPort {
39484
39527
  throw new Error(`Invalid message: ${JSON.stringify(msg)}.`);
39485
39528
  }
39486
39529
  switch (msg.kind) {
39487
- case "response":
39488
- debug_check(this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1);
39530
+ case "response": {
39531
+ debug_check `${this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1}`;
39489
39532
  this.responseListeners.emit(reqEvent(msg.id), null, msg.data, msg.name, msg.localState, msg);
39490
39533
  break;
39534
+ }
39491
39535
  case "signal":
39492
39536
  this.listeners.emit("signal", msg.name, msg.data, msg.localState, msg);
39493
39537
  break;
@@ -39702,9 +39746,9 @@ class MessageChannelStateMachine {
39702
39746
  const promise = new Promise((resolve, reject) => {
39703
39747
  parentPort.once("message", (value) => {
39704
39748
  try {
39705
- debug_check(value.kind === "request", "The initial message should be a request with channel.");
39706
- debug_check(value.name === CHANNEL_MESSAGE);
39707
- debug_check(value.data instanceof external_node_worker_threads_namespaceObject.MessagePort);
39749
+ debug_check `${value.kind === "request"} The initial message should be a request with channel.`;
39750
+ debug_check `${value.name === CHANNEL_MESSAGE}`;
39751
+ debug_check `${value.data instanceof external_node_worker_threads_namespaceObject.MessagePort}`;
39708
39752
  const port = new TypedPort(value.data);
39709
39753
  port.respond(machine.currentState().stateName, value, Ok);
39710
39754
  resolve(port);
@@ -39784,7 +39828,7 @@ class StateMachine {
39784
39828
  /** Get state object by name. */
39785
39829
  getState(name) {
39786
39830
  const state = this.allStates.get(name);
39787
- debug_check(state !== undefined, `Unable to retrieve state object for ${name}.`);
39831
+ debug_check `${state !== undefined} Unable to retrieve state object for ${name}.`;
39788
39832
  return state;
39789
39833
  }
39790
39834
  /** Get the currently active state object. */
@@ -40517,19 +40561,22 @@ class Preimages {
40517
40561
 
40518
40562
  const NO_OF_REGISTERS = 13;
40519
40563
  const REGISTER_SIZE_SHIFT = 3;
40520
- const tryAsRegisterIndex = (index) => ensure(index, index >= 0 && index <= NO_OF_REGISTERS, `Incorrect register index: ${index}!`);
40564
+ const tryAsRegisterIndex = (index) => {
40565
+ debug_check `${index >= 0 && index < NO_OF_REGISTERS} Incorrect register index: ${index}!`;
40566
+ return opaque_asOpaqueType(index);
40567
+ };
40521
40568
  class Registers {
40522
40569
  bytes;
40523
40570
  asSigned;
40524
40571
  asUnsigned;
40525
40572
  constructor(bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
40526
40573
  this.bytes = bytes;
40527
- debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
40574
+ debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
40528
40575
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
40529
40576
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
40530
40577
  }
40531
40578
  static fromBytes(bytes) {
40532
- debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
40579
+ debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
40533
40580
  return new Registers(bytes);
40534
40581
  }
40535
40582
  getBytesAsLittleEndian(index, len) {
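Two details stand out in the Registers hunk: the register-index bound is now exclusive (index < NO_OF_REGISTERS, i.e. 0..12 for 13 registers, whereas the previous ensure call also accepted 13), and the signed and unsigned views share the same backing bytes. A standalone sketch of the shared-buffer behaviour (illustrative, not the bundle's class):

const NO_OF_REGISTERS = 13;
const REGISTER_SIZE_SHIFT = 3; // 8 bytes per register

const bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT);
const asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
const asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);

asSigned[0] = -1n;
console.log(asUnsigned[0] === 0xffff_ffff_ffff_ffffn); // true, same memory seen through two views
console.log(bytes.slice(0, 8));                        // Uint8Array(8) [255, 255, 255, 255, 255, 255, 255, 255]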
@@ -40675,7 +40722,7 @@ class Mask {
40675
40722
  return this.lookupTableForward[index] === 0;
40676
40723
  }
40677
40724
  getNoOfBytesToNextInstruction(index) {
40678
- debug_check(index >= 0, `index (${index}) cannot be a negative number`);
40725
+ debug_check `${index >= 0} index (${index}) cannot be a negative number`;
40679
40726
  return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
40680
40727
  }
40681
40728
  buildLookupTableForward(mask) {
@@ -41675,7 +41722,7 @@ const PAGE_SIZE_SHIFT = 12;
41675
41722
  const PAGE_SIZE = 1 << PAGE_SIZE_SHIFT;
41676
41723
  const MIN_ALLOCATION_SHIFT = (() => {
41677
41724
  const MIN_ALLOCATION_SHIFT = 7;
41678
- debug_check(MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT, "incorrect minimal allocation shift");
41725
+ debug_check `${MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
41679
41726
  return MIN_ALLOCATION_SHIFT;
41680
41727
  })();
41681
41728
  const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
@@ -41688,16 +41735,28 @@ const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE;
41688
41735
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts
41689
41736
 
41690
41737
 
41691
- const tryAsMemoryIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX, `Incorrect memory index: ${index}!`);
41692
- const tryAsSbrkIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX + 1, `Incorrect sbrk index: ${index}!`);
41738
+ const tryAsMemoryIndex = (index) => {
41739
+ debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
41740
+ return opaque_asOpaqueType(index);
41741
+ };
41742
+ const tryAsSbrkIndex = (index) => {
41743
+ debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX + 1} Incorrect sbrk index: ${index}!`;
41744
+ return opaque_asOpaqueType(index);
41745
+ };
41693
41746
 
41694
41747
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts
41695
41748
 
41696
41749
 
41697
41750
  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
41698
- const tryAsPageIndex = (index) => ensure(index, index >= 0 && index < PAGE_SIZE, `Incorect page index: ${index}!`);
41751
+ const tryAsPageIndex = (index) => {
41752
+ debug_check `${index >= 0 && index < PAGE_SIZE}, Incorect page index: ${index}!`;
41753
+ return opaque_asOpaqueType(index);
41754
+ };
41699
41755
  /** Ensure that given `index` represents an index of one of the pages. */
41700
- const tryAsPageNumber = (index) => ensure(index, index >= 0 && index <= LAST_PAGE_NUMBER, `Incorrect page number: ${index}!`);
41756
+ const tryAsPageNumber = (index) => {
41757
+ debug_check `${index >= 0 && index <= LAST_PAGE_NUMBER}, Incorect page number: ${index}!`;
41758
+ return opaque_asOpaqueType(index);
41759
+ };
41701
41760
  /**
41702
41761
  * Get the next page number and wrap the result if it is bigger than LAST_PAGE_NUMBER
41703
41762
  *
@@ -42229,10 +42288,10 @@ class MemoryBuilder {
42229
42288
  */
42230
42289
  setReadablePages(start, end, data = new Uint8Array()) {
42231
42290
  this.ensureNotFinalized();
42232
- debug_check(start < end, "end has to be bigger than start");
42233
- debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
42234
- debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
42235
- debug_check(data.length <= end - start, "the initial data is longer than address range");
42291
+ debug_check `${start < end} end has to be bigger than start`;
42292
+ debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
42293
+ debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
42294
+ debug_check `${data.length <= end - start} the initial data is longer than address range`;
42236
42295
  const length = end - start;
42237
42296
  const range = MemoryRange.fromStartAndLength(start, length);
42238
42297
  this.ensureNoReservedMemoryUsage(range);
@@ -42257,10 +42316,10 @@ class MemoryBuilder {
42257
42316
  */
42258
42317
  setWriteablePages(start, end, data = new Uint8Array()) {
42259
42318
  this.ensureNotFinalized();
42260
- debug_check(start < end, "end has to be bigger than start");
42261
- debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
42262
- debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
42263
- debug_check(data.length <= end - start, "the initial data is longer than address range");
42319
+ debug_check `${start < end} end has to be bigger than start`;
42320
+ debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
42321
+ debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
42322
+ debug_check `${data.length <= end - start} the initial data is longer than address range`;
42264
42323
  const length = end - start;
42265
42324
  const range = MemoryRange.fromStartAndLength(start, length);
42266
42325
  this.ensureNoReservedMemoryUsage(range);
@@ -42282,7 +42341,7 @@ class MemoryBuilder {
42282
42341
  this.ensureNotFinalized();
42283
42342
  const pageOffset = start % PAGE_SIZE;
42284
42343
  const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
42285
- debug_check(data.length <= remainingSpaceOnPage, "The data has to fit into a single page.");
42344
+ debug_check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
42286
42345
  const length = data.length;
42287
42346
  const range = MemoryRange.fromStartAndLength(start, length);
42288
42347
  this.ensureNoReservedMemoryUsage(range);
@@ -42296,7 +42355,10 @@ class MemoryBuilder {
42296
42355
  return this;
42297
42356
  }
42298
42357
  finalize(startHeapIndex, endHeapIndex) {
42299
- debug_check(startHeapIndex <= endHeapIndex, `startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})`);
42358
+ debug_check `
42359
+ ${startHeapIndex <= endHeapIndex}
42360
+ startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
42361
+ `;
42300
42362
  this.ensureNotFinalized();
42301
42363
  const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
42302
42364
  const pages = PageRange.fromMemoryRange(range);
@@ -42534,7 +42596,7 @@ function mulU64(a, b) {
42534
42596
  *
42535
42597
  * The result of multiplication is a 64-bits number and we are only interested in the part that lands in the upper 32-bits.
42536
42598
  * For example if we multiply `0xffffffff * 0xffffffff`, we get:
42537
-
42599
+
42538
42600
  * | 64-bits | 64-bits |
42539
42601
  * +--------------------+--------------------+
42540
42602
  * | upper | lower |
@@ -42570,7 +42632,7 @@ function mulUpperSS(a, b) {
42570
42632
  return interpretAsSigned(resultLimitedTo64Bits);
42571
42633
  }
42572
42634
  function unsignedRightShiftBigInt(value, shift) {
42573
- debug_check(shift >= 0, "Shift count must be non-negative");
42635
+ debug_check `${shift >= 0} Shift count must be non-negative`;
42574
42636
  const fillBit = value < 0 ? "1" : "0";
42575
42637
  // Convert the BigInt to its binary representation
42576
42638
  const binaryRepresentation = value.toString(2).padStart(64, fillBit);
@@ -43986,7 +44048,10 @@ class TwoRegsTwoImmsDispatcher {
43986
44048
  class JumpTable {
43987
44049
  indices;
43988
44050
  constructor(itemByteLength, bytes) {
43989
- debug_check(itemByteLength === 0 || bytes.length % itemByteLength === 0, `Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!`);
44051
+ debug_check `
44052
+ ${itemByteLength === 0 || bytes.length % itemByteLength === 0}
44053
+ Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!
44054
+ `;
43990
44055
  const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;
43991
44056
  this.indices = new Uint32Array(length);
43992
44057
  for (let i = 0; i < length; i++) {
@@ -44430,7 +44495,10 @@ class ReturnValue {
44430
44495
  this.consumedGas = consumedGas;
44431
44496
  this.status = status;
44432
44497
  this.memorySlice = memorySlice;
44433
- debug_check((status === null && memorySlice !== null) || (status !== null && memorySlice === null), "`status` and `memorySlice` must not both be null or both be non-null — exactly one must be provided");
44498
+ debug_check `
44499
+ ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
44500
+ 'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
44501
+ `;
44434
44502
  }
44435
44503
  static fromStatus(consumedGas, status) {
44436
44504
  return new ReturnValue(consumedGas, status, null);
@@ -44479,7 +44547,10 @@ class HostCalls {
44479
44547
  if (status !== status_Status.HOST) {
44480
44548
  return this.getReturnValue(status, pvmInstance);
44481
44549
  }
44482
- debug_check(pvmInstance.getExitParam() !== null, "We know that the exit param is not null, because the status is `Status.HOST`");
44550
+ debug_check `
44551
+ ${pvmInstance.getExitParam() !== null}
44552
+ "We know that the exit param is not null, because the status is 'Status.HOST'
44553
+ `;
44483
44554
  const hostCallIndex = pvmInstance.getExitParam() ?? -1;
44484
44555
  const gas = pvmInstance.getGasCounter();
44485
44556
  const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -44539,7 +44610,7 @@ class host_calls_manager_HostCallsManager {
44539
44610
  constructor({ missing, handlers = [], }) {
44540
44611
  this.missing = missing;
44541
44612
  for (const handler of handlers) {
44542
- debug_check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
44613
+ debug_check `${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
44543
44614
  this.hostCalls.set(handler.index, handler);
44544
44615
  }
44545
44616
  }
@@ -44662,7 +44733,7 @@ function getServiceId(serviceId) {
44662
44733
  return null;
44663
44734
  }
44664
44735
  function writeServiceIdAsLeBytes(serviceId, destination) {
44665
- debug_check(destination.length >= SERVICE_ID_BYTES, "Not enough space in the destination.");
44736
+ debug_check `${destination.length >= SERVICE_ID_BYTES} Not enough space in the destination.`;
44666
44737
  destination.set(numbers_u32AsLeBytes(serviceId));
44667
44738
  }
44668
44739
  /** Clamp a U64 to the maximum value of a 32-bit unsigned integer. */
@@ -44751,13 +44822,27 @@ class SpiProgram extends WithDebug {
44751
44822
  this.registers = registers;
44752
44823
  }
44753
44824
  }
44825
+ /**
44826
+ * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
44827
+ *
44828
+ * E_n - little endian encoding, n - length
44829
+ * o - initial read only data
44830
+ * w - initial heap
44831
+ * z - heap pages filled with zeros
44832
+ * s - stack size
44833
+ * c - program code
44834
+ *
44835
+ * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
44836
+ */
44754
44837
  function decodeStandardProgram(program, args) {
44755
44838
  const decoder = decoder_Decoder.fromBlob(program);
44756
44839
  const oLength = decoder.u24();
44757
44840
  const wLength = decoder.u24();
44758
- const argsLength = ensure(args.length, args.length <= DATA_LEGNTH, "Incorrect arguments length");
44759
- const readOnlyLength = ensure(oLength, oLength <= DATA_LEGNTH, "Incorrect readonly segment length");
44760
- const heapLength = ensure(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
44841
+ debug_check `${args.length <= DATA_LEGNTH} Incorrect arguments length`;
44842
+ debug_check `${oLength <= DATA_LEGNTH} Incorrect readonly segment length`;
44843
+ const readOnlyLength = oLength;
44844
+ debug_check `${wLength <= DATA_LEGNTH} Incorrect heap segment length`;
44845
+ const heapLength = wLength;
44761
44846
  const noOfHeapZerosPages = decoder.u16();
44762
44847
  const stackSize = decoder.u24();
44763
44848
  const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
@@ -44773,14 +44858,14 @@ function decodeStandardProgram(program, args) {
44773
44858
  const stackStart = STACK_SEGMENT - memory_utils_alignToPageSize(stackSize);
44774
44859
  const stackEnd = STACK_SEGMENT;
44775
44860
  const argsStart = ARGS_SEGMENT;
44776
- const argsEnd = argsStart + memory_utils_alignToPageSize(argsLength);
44777
- const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(argsLength);
44861
+ const argsEnd = argsStart + memory_utils_alignToPageSize(args.length);
44862
+ const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(args.length);
44778
44863
  function nonEmpty(s) {
44779
44864
  return s !== false;
44780
44865
  }
44781
44866
  const readableMemory = [
44782
44867
  readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
44783
- argsLength > 0 && getMemorySegment(argsStart, argsEnd, args),
44868
+ args.length > 0 && getMemorySegment(argsStart, argsEnd, args),
44784
44869
  argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
44785
44870
  ].filter(nonEmpty);
44786
44871
  const writeableMemory = [
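The new doc comment above pins down the standard-program container layout. The sketch below builds such a blob following that layout; the field order and widths come from the comment, while the helper name and the sample payloads are made up:

// Layout: E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
// where E_n is an n-byte little-endian integer.
function buildSpiBlob(o, w, z, s, c) {
  const out = [];
  const pushLe = (value, byteLength) => {
    for (let i = 0; i < byteLength; i++) out.push((value >>> (8 * i)) & 0xff);
  };
  pushLe(o.length, 3); // |o|
  pushLe(w.length, 3); // |w|
  pushLe(z, 2);        // heap pages filled with zeros
  pushLe(s, 3);        // stack size
  out.push(...o, ...w);
  pushLe(c.length, 4); // |c|
  out.push(...c);
  return new Uint8Array(out);
}

const blob = buildSpiBlob(
  Uint8Array.of(1, 2),       // o: initial read-only data
  Uint8Array.of(3),          // w: initial heap
  4,                         // z
  4096,                      // s
  Uint8Array.of(0xaa, 0xbb), // c: program code
);
console.log(blob.length); // 3 + 3 + 2 + 3 + 2 + 1 + 4 + 2 = 20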
@@ -58868,8 +58953,8 @@ class PartiallyUpdatedState {
58868
58953
  this.stateUpdate.services.preimages.push(newUpdate);
58869
58954
  }
58870
58955
  updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
58871
- debug_check(items >= 0, `storageUtilisationCount has to be a positive number, got: ${items}`);
58872
- debug_check(bytes >= 0, `storageUtilisationBytes has to be a positive number, got: ${bytes}`);
58956
+ debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
58957
+ debug_check `${bytes >= 0} storageUtilisationBytes has to be a positive number, got: ${bytes}`;
58873
58958
  const overflowItems = !isU32(items);
58874
58959
  const overflowBytes = !isU64(bytes);
58875
58960
  // TODO [ToDr] this is not specified in GP, but it seems sensible.
@@ -59294,7 +59379,7 @@ class AccumulateExternalities {
59294
59379
  }
59295
59380
  // TODO [ToDr] Not sure if we should update the service info in that case,
59296
59381
  // but for now we let that case fall-through.
59297
- debug_check(len === PreimageStatusKind.Unavailable);
59382
+ debug_check `${len === PreimageStatusKind.Unavailable} preimage is not unavailable`;
59298
59383
  }
59299
59384
  // make sure we have enough balance for this update
59300
59385
  // https://graypaper.fluffylabs.dev/#/9a08063/381201381601?v=0.6.6
@@ -59790,7 +59875,7 @@ class Assurances {
59790
59875
  return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
59791
59876
  }
59792
59877
  prevValidatorIndex = assurance.validatorIndex;
59793
- debug_check(bitfield.bitLength === coresCount, `Invalid bitfield length of ${bitfield.bitLength}`);
59878
+ debug_check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
59794
59879
  const setBits = bitfield.indicesOfSetBits();
59795
59880
  for (const idx of setBits) {
59796
59881
  perCoreAssurances[idx] += 1;
@@ -62114,7 +62199,7 @@ class DeferredTransfers {
62114
62199
  transferStatistics.set(serviceId, { count: numbers_tryAsU32(transfers.length), gasUsed: common_tryAsServiceGas(consumedGas) });
62115
62200
  const [updatedState, checkpointedState] = partialState.getStateUpdates();
62116
62201
  currentStateUpdate = updatedState;
62117
- debug_check(checkpointedState === null, "On transfer cannot invoke checkpoint.");
62202
+ debug_check `${checkpointedState === null} On transfer cannot invoke checkpoint.`;
62118
62203
  }
62119
62204
  return result_Result.ok({
62120
62205
  // NOTE: we return only services, since it's impossible to update
@@ -62452,7 +62537,7 @@ const ENTROPY_BYTES = 32;
62452
62537
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
62453
62538
  */
62454
62539
  function fisherYatesShuffle(arr, entropy) {
62455
- debug_check(entropy.length === ENTROPY_BYTES, `Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`);
62540
+ debug_check `${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
62456
62541
  const n = arr.length;
62457
62542
  const randomNumbers = hashToNumberSequence(entropy, arr.length);
62458
62543
  const result = new Array(n);
@@ -63299,7 +63384,7 @@ class Statistics {
63299
63384
  /** get statistics for the current epoch */
63300
63385
  const statistics = this.getStatistics(slot);
63301
63386
  const { current, cores, services } = statistics;
63302
- debug_check(current[authorIndex] !== undefined, "authorIndex is out of bounds");
63387
+ debug_check `${current[authorIndex] !== undefined} authorIndex is out of bounds`;
63303
63388
  /** One validator can produce maximal one block per timeslot */
63304
63389
  const newBlocksCount = current[authorIndex].blocks + 1;
63305
63390
  current[authorIndex].blocks = numbers_tryAsU32(newBlocksCount);