@typeberry/jam 0.1.0-b2d0b72 → 0.1.0-eb00e84

This diff shows the changes between two publicly released versions of the package, exactly as they appear in their public registries. It is provided for informational purposes only.
@@ -3664,17 +3664,29 @@ function isBrowser() {
  * We avoid using `node:assert` to keep compatibility with a browser environment.
  * Note the checks should not have any side effects, since we might decide
  * to remove all of them in a post-processing step.
- *
- * NOTE the function is intended to be used as tagged template string for the performance
- * reasons.
  */
- function debug_check(strings, condition, ...data) {
+ function debug_check(condition, message) {
  if (!condition) {
- // add an empty value so that `data.length === strings.length`
- data.unshift("");
- const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
- throw new Error(`Assertion failure:${message.join("")}`);
+ throw new Error(`Assertion failure: ${message ?? ""}`);
+ }
+ }
+ function cast(_a, condition) {
+ return condition;
+ }
+ /**
+ * Yet another function to perform runtime assertions.
+ * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
+ *
+ * In the post-processing step all usages of this functions should be replaced with simple casting. An example:
+ * const x = checkAndType<number, CheckedNumber>(y);
+ * should be replaced with:
+ * const x = y as CheckedNumber;
+ */
+ function debug_ensure(a, condition, message) {
+ if (cast(a, condition)) {
+ return a;
  }
+ throw new Error(`Assertion failure: ${message ?? ""}`);
  }
  /**
  * The function can be used to make sure that a particular type is `never`
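For orientation, the hunk above replaces the tagged-template form of `debug_check` with a plain `(condition, message)` function and adds `debug_ensure`, which returns its first argument once the condition holds. A minimal, illustrative sketch of the new call shape (not part of the package; `value` is a made-up variable), mirroring call sites that appear further down in this diff:

    const value = 7;
    // old, removed style: debug_check `${value >= 0} value must be non-negative: ${value}`;
    debug_check(value >= 0, `value must be non-negative: ${value}`);
    // debug_ensure passes the checked value through, so callers can stay expression-shaped,
    // e.g. the numbers_tryAsU8 rewrite later in this diff:
    const byte = debug_ensure(value, (value & 0xff) === value, `input must have one-byte representation, got ${value}`);
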
@@ -3844,7 +3856,7 @@ function resultToString(res) {
  const result_Result = {
  /** Create new [`Result`] with `Ok` status. */
  ok: (ok) => {
- debug_check `${ok !== undefined} 'ok' type cannot be undefined.`;
+ debug_check(ok !== undefined, "`Ok` type cannot be undefined.");
  return {
  isOk: true,
  isError: false,
@@ -3853,7 +3865,7 @@ const result_Result = {
  },
  /** Create new [`Result`] with `Error` status. */
  error: (error, details = "") => {
- debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
+ debug_check(error !== undefined, "`Error` type cannot be undefined.");
  return {
  isOk: false,
  isError: true,
@@ -4137,10 +4149,7 @@ class bitvec_BitVec {
  constructor(data, bitLength) {
  this.data = data;
  this.bitLength = bitLength;
- debug_check `
- ${data.length * 8 >= bitLength}
- Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
- `;
+ debug_check(data.length * 8 >= bitLength, `Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.`);
  this.byteLength = Math.ceil(bitLength / 8);
  }
  /** Return a raw in-memory representation of this [`BitVec`]. */
@@ -4149,10 +4158,7 @@ class bitvec_BitVec {
  }
  /** Perform OR operation on all bits in place. */
  sumWith(other) {
- debug_check `
- ${other.bitLength === this.bitLength}
- Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
- `;
+ debug_check(other.bitLength === this.bitLength, `Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}`);
  const otherRaw = other.raw;
  for (let i = 0; i < this.byteLength; i++) {
  this.data[i] |= otherRaw[i];
@@ -4162,7 +4168,7 @@ class bitvec_BitVec {
  * Set the bit at index `idx` to value `val`.
  */
  setBit(idx, val) {
- debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
+ debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
@@ -4177,7 +4183,7 @@ class bitvec_BitVec {
  * Return `true` if the bit at index `idx` is set.
  */
  isSet(idx) {
- debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
+ debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
@@ -4344,7 +4350,7 @@ class bytes_BytesBlob {
  }
  /** Create a new [`BytesBlob`] from an array of bytes. */
  static blobFromNumbers(v) {
- debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} BytesBlob.blobFromNumbers used with non-byte number array.`;
+ debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "BytesBlob.blobFromNumbers used with non-byte number array.");
  const arr = new Uint8Array(v);
  return new bytes_BytesBlob(arr);
  }
@@ -4388,7 +4394,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  length;
  constructor(raw, len) {
  super(raw);
- debug_check `${raw.byteLength === len} Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`;
+ debug_check(raw.byteLength === len, `Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`);
  this.length = len;
  }
  /** Create new [`Bytes<X>`] given a backing buffer and it's length. */
@@ -4397,7 +4403,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Create new [`Bytes<X>`] given an array of bytes and it's length. */
  static fromNumbers(v, len) {
- debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} Bytes.fromNumbers used with non-byte number array.`;
+ debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "Bytes.fromNumbers used with non-byte number array.");
  const x = new Uint8Array(v);
  return new bytes_Bytes(x, len);
  }
@@ -4408,7 +4414,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
  /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
  static fill(len, input) {
- debug_check `${(input & 0xff) === input} Input has to be a byte.`;
+ debug_check((input & 0xff) === input, "Input has to be a byte.");
  const bytes = bytes_Bytes.zero(len);
  bytes.raw.fill(input, 0, len);
  return bytes;
@@ -4431,7 +4437,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Compare the sequence to another one. */
  isEqualTo(other) {
- debug_check `${this.length === other.length} Comparing incorrectly typed bytes!`;
+ debug_check(this.length === other.length, "Comparing incorrectly typed bytes!");
  return u8ArraySameLengthEqual(this.raw, other.raw);
  }
  /** Converts current type into some opaque extension. */
@@ -4440,7 +4446,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  }
  function byteFromString(s) {
- debug_check `${s.length === 2} Two-character string expected`;
+ debug_check(s.length === 2, "Two-character string expected");
  const a = numberFromCharCode(s.charCodeAt(0));
  const b = numberFromCharCode(s.charCodeAt(1));
  return (a << 4) | b;
@@ -4494,53 +4500,42 @@ const bytesBlobComparator = (a, b) => a.compare(b);

  ;// CONCATENATED MODULE: ./packages/core/numbers/index.ts

- const asTypedNumber = (v) => v;
+ const asWithBytesRepresentation = (v) => v;
  const MAX_VALUE_U8 = 0xff;
  const MAX_VALUE_U16 = 0xffff;
  const MAX_VALUE_U32 = 0xffff_ffff;
  const MAX_VALUE_U64 = 0xffffffffffffffffn;
  /** Attempt to cast an input number into U8. */
- const numbers_tryAsU8 = (v) => {
- debug_check `${isU8(v)} input must have one-byte representation, got ${v}`;
- return asTypedNumber(v);
- };
+ const numbers_tryAsU8 = (v) => debug_ensure(v, isU8(v), `input must have one-byte representation, got ${v}`);
  /** Check if given number is a valid U8 number. */
  const isU8 = (v) => (v & MAX_VALUE_U8) === v;
  /** Attempt to cast an input number into U16. */
- const numbers_tryAsU16 = (v) => {
- debug_check `${isU16(v)} input must have two-byte representation, got ${v}`;
- return asTypedNumber(v);
- };
+ const numbers_tryAsU16 = (v) => debug_ensure(v, isU16(v), `input must have two-byte representation, got ${v}`);
  /** Check if given number is a valid U16 number. */
  const isU16 = (v) => (v & MAX_VALUE_U16) === v;
  /** Attempt to cast an input number into U32. */
- const numbers_tryAsU32 = (v) => {
- debug_check `${isU32(v)} input must have four-byte representation, got ${v}`;
- return asTypedNumber(v);
- };
+ const numbers_tryAsU32 = (v) => debug_ensure(v, isU32(v), `input must have four-byte representation, got ${v}`);
  /** Check if given number is a valid U32 number. */
  const isU32 = (v) => (v & MAX_VALUE_U32) >>> 0 === v;
  /** Attempt to cast an input number into U64. */
  const numbers_tryAsU64 = (x) => {
  const v = BigInt(x);
- debug_check `${isU64(v)} input must have eight-byte representation, got ${x}`;
- return asTypedNumber(v);
+ return debug_ensure(v, isU64(v), `input must have eight-byte representation, got ${x}`);
  };
  /** Check if given number is a valid U64 number. */
  const isU64 = (v) => (v & MAX_VALUE_U64) === v;
  /** Collate two U32 parts into one U64. */
  const u64FromParts = ({ lower, upper }) => {
  const val = (BigInt(upper) << 32n) + BigInt(lower);
- return asTypedNumber(val);
+ return asWithBytesRepresentation(val);
  };
  /** Split U64 into lower & upper parts. */
  const numbers_u64IntoParts = (v) => {
- // Number(...) safe: both parts are <= 0xffffffff
- const lower = Number(v & (2n ** 32n - 1n));
- const upper = Number(v >> 32n);
+ const lower = v & (2n ** 32n - 1n);
+ const upper = v >> 32n;
  return {
- lower: asTypedNumber(lower),
- upper: asTypedNumber(upper),
+ lower: asWithBytesRepresentation(Number(lower)),
+ upper: asWithBytesRepresentation(Number(upper)),
  };
  };
  /**
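The split/collate pair above keeps the 32-bit halves as BigInts until the final `Number(...)` conversion. A small round-trip sketch of the same arithmetic (illustrative only; the sample value is made up):

    const v = 0x0123_4567_89ab_cdefn;
    const lower = Number(v & (2n ** 32n - 1n)); // 0x89abcdef
    const upper = Number(v >> 32n);             // 0x01234567
    // collate back, as u64FromParts does:
    console.log(((BigInt(upper) << 32n) + BigInt(lower)) === v); // true
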
@@ -4580,8 +4575,8 @@ function numbers_u32AsLeBytes(value) {
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
  */
  function leBytesAsU32(uint8Array) {
- check `${uint8Array.length === 4} Input must be a Uint8Array of length 4`;
- return asTypedNumber(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
+ check(uint8Array.length === 4, "Input must be a Uint8Array of length 4");
+ return asWithBytesRepresentation(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
  }
  /** Get the smallest value between U64 a and values given as input parameters. */
  const minU64 = (a, ...values) => values.reduce((min, value) => (value > min ? min : value), a);
@@ -4872,7 +4867,7 @@ class decoder_Decoder {
  this.skip(newOffset - this.offset);
  }
  else {
- debug_check `${newOffset >= 0} The offset has to be positive`;
+ debug_check(newOffset >= 0, "The offset has to be positive");
  this.offset = newOffset;
  }
  }
@@ -4900,7 +4895,7 @@ class decoder_Decoder {
  return num;
  }
  ensureHasBytes(bytes) {
- debug_check `${bytes >= 0} Negative number of bytes given.`;
+ debug_check(bytes >= 0, "Negative number of bytes given.");
  if (this.offset + bytes > this.source.length) {
  throw new Error(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
  }
@@ -4908,7 +4903,7 @@ class decoder_Decoder {
  }
  const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
  function decodeVariableLengthExtraBytes(firstByte) {
- debug_check `${firstByte >= 0 && firstByte < 256} Incorrect byte value: ${firstByte}`;
+ debug_check(firstByte >= 0 && firstByte < 256, `Incorrect byte value: ${firstByte}`);
  for (let i = 0; i < MASKS.length; i++) {
  if (firstByte >= MASKS[i]) {
  return 8 - i;
@@ -5063,7 +5058,7 @@ class descriptor_Descriptor {


  function tryAsExactBytes(a) {
- check `${a.isExact} The value is not exact size estimation!`;
+ check(a.isExact, "The value is not exact size estimation!");
  return a.bytes;
  }
  function addSizeHints(a, b) {
@@ -5170,8 +5165,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- debug_check `${num < maxNum} Only for numbers up to 2**64 - 1`;
- debug_check `${-num <= maxNum / 2n} Only for numbers down to -2**63`;
+ debug_check(num < maxNum, "Only for numbers up to 2**64 - 1");
+ debug_check(-num <= maxNum / 2n, "Only for numbers down to -2**63");
  this.ensureBigEnough(8);
  this.dataView.setBigInt64(this.offset, num, true);
  this.offset += 8;
@@ -5235,8 +5230,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- debug_check `${num < maxNum} Only for numbers up to 2**${BITS * bytesToEncode} - 1`;
- debug_check `${-num <= maxNum / 2} Only for numbers down to -2**${BITS * bytesToEncode - 1}`;
+ debug_check(num < maxNum, `Only for numbers up to 2**${BITS * bytesToEncode} - 1`);
+ debug_check(-num <= maxNum / 2, `Only for numbers down to -2**${BITS * bytesToEncode - 1}`);
  this.ensureBigEnough(bytesToEncode);
  }
  /**
@@ -5247,8 +5242,8 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
  */
  varU32(num) {
- debug_check `${num >= 0} Only for natural numbers.`;
- debug_check `${num < 2 ** 32} Only for numbers up to 2**32`;
+ debug_check(num >= 0, "Only for natural numbers.");
+ debug_check(num < 2 ** 32, "Only for numbers up to 2**32");
  this.varU64(BigInt(num));
  }
  /**
@@ -5399,7 +5394,7 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
  */
  sequenceVarLen(encode, elements) {
- debug_check `${elements.length <= 2 ** 32} Wow, that's a nice long sequence you've got here.`;
+ debug_check(elements.length <= 2 ** 32, "Wow, that's a nice long sequence you've got here.");
  this.varU32(numbers_tryAsU32(elements.length));
  this.sequenceFixLen(encode, elements);
  }
@@ -5420,7 +5415,7 @@ class encoder_Encoder {
  * anyway, so if we really should throw we will.
  */
  ensureBigEnough(length, options = { silent: false }) {
- debug_check `${length >= 0} Negative length given`;
+ debug_check(length >= 0, "Negative length given");
  const newLength = this.offset + length;
  if (newLength > MAX_LENGTH) {
  if (options.silent) {
@@ -5556,12 +5551,10 @@ class ObjectView {
  decodeUpTo(field) {
  const index = this.descriptorsKeys.indexOf(field);
  const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
- debug_check `
- ${this.lastDecodedFieldIdx < index}
- Unjustified call to 'decodeUpTo' -
+ debug_check(this.lastDecodedFieldIdx < index, `Unjustified call to 'decodeUpTo' -
  the index ($Blobindex}, ${String(field)})
  is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
- `;
+ `);
  let lastItem = this.cache.get(lastField);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -5577,10 +5570,8 @@ class ObjectView {
  this.cache.set(field, lastItem);
  this.lastDecodedFieldIdx = i;
  }
- if (lastItem === undefined) {
- throw new Error("Last item must be set, since the loop turns at least once.");
- }
- return lastItem;
+ const last = debug_ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
+ return last;
  }
  }
  /**
@@ -5613,10 +5604,8 @@ class SequenceView {
  *[Symbol.iterator]() {
  for (let i = 0; i < this.length; i++) {
  const val = this.get(i);
- if (val === undefined) {
- throw new Error("We are within 0..this.length so all items are defined.");
- }
- yield val;
+ const v = debug_ensure(val, val !== undefined, "We are within 0..this.length so all items are defined.");
+ yield v;
  }
  }
  /** Create an array of all views mapped to some particular value. */
@@ -5659,10 +5648,7 @@ class SequenceView {
  return bytes_BytesBlob.blobFrom(this.decoder.source.subarray(this.initialDecoderOffset, this.decoder.bytesRead()));
  }
  decodeUpTo(index) {
- debug_check `
- ${this.lastDecodedIdx < index}
- Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).
- `;
+ debug_check(this.lastDecodedIdx < index, `Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).`);
  let lastItem = this.cache.get(this.lastDecodedIdx);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -5677,10 +5663,8 @@ class SequenceView {
  this.cache.set(i, lastItem);
  this.lastDecodedIdx = i;
  }
- if (lastItem === undefined) {
- throw new Error("Last item must be set, since the loop turns at least once.");
- }
- return lastItem;
+ const last = debug_ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
+ return last;
  }
  }

@@ -5713,10 +5697,7 @@ const TYPICAL_DICTIONARY_LENGTH = 32;
  */
  function readonlyArray(desc) {
  return desc.convert((x) => {
- debug_check `
- ${Array.isArray(x)}
- Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
- `;
+ debug_check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
  // NOTE [ToDr] This assumption is incorrect in general, but it's documented
  // in the general note. We avoid `.slice()` the array for performance reasons.
  return x;
@@ -6671,17 +6652,10 @@ async function initAll() {
  await init.ed25519();
  await init.reedSolomon();
  }
- function initOnce(doInit) {
- let ready = null;
- return async () => {
- if (ready === null) ready = doInit();
- return await ready;
- };
- }
  const init = {
- bandersnatch: initOnce(async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() })),
- ed25519: initOnce(async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() })),
- reedSolomon: initOnce(async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() }))
+ bandersnatch: async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() }),
+ ed25519: async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() }),
+ reedSolomon: async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() })
  };

  //#endregion
@@ -6703,7 +6677,7 @@ const BLS_KEY_BYTES = 144;
  /** Derive a Bandersnatch public key from a seed. */
  function bandersnatch_publicKey(seed) {
  const key = bandersnatch.derive_public_key(seed);
- check `${key[0] === 0} Invalid Bandersnatch public key derived from seed`;
+ check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
  return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
  }

@@ -6763,7 +6737,7 @@ async function verify(input) {
  data.set(signature.raw, offset);
  offset += ED25519_SIGNATURE_BYTES;
  const messageLength = message.length;
- check `${messageLength < 256} Message needs to be shorter than 256 bytes. Got: ${messageLength}`;
+ check(messageLength < 256, `Message needs to be shorter than 256 bytes. Got: ${messageLength}`);
  data[offset] = messageLength;
  offset += 1;
  data.set(message.raw, offset);
@@ -6792,7 +6766,6 @@ async function verifyBatch(input) {

  ;// CONCATENATED MODULE: ./packages/core/hash/hash.ts

-
  /**
  * Size of the output of the hash functions.
  *
@@ -6802,7 +6775,6 @@ async function verifyBatch(input) {
  const hash_HASH_SIZE = 32;
  /** A hash without last byte (useful for trie representation). */
  const TRUNCATED_HASH_SIZE = 31;
- const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE);
  /**
  * Container for some object with a hash that is related to this object.
  *
@@ -6847,7 +6819,7 @@ class PageAllocator {
  // TODO [ToDr] Benchmark the performance!
  constructor(hashesPerPage) {
  this.hashesPerPage = hashesPerPage;
- check `${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
+ check(hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage, "Expected a non-zero integer.");
  this.resetPage();
  }
  resetPage() {
@@ -7146,8 +7118,8 @@ class MultiMap {
  * if needed.
  */
  constructor(keysLength, keyMappers) {
- check `${keysLength > 0} Keys cannot be empty.`;
- check `${keyMappers === undefined || keyMappers.length === keysLength} Incorrect number of key mappers given!`;
+ check(keysLength > 0, "Keys cannot be empty.");
+ check(keyMappers === undefined || keyMappers.length === keysLength, "Incorrect number of key mappers given!");
  this.data = new Map();
  this.keyMappers = keyMappers === undefined ? Array(keysLength).fill(null) : keyMappers;
  }
@@ -7248,7 +7220,7 @@ class sized_array_FixedSizeArray extends Array {
  this.fixedLength = this.length;
  }
  static new(data, len) {
- debug_check `${data.length === len} Expected an array of size: ${len}, got: ${data.length}`;
+ debug_check(data.length === len, `Expected an array of size: ${len}, got: ${data.length}`);
  const arr = new sized_array_FixedSizeArray(len);
  for (let i = 0; i < len; i++) {
  arr[i] = data[i];
@@ -7382,7 +7354,7 @@ class SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
+ debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
  const comparator = first.comparator;
  const arr1 = first.array;
  const arr1Length = arr1.length;
@@ -7502,7 +7474,7 @@ class SortedSet extends SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
+ debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
  const comparator = first.comparator;
  if (first.length === 0) {
  return SortedSet.fromSortedArray(comparator, second.array);
@@ -8140,10 +8112,7 @@ const common_tryAsCoreIndex = (v) => opaque_asOpaqueType(numbers_tryAsU16(v));
  /** Attempt to convert a number into `Epoch`. */
  const tryAsEpoch = (v) => asOpaqueType(tryAsU32(v));
  function tryAsPerValidator(array, spec) {
- debug_check `
- ${array.length === spec.validatorsCount}
- Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
- `;
+ debug_check(array.length === spec.validatorsCount, `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`);
  return sized_array_asKnownSize(array);
  }
  const codecPerValidator = (val) => codecWithContext((context) => {
@@ -8152,10 +8121,7 @@ const codecPerValidator = (val) => codecWithContext((context) => {
  });
  });
  function tryAsPerEpochBlock(array, spec) {
- debug_check `
- ${array.length === spec.epochLength}
- Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
- `;
+ debug_check(array.length === spec.epochLength, `Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}`);
  return sized_array_asKnownSize(array);
  }
  const codecPerEpochBlock = (val) => codecWithContext((context) => {
@@ -8426,14 +8392,9 @@ class WorkItem extends WithDebug {



-
  /** Verify the value is within the `WorkItemsCount` bounds. */
  function work_package_tryAsWorkItemsCount(len) {
- debug_check `
- ${len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS}
- WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}
- `;
- return numbers_tryAsU8(len);
+ return debug_ensure(len, len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS, `WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}`);
  }
  /** Minimal number of work items in the work package or results in work report. */
  const MIN_NUMBER_OF_WORK_ITEMS = 1;
@@ -9676,10 +9637,7 @@ class AvailabilityAssignment extends WithDebug {

  /** Check if given array has correct length before casting to the opaque type. */
  function tryAsPerCore(array, spec) {
- debug_check `
- ${array.length === spec.coresCount}
- Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
- `;
+ debug_check(array.length === spec.coresCount, `Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}`);
  return opaque_asOpaqueType(array);
  }
  const codecPerCore = (val) => codecWithContext((context) => {
@@ -10930,7 +10888,7 @@ class InMemoryState extends WithDebug {
  }
  removeServices(servicesRemoved) {
  for (const serviceId of servicesRemoved ?? []) {
- debug_check `${this.services.has(serviceId)} Attempting to remove non-existing service: ${serviceId}`;
+ debug_check(this.services.has(serviceId), `Attempting to remove non-existing service: ${serviceId}`);
  this.services.delete(serviceId);
  }
  }
@@ -10947,10 +10905,7 @@ class InMemoryState extends WithDebug {
  }
  else if (kind === UpdateStorageKind.Remove) {
  const { key } = action;
- debug_check `
- ${service.data.storage.has(key.toString())}
- Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
- `;
+ debug_check(service.data.storage.has(key.toString()), `Attempting to remove non-existing storage item at ${serviceId}: ${action.key}`);
  service.data.storage.delete(key.toString());
  }
  else {
@@ -11633,12 +11588,12 @@ class TrieNode {
  }
  /** View this node as a branch node */
  asBranchNode() {
- debug_check `${this.getNodeType() === NodeType.Branch} not a branch!`;
+ debug_check(this.getNodeType() === NodeType.Branch);
  return new BranchNode(this);
  }
  /** View this node as a leaf node */
  asLeafNode() {
- debug_check `${this.getNodeType() !== NodeType.Branch} not a leaf!`;
+ debug_check(this.getNodeType() !== NodeType.Branch);
  return new LeafNode(this);
  }
  toString() {
@@ -12126,7 +12081,7 @@ function createSubtreeForBothLeaves(traversedPath, nodes, leafToReplace, leaf) {
  * Return a single bit from `key` located at `bitIndex`.
  */
  function getBit(key, bitIndex) {
- debug_check `${bitIndex < TRUNCATED_KEY_BITS} invalid bit index passed ${bitIndex}`;
+ debug_check(bitIndex < TRUNCATED_KEY_BITS);
  const byte = bitIndex >>> 3;
  const bit = bitIndex - (byte << 3);
  const mask = 0b10_00_00_00 >>> bit;
@@ -13451,7 +13406,7 @@ class TypedPort {
  * Send a response given the worker that has previously requested something.
  */
  respond(localState, request, data, transferList) {
- debug_check `${request.kind === "request"}`;
+ debug_check(request.kind === "request");
  this.postMessage({
  kind: "response",
  id: request.id,
@@ -13482,11 +13437,10 @@ class TypedPort {
  throw new Error(`Invalid message: ${JSON.stringify(msg)}.`);
  }
  switch (msg.kind) {
- case "response": {
- debug_check `${this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1}`;
+ case "response":
+ debug_check(this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1);
  this.responseListeners.emit(reqEvent(msg.id), null, msg.data, msg.name, msg.localState, msg);
  break;
- }
  case "signal":
  this.listeners.emit("signal", msg.name, msg.data, msg.localState, msg);
  break;
@@ -13701,9 +13655,9 @@ class MessageChannelStateMachine {
  const promise = new Promise((resolve, reject) => {
  parentPort.once("message", (value) => {
  try {
- debug_check `${value.kind === "request"} The initial message should be a request with channel.`;
- debug_check `${value.name === CHANNEL_MESSAGE}`;
- debug_check `${value.data instanceof external_node_worker_threads_namespaceObject.MessagePort}`;
+ debug_check(value.kind === "request", "The initial message should be a request with channel.");
+ debug_check(value.name === CHANNEL_MESSAGE);
+ debug_check(value.data instanceof external_node_worker_threads_namespaceObject.MessagePort);
  const port = new TypedPort(value.data);
  port.respond(machine.currentState().stateName, value, Ok);
  resolve(port);
@@ -13783,7 +13737,7 @@ class StateMachine {
  /** Get state object by name. */
  getState(name) {
  const state = this.allStates.get(name);
- debug_check `${state !== undefined} Unable to retrieve state object for ${name}.`;
+ debug_check(state !== undefined, `Unable to retrieve state object for ${name}.`);
  return state;
  }
  /** Get the currently active state object. */
@@ -14127,22 +14081,19 @@ class Preimages {

  const NO_OF_REGISTERS = 13;
  const REGISTER_SIZE_SHIFT = 3;
- const registers_tryAsRegisterIndex = (index) => {
- check `${index >= 0 && index < NO_OF_REGISTERS} Incorrect register index: ${index}!`;
- return asOpaqueType(index);
- };
+ const registers_tryAsRegisterIndex = (index) => ensure(index, index >= 0 && index <= NO_OF_REGISTERS, `Incorrect register index: ${index}!`);
  class registers_Registers {
  bytes;
  asSigned;
  asUnsigned;
  constructor(bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
  this.bytes = bytes;
- check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
+ check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
  }
  static fromBytes(bytes) {
- check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
+ check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
  return new registers_Registers(bytes);
  }
  getBytesAsLittleEndian(index, len) {
@@ -14288,7 +14239,7 @@ class mask_Mask {
  return this.lookupTableForward[index] === 0;
  }
  getNoOfBytesToNextInstruction(index) {
- check `${index >= 0} index (${index}) cannot be a negative number`;
+ check(index >= 0, `index (${index}) cannot be a negative number`);
  return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
  }
  buildLookupTableForward(mask) {
@@ -15174,7 +15125,7 @@ const PAGE_SIZE_SHIFT = 12;
  const memory_consts_PAGE_SIZE = 1 << PAGE_SIZE_SHIFT;
  const MIN_ALLOCATION_SHIFT = (() => {
  const MIN_ALLOCATION_SHIFT = 7;
- debug_check `${MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
+ debug_check(MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT, "incorrect minimal allocation shift");
  return MIN_ALLOCATION_SHIFT;
  })();
  const MIN_ALLOCATION_LENGTH = memory_consts_PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
@@ -15187,28 +15138,16 @@ const MAX_NUMBER_OF_PAGES = memory_consts_MEMORY_SIZE / memory_consts_PAGE_SIZE;
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts


- const memory_index_tryAsMemoryIndex = (index) => {
- debug_check `${index >= 0 && index <= memory_consts_MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
- return opaque_asOpaqueType(index);
- };
- const memory_index_tryAsSbrkIndex = (index) => {
- check `${index >= 0 && index <= MAX_MEMORY_INDEX + 1} Incorrect sbrk index: ${index}!`;
- return asOpaqueType(index);
- };
+ const memory_index_tryAsMemoryIndex = (index) => debug_ensure(index, index >= 0 && index <= memory_consts_MAX_MEMORY_INDEX, `Incorrect memory index: ${index}!`);
+ const memory_index_tryAsSbrkIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX + 1, `Incorrect sbrk index: ${index}!`);

  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts


  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
- const page_utils_tryAsPageIndex = (index) => {
- check `${index >= 0 && index < PAGE_SIZE}, Incorect page index: ${index}!`;
- return asOpaqueType(index);
- };
+ const page_utils_tryAsPageIndex = (index) => ensure(index, index >= 0 && index < PAGE_SIZE, `Incorect page index: ${index}!`);
  /** Ensure that given `index` represents an index of one of the pages. */
- const page_utils_tryAsPageNumber = (index) => {
- debug_check `${index >= 0 && index <= LAST_PAGE_NUMBER}, Incorect page number: ${index}!`;
- return opaque_asOpaqueType(index);
- };
+ const page_utils_tryAsPageNumber = (index) => debug_ensure(index, index >= 0 && index <= LAST_PAGE_NUMBER, `Incorrect page number: ${index}!`);
  /**
  * Get the next page number and wrap the result if it is bigger than LAST_PAGE_NUMBER
  *
@@ -15740,10 +15679,10 @@ class memory_builder_MemoryBuilder {
  */
  setReadablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- check `${start < end} end has to be bigger than start`;
- check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
- check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
- check `${data.length <= end - start} the initial data is longer than address range`;
+ check(start < end, "end has to be bigger than start");
+ check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
+ check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
+ check(data.length <= end - start, "the initial data is longer than address range");
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -15768,10 +15707,10 @@ class memory_builder_MemoryBuilder {
  */
  setWriteablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- check `${start < end} end has to be bigger than start`;
- check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
- check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
- check `${data.length <= end - start} the initial data is longer than address range`;
+ check(start < end, "end has to be bigger than start");
+ check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
+ check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
+ check(data.length <= end - start, "the initial data is longer than address range");
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -15793,7 +15732,7 @@ class memory_builder_MemoryBuilder {
  this.ensureNotFinalized();
  const pageOffset = start % PAGE_SIZE;
  const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
- check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
+ check(data.length <= remainingSpaceOnPage, "The data has to fit into a single page.");
  const length = data.length;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -15807,10 +15746,7 @@ class memory_builder_MemoryBuilder {
  return this;
  }
  finalize(startHeapIndex, endHeapIndex) {
- check `
- ${startHeapIndex <= endHeapIndex}
- startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
- `;
+ check(startHeapIndex <= endHeapIndex, `startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})`);
  this.ensureNotFinalized();
  const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
  const pages = PageRange.fromMemoryRange(range);
@@ -15912,7 +15848,7 @@ function math_utils_mulU64(a, b) {
  *
  * The result of multiplication is a 64-bits number and we are only interested in the part that lands in the upper 32-bits.
  * For example if we multiply `0xffffffff * 0xffffffff`, we get:
-
+ 
  * | 64-bits | 64-bits |
  * +--------------------+--------------------+
  * | upper | lower |
@@ -15948,7 +15884,7 @@ function math_utils_mulUpperSS(a, b) {
  return interpretAsSigned(resultLimitedTo64Bits);
  }
  function math_utils_unsignedRightShiftBigInt(value, shift) {
- check `${shift >= 0} Shift count must be non-negative`;
+ check(shift >= 0, "Shift count must be non-negative");
  const fillBit = value < 0 ? "1" : "0";
  // Convert the BigInt to its binary representation
  const binaryRepresentation = value.toString(2).padStart(64, fillBit);
@@ -17307,10 +17243,7 @@ class two_regs_two_imms_dispatcher_TwoRegsTwoImmsDispatcher {
  class jump_table_JumpTable {
  indices;
  constructor(itemByteLength, bytes) {
- check `
- ${itemByteLength === 0 || bytes.length % itemByteLength === 0}
- Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!
- `;
+ check(itemByteLength === 0 || bytes.length % itemByteLength === 0, `Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!`);
  const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;
  this.indices = new Uint32Array(length);
  for (let i = 0; i < length; i++) {
@@ -17754,10 +17687,7 @@ class ReturnValue {
  this.consumedGas = consumedGas;
  this.status = status;
  this.memorySlice = memorySlice;
- check `
- ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
- 'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
- `;
+ check((status === null && memorySlice !== null) || (status !== null && memorySlice === null), "`status` and `memorySlice` must not both be null or both be non-null — exactly one must be provided");
  }
  static fromStatus(consumedGas, status) {
  return new ReturnValue(consumedGas, status, null);
@@ -17806,10 +17736,7 @@ class HostCalls {
  if (status !== Status.HOST) {
  return this.getReturnValue(status, pvmInstance);
  }
- check `
- ${pvmInstance.getExitParam() !== null}
- "We know that the exit param is not null, because the status is 'Status.HOST'
- `;
+ check(pvmInstance.getExitParam() !== null, "We know that the exit param is not null, because the status is `Status.HOST`");
  const hostCallIndex = pvmInstance.getExitParam() ?? -1;
  const gas = pvmInstance.getGasCounter();
  const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -17869,7 +17796,7 @@ class host_calls_manager_HostCallsManager {
  constructor({ missing, handlers = [], }) {
  this.missing = missing;
  for (const handler of handlers) {
- check `${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
+ check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
  this.hostCalls.set(handler.index, handler);
  }
  }
@@ -17992,7 +17919,7 @@ function getServiceId(serviceId) {
  return null;
  }
  function writeServiceIdAsLeBytes(serviceId, destination) {
- check `${destination.length >= SERVICE_ID_BYTES} Not enough space in the destination.`;
+ check(destination.length >= SERVICE_ID_BYTES, "Not enough space in the destination.");
  destination.set(u32AsLeBytes(serviceId));
  }
  /** Clamp a U64 to the maximum value of a 32-bit unsigned integer. */
@@ -18060,27 +17987,13 @@ class SpiProgram extends WithDebug {
  this.registers = registers;
  }
  }
- /**
- * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
- *
- * E_n - little endian encoding, n - length
- * o - initial read only data
- * w - initial heap
- * z - heap pages filled with zeros
- * s - stack size
- * c - program code
- *
- * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
- */
  function decode_standard_program_decodeStandardProgram(program, args) {
  const decoder = Decoder.fromBlob(program);
  const oLength = decoder.u24();
  const wLength = decoder.u24();
- check `${args.length <= DATA_LENGTH} Incorrect arguments length`;
- check `${oLength <= DATA_LENGTH} Incorrect readonly segment length`;
- const readOnlyLength = oLength;
- check `${wLength <= DATA_LENGTH} Incorrect heap segment length`;
- const heapLength = wLength;
+ const argsLength = ensure(args.length, args.length <= DATA_LEGNTH, "Incorrect arguments length");
+ const readOnlyLength = ensure(oLength, oLength <= DATA_LEGNTH, "Incorrect readonly segment length");
+ const heapLength = ensure(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
  const noOfHeapZerosPages = decoder.u16();
  const stackSize = decoder.u24();
  const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
@@ -18096,14 +18009,14 @@ function decode_standard_program_decodeStandardProgram(program, args) {
  const stackStart = STACK_SEGMENT - alignToPageSize(stackSize);
  const stackEnd = STACK_SEGMENT;
  const argsStart = ARGS_SEGMENT;
- const argsEnd = argsStart + alignToPageSize(args.length);
- const argsZerosEnd = argsEnd + alignToPageSize(args.length);
+ const argsEnd = argsStart + alignToPageSize(argsLength);
+ const argsZerosEnd = argsEnd + alignToPageSize(argsLength);
  function nonEmpty(s) {
  return s !== false;
  }
  const readableMemory = [
  readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
- args.length > 0 && getMemorySegment(argsStart, argsEnd, args),
+ argsLength > 0 && getMemorySegment(argsStart, argsEnd, args),
  argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
  ].filter(nonEmpty);
  const writeableMemory = [