@typeberry/lib 0.1.0-08a9db1 → 0.1.0-b2d0b72

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.cjs +211 -128
  2. package/index.d.ts +219 -219
  3. package/index.js +211 -128
  4. package/package.json +1 -1
package/index.js CHANGED
@@ -95,29 +95,17 @@ function isBrowser() {
  * We avoid using `node:assert` to keep compatibility with a browser environment.
  * Note the checks should not have any side effects, since we might decide
  * to remove all of them in a post-processing step.
- */
- function check(condition, message) {
- if (!condition) {
- throw new Error(`Assertion failure: ${message ?? ""}`);
- }
- }
- function cast(_a, condition) {
- return condition;
- }
- /**
- * Yet another function to perform runtime assertions.
- * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
  *
- * In the post-processing step all usages of this functions should be replaced with simple casting. An example:
- * const x = checkAndType<number, CheckedNumber>(y);
- * should be replaced with:
- * const x = y as CheckedNumber;
+ * NOTE the function is intended to be used as tagged template string for the performance
+ * reasons.
  */
- function ensure(a, condition, message) {
- if (cast(a, condition)) {
- return a;
+ function check(strings, condition, ...data) {
+ if (!condition) {
+ // add an empty value so that `data.length === strings.length`
+ data.unshift("");
+ const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
+ throw new Error(`Assertion failure:${message.join("")}`);
  }
- throw new Error(`Assertion failure: ${message ?? ""}`);
  }
  /**
  * The function can be used to make sure that a particular type is `never`
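For orientation, a minimal usage sketch of the new tagged-template `check` (the caller and values below are invented for illustration; only the `check` signature and behaviour come from this hunk):

    // The first interpolated value is the condition; the rest of the template becomes the message.
    function clampToByte(value) {
      check `${value >= 0 && value <= 0xff} expected a byte, got ${value}`;
      return value & 0xff;
    }
    clampToByte(17);  // passes silently
    clampToByte(300); // throws: "Assertion failure: expected a byte, got 300"

Because the message only gets assembled inside the failure branch, the happy path no longer pays for string formatting, which is the performance motivation mentioned in the new doc comment.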
@@ -284,7 +272,7 @@ function resultToString(res) {
  const Result$1 = {
  /** Create new [`Result`] with `Ok` status. */
  ok: (ok) => {
- check(ok !== undefined, "`Ok` type cannot be undefined.");
+ check `${ok !== undefined} 'ok' type cannot be undefined.`;
  return {
  isOk: true,
  isError: false,
@@ -293,7 +281,7 @@ const Result$1 = {
  },
  /** Create new [`Result`] with `Error` status. */
  error: (error, details = "") => {
- check(error !== undefined, "`Error` type cannot be undefined.");
+ check `${error !== undefined} 'Error' type cannot be undefined.`;
  return {
  isOk: false,
  isError: true,
@@ -557,7 +545,6 @@ var index$s = /*#__PURE__*/Object.freeze({
  assertNever: assertNever,
  check: check,
  deepEqual: deepEqual,
- ensure: ensure,
  inspect: inspect,
  isBrowser: isBrowser,
  measure: measure,
@@ -591,7 +578,10 @@ class BitVec {
  constructor(data, bitLength) {
  this.data = data;
  this.bitLength = bitLength;
- check(data.length * 8 >= bitLength, `Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.`);
+ check `
+ ${data.length * 8 >= bitLength}
+ Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
+ `;
  this.byteLength = Math.ceil(bitLength / 8);
  }
  /** Return a raw in-memory representation of this [`BitVec`]. */
@@ -600,7 +590,10 @@ class BitVec {
  }
  /** Perform OR operation on all bits in place. */
  sumWith(other) {
- check(other.bitLength === this.bitLength, `Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}`);
+ check `
+ ${other.bitLength === this.bitLength}
+ Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
+ `;
  const otherRaw = other.raw;
  for (let i = 0; i < this.byteLength; i++) {
  this.data[i] |= otherRaw[i];
@@ -610,7 +603,7 @@ class BitVec {
  * Set the bit at index `idx` to value `val`.
  */
  setBit(idx, val) {
- check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
+ check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
@@ -625,7 +618,7 @@ class BitVec {
  * Return `true` if the bit at index `idx` is set.
  */
  isSet(idx) {
- check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
+ check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
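A brief usage sketch of the bounds-checked BitVec methods above (constructing the class directly over a Uint8Array, as its constructor permits; values chosen for illustration):

    // 16-bit vector backed by two bytes; the constructor asserts data.length * 8 >= bitLength.
    const bits = new BitVec(new Uint8Array(2), 16);
    bits.setBit(3, true);
    bits.isSet(3);            // true
    bits.isSet(4);            // false
    // bits.setBit(16, true); // would throw: 16 is out of bounds for bitLength 16 (and so would -1 now)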
@@ -793,7 +786,7 @@ class BytesBlob {
  }
  /** Create a new [`BytesBlob`] from an array of bytes. */
  static blobFromNumbers(v) {
- check(v.find((x) => (x & 0xff) !== x) === undefined, "BytesBlob.blobFromNumbers used with non-byte number array.");
+ check `${v.find((x) => (x & 0xff) !== x) === undefined} BytesBlob.blobFromNumbers used with non-byte number array.`;
  const arr = new Uint8Array(v);
  return new BytesBlob(arr);
  }
@@ -837,7 +830,7 @@ class Bytes extends BytesBlob {
  length;
  constructor(raw, len) {
  super(raw);
- check(raw.byteLength === len, `Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`);
+ check `${raw.byteLength === len} Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`;
  this.length = len;
  }
  /** Create new [`Bytes<X>`] given a backing buffer and it's length. */
@@ -846,7 +839,7 @@ class Bytes extends BytesBlob {
  }
  /** Create new [`Bytes<X>`] given an array of bytes and it's length. */
  static fromNumbers(v, len) {
- check(v.find((x) => (x & 0xff) !== x) === undefined, "Bytes.fromNumbers used with non-byte number array.");
+ check `${v.find((x) => (x & 0xff) !== x) === undefined} Bytes.fromNumbers used with non-byte number array.`;
  const x = new Uint8Array(v);
  return new Bytes(x, len);
  }
@@ -857,7 +850,7 @@ class Bytes extends BytesBlob {
  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
  /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
  static fill(len, input) {
- check((input & 0xff) === input, "Input has to be a byte.");
+ check `${(input & 0xff) === input} Input has to be a byte.`;
  const bytes = Bytes.zero(len);
  bytes.raw.fill(input, 0, len);
  return bytes;
@@ -880,7 +873,7 @@ class Bytes extends BytesBlob {
  }
  /** Compare the sequence to another one. */
  isEqualTo(other) {
- check(this.length === other.length, "Comparing incorrectly typed bytes!");
+ check `${this.length === other.length} Comparing incorrectly typed bytes!`;
  return u8ArraySameLengthEqual(this.raw, other.raw);
  }
  /** Converts current type into some opaque extension. */
@@ -889,7 +882,7 @@ class Bytes extends BytesBlob {
  }
  }
  function byteFromString(s) {
- check(s.length === 2, "Two-character string expected");
+ check `${s.length === 2} Two-character string expected`;
  const a = numberFromCharCode(s.charCodeAt(0));
  const b = numberFromCharCode(s.charCodeAt(1));
  return (a << 4) | b;
@@ -945,42 +938,53 @@ var index$q = /*#__PURE__*/Object.freeze({
  bytesBlobComparator: bytesBlobComparator
  });
 
- const asWithBytesRepresentation = (v) => v;
+ const asTypedNumber = (v) => v;
  const MAX_VALUE_U8 = 0xff;
  const MAX_VALUE_U16 = 0xffff;
  const MAX_VALUE_U32 = 0xffff_ffff;
  const MAX_VALUE_U64 = 0xffffffffffffffffn;
  /** Attempt to cast an input number into U8. */
- const tryAsU8 = (v) => ensure(v, isU8(v), `input must have one-byte representation, got ${v}`);
+ const tryAsU8 = (v) => {
+ check `${isU8(v)} input must have one-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U8 number. */
  const isU8 = (v) => (v & MAX_VALUE_U8) === v;
  /** Attempt to cast an input number into U16. */
- const tryAsU16 = (v) => ensure(v, isU16(v), `input must have two-byte representation, got ${v}`);
+ const tryAsU16 = (v) => {
+ check `${isU16(v)} input must have two-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U16 number. */
  const isU16 = (v) => (v & MAX_VALUE_U16) === v;
  /** Attempt to cast an input number into U32. */
- const tryAsU32 = (v) => ensure(v, isU32(v), `input must have four-byte representation, got ${v}`);
+ const tryAsU32 = (v) => {
+ check `${isU32(v)} input must have four-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U32 number. */
  const isU32 = (v) => (v & MAX_VALUE_U32) >>> 0 === v;
  /** Attempt to cast an input number into U64. */
  const tryAsU64 = (x) => {
  const v = BigInt(x);
- return ensure(v, isU64(v), `input must have eight-byte representation, got ${x}`);
+ check `${isU64(v)} input must have eight-byte representation, got ${x}`;
+ return asTypedNumber(v);
  };
  /** Check if given number is a valid U64 number. */
  const isU64 = (v) => (v & MAX_VALUE_U64) === v;
  /** Collate two U32 parts into one U64. */
  const u64FromParts = ({ lower, upper }) => {
  const val = (BigInt(upper) << 32n) + BigInt(lower);
- return asWithBytesRepresentation(val);
+ return asTypedNumber(val);
  };
  /** Split U64 into lower & upper parts. */
  const u64IntoParts = (v) => {
- const lower = v & (2n ** 32n - 1n);
- const upper = v >> 32n;
+ // Number(...) safe: both parts are <= 0xffffffff
+ const lower = Number(v & (2n ** 32n - 1n));
+ const upper = Number(v >> 32n);
  return {
- lower: asWithBytesRepresentation(Number(lower)),
- upper: asWithBytesRepresentation(Number(upper)),
+ lower: asTypedNumber(lower),
+ upper: asTypedNumber(upper),
  };
  };
  /**
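A quick worked example of the u64 helpers above, using the functions exactly as defined in this hunk (the value is arbitrary):

    const value = tryAsU64(0x1_0000_0007n);       // 2**32 + 7
    const { lower, upper } = u64IntoParts(value); // lower === 7, upper === 1
    u64FromParts({ lower, upper }) === value;     // true: (1n << 32n) + 7n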
@@ -1020,8 +1024,8 @@ function u32AsLeBytes(value) {
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
  */
  function leBytesAsU32(uint8Array) {
- check(uint8Array.length === 4, "Input must be a Uint8Array of length 4");
- return asWithBytesRepresentation(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
+ check `${uint8Array.length === 4} Input must be a Uint8Array of length 4`;
+ return asTypedNumber(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
  }
  /** Get the smallest value between U64 a and values given as input parameters. */
  const minU64 = (a, ...values) => values.reduce((min, value) => (value > min ? min : value), a);
@@ -1328,7 +1332,7 @@ class Decoder {
  this.skip(newOffset - this.offset);
  }
  else {
- check(newOffset >= 0, "The offset has to be positive");
+ check `${newOffset >= 0} The offset has to be positive`;
  this.offset = newOffset;
  }
  }
@@ -1356,7 +1360,7 @@ class Decoder {
  return num;
  }
  ensureHasBytes(bytes) {
- check(bytes >= 0, "Negative number of bytes given.");
+ check `${bytes >= 0} Negative number of bytes given.`;
  if (this.offset + bytes > this.source.length) {
  throw new Error(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
  }
@@ -1364,7 +1368,7 @@ class Decoder {
  }
  const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
  function decodeVariableLengthExtraBytes(firstByte) {
- check(firstByte >= 0 && firstByte < 256, `Incorrect byte value: ${firstByte}`);
+ check `${firstByte >= 0 && firstByte < 256} Incorrect byte value: ${firstByte}`;
  for (let i = 0; i < MASKS.length; i++) {
  if (firstByte >= MASKS[i]) {
  return 8 - i;
@@ -1509,7 +1513,7 @@ class Descriptor {
  }
 
  function tryAsExactBytes(a) {
- check(a.isExact, "The value is not exact size estimation!");
+ check `${a.isExact} The value is not exact size estimation!`;
  return a.bytes;
  }
  function addSizeHints(a, b) {
@@ -1616,8 +1620,8 @@ class Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- check(num < maxNum, "Only for numbers up to 2**64 - 1");
- check(-num <= maxNum / 2n, "Only for numbers down to -2**63");
+ check `${num < maxNum} Only for numbers up to 2**64 - 1`;
+ check `${-num <= maxNum / 2n} Only for numbers down to -2**63`;
  this.ensureBigEnough(8);
  this.dataView.setBigInt64(this.offset, num, true);
  this.offset += 8;
@@ -1681,8 +1685,8 @@ class Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- check(num < maxNum, `Only for numbers up to 2**${BITS * bytesToEncode} - 1`);
- check(-num <= maxNum / 2, `Only for numbers down to -2**${BITS * bytesToEncode - 1}`);
+ check `${num < maxNum} Only for numbers up to 2**${BITS * bytesToEncode} - 1`;
+ check `${-num <= maxNum / 2} Only for numbers down to -2**${BITS * bytesToEncode - 1}`;
  this.ensureBigEnough(bytesToEncode);
  }
  /**
@@ -1693,8 +1697,8 @@ class Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
  */
  varU32(num) {
- check(num >= 0, "Only for natural numbers.");
- check(num < 2 ** 32, "Only for numbers up to 2**32");
+ check `${num >= 0} Only for natural numbers.`;
+ check `${num < 2 ** 32} Only for numbers up to 2**32`;
  this.varU64(BigInt(num));
  }
  /**
@@ -1845,7 +1849,7 @@ class Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
  */
  sequenceVarLen(encode, elements) {
- check(elements.length <= 2 ** 32, "Wow, that's a nice long sequence you've got here.");
+ check `${elements.length <= 2 ** 32} Wow, that's a nice long sequence you've got here.`;
  this.varU32(tryAsU32(elements.length));
  this.sequenceFixLen(encode, elements);
  }
@@ -1866,7 +1870,7 @@ class Encoder {
  * anyway, so if we really should throw we will.
  */
  ensureBigEnough(length, options = { silent: false }) {
- check(length >= 0, "Negative length given");
+ check `${length >= 0} Negative length given`;
  const newLength = this.offset + length;
  if (newLength > MAX_LENGTH$1) {
  if (options.silent) {
@@ -1997,10 +2001,12 @@ class ObjectView {
  decodeUpTo(field) {
  const index = this.descriptorsKeys.indexOf(field);
  const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
- check(this.lastDecodedFieldIdx < index, `Unjustified call to 'decodeUpTo' -
+ check `
+ ${this.lastDecodedFieldIdx < index}
+ Unjustified call to 'decodeUpTo' -
  the index ($Blobindex}, ${String(field)})
  is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
- `);
+ `;
  let lastItem = this.cache.get(lastField);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -2016,8 +2022,10 @@ class ObjectView {
  this.cache.set(field, lastItem);
  this.lastDecodedFieldIdx = i;
  }
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
- return last;
+ if (lastItem === undefined) {
+ throw new Error("Last item must be set, since the loop turns at least once.");
+ }
+ return lastItem;
  }
  }
  /**
@@ -2050,8 +2058,10 @@ class SequenceView {
  *[Symbol.iterator]() {
  for (let i = 0; i < this.length; i++) {
  const val = this.get(i);
- const v = ensure(val, val !== undefined, "We are within 0..this.length so all items are defined.");
- yield v;
+ if (val === undefined) {
+ throw new Error("We are within 0..this.length so all items are defined.");
+ }
+ yield val;
  }
  }
  /** Create an array of all views mapped to some particular value. */
@@ -2094,7 +2104,10 @@ class SequenceView {
  return BytesBlob.blobFrom(this.decoder.source.subarray(this.initialDecoderOffset, this.decoder.bytesRead()));
  }
  decodeUpTo(index) {
- check(this.lastDecodedIdx < index, `Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).`);
+ check `
+ ${this.lastDecodedIdx < index}
+ Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).
+ `;
  let lastItem = this.cache.get(this.lastDecodedIdx);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -2109,8 +2122,10 @@ class SequenceView {
  this.cache.set(i, lastItem);
  this.lastDecodedIdx = i;
  }
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
- return last;
+ if (lastItem === undefined) {
+ throw new Error("Last item must be set, since the loop turns at least once.");
+ }
+ return lastItem;
  }
  }
 
@@ -2134,7 +2149,10 @@ const TYPICAL_DICTIONARY_LENGTH = 32;
  */
  function readonlyArray(desc) {
  return desc.convert((x) => {
- check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
+ check `
+ ${Array.isArray(x)}
+ Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
+ `;
  // NOTE [ToDr] This assumption is incorrect in general, but it's documented
  // in the general note. We avoid `.slice()` the array for performance reasons.
  return x;
@@ -3091,7 +3109,7 @@ const BLS_KEY_BYTES = 144;
  /** Derive a Bandersnatch public key from a seed. */
  function publicKey(seed) {
  const key = bandersnatch_exports.derive_public_key(seed);
- check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
+ check `${key[0] === 0} Invalid Bandersnatch public key derived from seed`;
  return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
  }
 
@@ -3533,7 +3551,7 @@ async function verify(input) {
  data.set(signature.raw, offset);
  offset += ED25519_SIGNATURE_BYTES;
  const messageLength = message.length;
- check(messageLength < 256, `Message needs to be shorter than 256 bytes. Got: ${messageLength}`);
+ check `${messageLength < 256} Message needs to be shorter than 256 bytes. Got: ${messageLength}`;
  data[offset] = messageLength;
  offset += 1;
  data.set(message.raw, offset);
@@ -3622,7 +3640,7 @@ class PageAllocator {
  // TODO [ToDr] Benchmark the performance!
  constructor(hashesPerPage) {
  this.hashesPerPage = hashesPerPage;
- check(hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage, "Expected a non-zero integer.");
+ check `${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
  this.resetPage();
  }
  resetPage() {
@@ -5093,8 +5111,8 @@ class MultiMap {
  * if needed.
  */
  constructor(keysLength, keyMappers) {
- check(keysLength > 0, "Keys cannot be empty.");
- check(keyMappers === undefined || keyMappers.length === keysLength, "Incorrect number of key mappers given!");
+ check `${keysLength > 0} Keys cannot be empty.`;
+ check `${keyMappers === undefined || keyMappers.length === keysLength} Incorrect number of key mappers given!`;
  this.data = new Map();
  this.keyMappers = keyMappers === undefined ? Array(keysLength).fill(null) : keyMappers;
  }
@@ -5193,7 +5211,7 @@ class FixedSizeArray extends Array {
  this.fixedLength = this.length;
  }
  static new(data, len) {
- check(data.length === len, `Expected an array of size: ${len}, got: ${data.length}`);
+ check `${data.length === len} Expected an array of size: ${len}, got: ${data.length}`;
  const arr = new FixedSizeArray(len);
  for (let i = 0; i < len; i++) {
  arr[i] = data[i];
@@ -5325,7 +5343,7 @@ class SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
+ check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
  const comparator = first.comparator;
  const arr1 = first.array;
  const arr1Length = arr1.length;
@@ -5442,7 +5460,7 @@ class SortedSet extends SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
+ check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
  const comparator = first.comparator;
  if (first.length === 0) {
  return SortedSet.fromSortedArray(comparator, second.array);
@@ -6104,7 +6122,10 @@ const tryAsCoreIndex = (v) => asOpaqueType(tryAsU16(v));
  /** Attempt to convert a number into `Epoch`. */
  const tryAsEpoch = (v) => asOpaqueType(tryAsU32(v));
  function tryAsPerValidator(array, spec) {
- check(array.length === spec.validatorsCount, `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`);
+ check `
+ ${array.length === spec.validatorsCount}
+ Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
+ `;
  return asKnownSize(array);
  }
  const codecPerValidator = (val) => codecWithContext((context) => {
@@ -6113,7 +6134,10 @@ const codecPerValidator = (val) => codecWithContext((context) => {
  });
  });
  function tryAsPerEpochBlock(array, spec) {
- check(array.length === spec.epochLength, `Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}`);
+ check `
+ ${array.length === spec.epochLength}
+ Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
+ `;
  return asKnownSize(array);
  }
  const codecPerEpochBlock = (val) => codecWithContext((context) => {
@@ -6378,7 +6402,11 @@ var workItem = /*#__PURE__*/Object.freeze({
 
  /** Verify the value is within the `WorkItemsCount` bounds. */
  function tryAsWorkItemsCount(len) {
- return ensure(len, len >= MIN_NUMBER_OF_WORK_ITEMS && len <= MAX_NUMBER_OF_WORK_ITEMS, `WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${MAX_NUMBER_OF_WORK_ITEMS}' got ${len}`);
+ check `
+ ${len >= MIN_NUMBER_OF_WORK_ITEMS && len <= MAX_NUMBER_OF_WORK_ITEMS}
+ WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${MAX_NUMBER_OF_WORK_ITEMS}' got ${len}
+ `;
+ return tryAsU8(len);
  }
  /** Minimal number of work items in the work package or results in work report. */
  const MIN_NUMBER_OF_WORK_ITEMS = 1;
@@ -8594,7 +8622,10 @@ class AvailabilityAssignment extends WithDebug {
 
  /** Check if given array has correct length before casting to the opaque type. */
  function tryAsPerCore(array, spec) {
- check(array.length === spec.coresCount, `Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}`);
+ check `
+ ${array.length === spec.coresCount}
+ Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
+ `;
  return asOpaqueType(array);
  }
  const codecPerCore = (val) => codecWithContext((context) => {
@@ -9742,7 +9773,7 @@ class InMemoryState extends WithDebug {
  }
  removeServices(servicesRemoved) {
  for (const serviceId of servicesRemoved ?? []) {
- check(this.services.has(serviceId), `Attempting to remove non-existing service: ${serviceId}`);
+ check `${this.services.has(serviceId)} Attempting to remove non-existing service: ${serviceId}`;
  this.services.delete(serviceId);
  }
  }
@@ -9759,7 +9790,10 @@ class InMemoryState extends WithDebug {
  }
  else if (kind === UpdateStorageKind.Remove) {
  const { key } = action;
- check(service.data.storage.has(key.toString()), `Attempting to remove non-existing storage item at ${serviceId}: ${action.key}`);
+ check `
+ ${service.data.storage.has(key.toString())}
+ Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+ `;
  service.data.storage.delete(key.toString());
  }
  else {
@@ -10438,12 +10472,12 @@ class TrieNode {
  }
  /** View this node as a branch node */
  asBranchNode() {
- check(this.getNodeType() === NodeType.Branch);
+ check `${this.getNodeType() === NodeType.Branch} not a branch!`;
  return new BranchNode(this);
  }
  /** View this node as a leaf node */
  asLeafNode() {
- check(this.getNodeType() !== NodeType.Branch);
+ check `${this.getNodeType() !== NodeType.Branch} not a leaf!`;
  return new LeafNode(this);
  }
  toString() {
@@ -10921,7 +10955,7 @@ function createSubtreeForBothLeaves(traversedPath, nodes, leafToReplace, leaf) {
  * Return a single bit from `key` located at `bitIndex`.
  */
  function getBit(key, bitIndex) {
- check(bitIndex < TRUNCATED_KEY_BITS);
+ check `${bitIndex < TRUNCATED_KEY_BITS} invalid bit index passed ${bitIndex}`;
  const byte = bitIndex >>> 3;
  const bit = bitIndex - (byte << 3);
  const mask = 0b10_00_00_00 >>> bit;
@@ -11649,7 +11683,6 @@ const N_CHUNKS_REQUIRED = 342;
  const N_CHUNKS_REDUNDANCY = 681;
  /** Total number of chunks generated by EC. */
  const N_CHUNKS_TOTAL = 1023;
- // check(N_CHUNKS_TOTAL === N_CHUNKS_REQUIRED + N_CHUNKS_REDUNDANCY);
  /**
  * reed-solomon-simd requires point size to be multiple of 64 bytes but we need only 2 bytes.
  * It does not matter what indices are selected, but it has to be n and n + 32
@@ -11661,7 +11694,10 @@ const HALF_POINT_SIZE = 32;
  */
  const POINT_LENGTH = 2;
  const PIECE_SIZE = 684;
- // check(PIECE_SIZE === N_CHUNKS_REQUIRED * POINT_LENGTH);
+ function checkConsistency() {
+ check `${N_CHUNKS_TOTAL === N_CHUNKS_REQUIRED + N_CHUNKS_REDUNDANCY} no of required and redundancy chunks need to match ${N_CHUNKS_TOTAL}`;
+ check `${PIECE_SIZE === N_CHUNKS_REQUIRED * POINT_LENGTH} piece size needs to match ${N_CHUNKS_REQUIRED} * ${POINT_LENGTH}`;
+ }
  /**
  * Takes arbitrarily long input data, padds it to multiple of `PIECE_SIZE` and returns
  * exactly `N_CHUNKS_TOTAL` ec-coded segments.
@@ -11694,8 +11730,11 @@ function decodeDataAndTrim(input, expectedLength) {
  function decodeData(input) {
  const pointBytes = input[0][1].length;
  const points = Math.floor(pointBytes / POINT_LENGTH);
- check(points * POINT_LENGTH === pointBytes, "Each point length needs to be a multiple of `POINT_LENGTH`");
- check(input.every(([_idx, point]) => point.length === pointBytes), "Every piece must have the same length!");
+ check `${points * POINT_LENGTH === pointBytes} Each point length needs to be a multiple of ${POINT_LENGTH}`;
+ check `
+ ${input.every(([_idx, point]) => point.length === pointBytes)},
+ Every piece must have the same length!
+ `;
  const pieces = FixedSizeArray.fill(() => Bytes.zero(PIECE_SIZE), points);
  for (let i = 0; i < points; i++) {
  const start = i * POINT_LENGTH;
@@ -11768,7 +11807,7 @@ function decodePiece(input) {
  if (resultIndices === undefined) {
  throw new Error("indices array in decoded result must exist!");
  }
- check(resultData.length === resultIndices.length * POINT_ALIGNMENT, "incorrect length of data or indices!");
+ check `${resultData.length === resultIndices.length * POINT_ALIGNMENT} incorrect length of data or indices!`;
  for (let i = 0; i < resultIndices.length; i++) {
  // fill reconstructed shards in result
  const index = resultIndices[i];
@@ -11793,7 +11832,7 @@ function decodePiece(input) {
  * https://graypaper.fluffylabs.dev/#/9a08063/3eb4013eb401?v=0.6.6
  */
  function split(input, n, k) {
- check(n * k === input.length);
+ check `${n * k === input.length}`;
  const result = [];
  for (let i = 0; i < k; i++) {
  const start = i * n;
@@ -11906,7 +11945,7 @@ function transpose(input, k) {
  */
  function chunkingFunction(input) {
  const k = Math.floor(input.length / PIECE_SIZE);
- check(k * PIECE_SIZE === input.length, `Input length ${input.length} is not divisible by ${PIECE_SIZE}`);
+ check `${k * PIECE_SIZE === input.length} Input length ${input.length} is not divisible by ${PIECE_SIZE}`;
  // we get a `k` pieces.
  const pieces = unzip(input, PIECE_SIZE, k);
  // and each piece get's ec-codec
@@ -11924,7 +11963,10 @@ function chunkingFunction(input) {
  function shardsToChunks(spec, shards) {
  const result = [];
  const shardSize = shards[0].length;
- check(shards.every((s) => s.length === shardSize), "Each shard must be the same length!");
+ check `
+ ${shards.every((s) => s.length === shardSize)}
+ Each shard must be the same length!
+ `;
  const totalData = shards.map((s) => s.length).reduce((sum, sLength) => sum + sLength, 0);
  const chunkSize = Math.floor(totalData / N_CHUNKS_TOTAL);
  const piecesPerChunk = Math.floor(shardSize / chunkSize);
@@ -11974,6 +12016,7 @@ var index$a = /*#__PURE__*/Object.freeze({
  N_CHUNKS_TOTAL: N_CHUNKS_TOTAL,
  PIECE_SIZE: PIECE_SIZE,
  POINT_LENGTH: POINT_LENGTH,
+ checkConsistency: checkConsistency,
  chunkingFunction: chunkingFunction,
  chunksToShards: chunksToShards,
  decodeData: decodeData,
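One practical effect of the erasure-coding hunks above: the sanity checks on N_CHUNKS_TOTAL and PIECE_SIZE are no longer commented out, but they also do not run at module load; they only run when the newly exported checkConsistency() is called. A minimal sketch (the namespace name below is illustrative, the exact export path is not visible in this diff):

    // Illustrative import only; the erasure-coding namespace's public name is not shown here.
    import { erasureCoding } from "@typeberry/lib";
    erasureCoding.checkConsistency(); // throws via `check` if the chunk/piece constants disagree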
@@ -12436,8 +12479,8 @@ class PartiallyUpdatedState {
  this.stateUpdate.services.preimages.push(newUpdate);
  }
  updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
- check(items >= 0, `storageUtilisationCount has to be a positive number, got: ${items}`);
- check(bytes >= 0, `storageUtilisationBytes has to be a positive number, got: ${bytes}`);
+ check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
+ check `${bytes >= 0} storageUtilisationBytes has to be a positive number, got: ${bytes}`;
  const overflowItems = !isU32(items);
  const overflowBytes = !isU64(bytes);
  // TODO [ToDr] this is not specified in GP, but it seems sensible.
@@ -12532,7 +12575,7 @@ function getServiceId(serviceId) {
  return null;
  }
  function writeServiceIdAsLeBytes(serviceId, destination) {
- check(destination.length >= SERVICE_ID_BYTES, "Not enough space in the destination.");
+ check `${destination.length >= SERVICE_ID_BYTES} Not enough space in the destination.`;
  destination.set(u32AsLeBytes(serviceId));
  }
  /** Clamp a U64 to the maximum value of a 32-bit unsigned integer. */
@@ -12700,19 +12743,22 @@ var index$8 = /*#__PURE__*/Object.freeze({
 
  const NO_OF_REGISTERS$1 = 13;
  const REGISTER_SIZE_SHIFT = 3;
- const tryAsRegisterIndex = (index) => ensure(index, index >= 0 && index <= NO_OF_REGISTERS$1, `Incorrect register index: ${index}!`);
+ const tryAsRegisterIndex = (index) => {
+ check `${index >= 0 && index < NO_OF_REGISTERS$1} Incorrect register index: ${index}!`;
+ return asOpaqueType(index);
+ };
  class Registers {
  bytes;
  asSigned;
  asUnsigned;
  constructor(bytes = new Uint8Array(NO_OF_REGISTERS$1 << REGISTER_SIZE_SHIFT)) {
  this.bytes = bytes;
- check(bytes.length === NO_OF_REGISTERS$1 << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
+ check `${bytes.length === NO_OF_REGISTERS$1 << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
  }
  static fromBytes(bytes) {
- check(bytes.length === NO_OF_REGISTERS$1 << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
+ check `${bytes.length === NO_OF_REGISTERS$1 << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  return new Registers(bytes);
  }
  getBytesAsLittleEndian(index, len) {
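A small usage sketch of the register file shown above (13 registers of 8 bytes each; the signed and unsigned typed-array views share one backing buffer):

    const regs = new Registers();                 // 13 * 8 = 104 zeroed bytes
    const r7 = tryAsRegisterIndex(7);             // passes: 0 <= 7 < 13
    regs.asUnsigned[r7] = 0xffff_ffff_ffff_ffffn; // write via the unsigned view
    regs.asSigned[r7] === -1n;                    // true: same bytes, signed view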
@@ -12848,7 +12894,7 @@ class Mask {
  return this.lookupTableForward[index] === 0;
  }
  getNoOfBytesToNextInstruction(index) {
- check(index >= 0, `index (${index}) cannot be a negative number`);
+ check `${index >= 0} index (${index}) cannot be a negative number`;
  return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
  }
  buildLookupTableForward(mask) {
@@ -13814,7 +13860,7 @@ const PAGE_SIZE_SHIFT = 12;
  const PAGE_SIZE$1 = 1 << PAGE_SIZE_SHIFT;
  const MIN_ALLOCATION_SHIFT = (() => {
  const MIN_ALLOCATION_SHIFT = 7;
- check(MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT, "incorrect minimal allocation shift");
+ check `${MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
  return MIN_ALLOCATION_SHIFT;
  })();
  const MIN_ALLOCATION_LENGTH = PAGE_SIZE$1 >> MIN_ALLOCATION_SHIFT;
@@ -13824,13 +13870,25 @@ const RESERVED_NUMBER_OF_PAGES = 16;
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/35a60235a602?v=0.6.4 */
  const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE$1;
 
- const tryAsMemoryIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX, `Incorrect memory index: ${index}!`);
- const tryAsSbrkIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX + 1, `Incorrect sbrk index: ${index}!`);
+ const tryAsMemoryIndex = (index) => {
+ check `${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
+ return asOpaqueType(index);
+ };
+ const tryAsSbrkIndex = (index) => {
+ check `${index >= 0 && index <= MAX_MEMORY_INDEX + 1} Incorrect sbrk index: ${index}!`;
+ return asOpaqueType(index);
+ };
 
  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
- const tryAsPageIndex = (index) => ensure(index, index >= 0 && index < PAGE_SIZE$1, `Incorect page index: ${index}!`);
+ const tryAsPageIndex = (index) => {
+ check `${index >= 0 && index < PAGE_SIZE$1}, Incorect page index: ${index}!`;
+ return asOpaqueType(index);
+ };
  /** Ensure that given `index` represents an index of one of the pages. */
- const tryAsPageNumber = (index) => ensure(index, index >= 0 && index <= LAST_PAGE_NUMBER, `Incorrect page number: ${index}!`);
+ const tryAsPageNumber = (index) => {
+ check `${index >= 0 && index <= LAST_PAGE_NUMBER}, Incorect page number: ${index}!`;
+ return asOpaqueType(index);
+ };
  /**
  * Get the next page number and wrap the result if it is bigger than LAST_PAGE_NUMBER
  *
@@ -14294,10 +14352,10 @@ class MemoryBuilder {
  */
  setReadablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- check(start < end, "end has to be bigger than start");
- check(start % PAGE_SIZE$1 === 0, `start needs to be a multiple of page size (${PAGE_SIZE$1})`);
- check(end % PAGE_SIZE$1 === 0, `end needs to be a multiple of page size (${PAGE_SIZE$1})`);
- check(data.length <= end - start, "the initial data is longer than address range");
+ check `${start < end} end has to be bigger than start`;
+ check `${start % PAGE_SIZE$1 === 0} start needs to be a multiple of page size (${PAGE_SIZE$1})`;
+ check `${end % PAGE_SIZE$1 === 0} end needs to be a multiple of page size (${PAGE_SIZE$1})`;
+ check `${data.length <= end - start} the initial data is longer than address range`;
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -14322,10 +14380,10 @@ class MemoryBuilder {
  */
  setWriteablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- check(start < end, "end has to be bigger than start");
- check(start % PAGE_SIZE$1 === 0, `start needs to be a multiple of page size (${PAGE_SIZE$1})`);
- check(end % PAGE_SIZE$1 === 0, `end needs to be a multiple of page size (${PAGE_SIZE$1})`);
- check(data.length <= end - start, "the initial data is longer than address range");
+ check `${start < end} end has to be bigger than start`;
+ check `${start % PAGE_SIZE$1 === 0} start needs to be a multiple of page size (${PAGE_SIZE$1})`;
+ check `${end % PAGE_SIZE$1 === 0} end needs to be a multiple of page size (${PAGE_SIZE$1})`;
+ check `${data.length <= end - start} the initial data is longer than address range`;
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -14347,7 +14405,7 @@ class MemoryBuilder {
  this.ensureNotFinalized();
  const pageOffset = start % PAGE_SIZE$1;
  const remainingSpaceOnPage = PAGE_SIZE$1 - pageOffset;
- check(data.length <= remainingSpaceOnPage, "The data has to fit into a single page.");
+ check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
  const length = data.length;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -14361,7 +14419,10 @@ class MemoryBuilder {
  return this;
  }
  finalize(startHeapIndex, endHeapIndex) {
- check(startHeapIndex <= endHeapIndex, `startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})`);
+ check `
+ ${startHeapIndex <= endHeapIndex}
+ startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
+ `;
  this.ensureNotFinalized();
  const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
  const pages = PageRange.fromMemoryRange(range);
@@ -14606,7 +14667,7 @@ function mulUpperSS(a, b) {
  return interpretAsSigned(resultLimitedTo64Bits);
  }
  function unsignedRightShiftBigInt(value, shift) {
- check(shift >= 0, "Shift count must be non-negative");
+ check `${shift >= 0} Shift count must be non-negative`;
  const fillBit = value < 0 ? "1" : "0";
  // Convert the BigInt to its binary representation
  const binaryRepresentation = value.toString(2).padStart(64, fillBit);
@@ -15932,7 +15993,10 @@ class TwoRegsTwoImmsDispatcher {
  class JumpTable {
  indices;
  constructor(itemByteLength, bytes) {
- check(itemByteLength === 0 || bytes.length % itemByteLength === 0, `Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!`);
+ check `
+ ${itemByteLength === 0 || bytes.length % itemByteLength === 0}
+ Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!
+ `;
  const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;
  this.indices = new Uint32Array(length);
  for (let i = 0; i < length; i++) {
@@ -16322,7 +16386,10 @@ class ReturnValue {
  this.consumedGas = consumedGas;
  this.status = status;
  this.memorySlice = memorySlice;
- check((status === null && memorySlice !== null) || (status !== null && memorySlice === null), "`status` and `memorySlice` must not both be null or both be non-null — exactly one must be provided");
+ check `
+ ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
+ 'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
+ `;
  }
  static fromStatus(consumedGas, status) {
  return new ReturnValue(consumedGas, status, null);
@@ -16371,7 +16438,10 @@ class HostCalls {
  if (status !== Status.HOST) {
  return this.getReturnValue(status, pvmInstance);
  }
- check(pvmInstance.getExitParam() !== null, "We know that the exit param is not null, because the status is `Status.HOST`");
+ check `
+ ${pvmInstance.getExitParam() !== null}
+ "We know that the exit param is not null, because the status is 'Status.HOST'
+ `;
  const hostCallIndex = pvmInstance.getExitParam() ?? -1;
  const gas = pvmInstance.getGasCounter();
  const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -16425,7 +16495,7 @@ class HostCallsManager {
  constructor({ missing, handlers = [], }) {
  this.missing = missing;
  for (const handler of handlers) {
- check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
+ check `${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
  this.hostCalls.set(handler.index, handler);
  }
  }
@@ -16545,13 +16615,27 @@ class SpiProgram extends WithDebug {
  this.registers = registers;
  }
  }
+ /**
+ * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
+ *
+ * E_n - little endian encoding, n - length
+ * o - initial read only data
+ * w - initial heap
+ * z - heap pages filled with zeros
+ * s - stack size
+ * c - program code
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
+ */
  function decodeStandardProgram(program, args) {
  const decoder = Decoder.fromBlob(program);
  const oLength = decoder.u24();
  const wLength = decoder.u24();
- const argsLength = ensure(args.length, args.length <= DATA_LEGNTH, "Incorrect arguments length");
- const readOnlyLength = ensure(oLength, oLength <= DATA_LEGNTH, "Incorrect readonly segment length");
- const heapLength = ensure(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
+ check `${args.length <= DATA_LEGNTH} Incorrect arguments length`;
+ check `${oLength <= DATA_LEGNTH} Incorrect readonly segment length`;
+ const readOnlyLength = oLength;
+ check `${wLength <= DATA_LEGNTH} Incorrect heap segment length`;
+ const heapLength = wLength;
  const noOfHeapZerosPages = decoder.u16();
  const stackSize = decoder.u24();
  const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
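The newly documented layout can be illustrated with a tiny hand-rolled header builder (a sketch only; the helper and the byte values below are not part of the library):

    // Build the fixed-size prefix E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s), all little endian.
    function spiHeader(oLen, wLen, zPages, stackSize) {
      const out = new Uint8Array(11);
      out[0] = oLen & 0xff; out[1] = (oLen >>> 8) & 0xff; out[2] = (oLen >>> 16) & 0xff;
      out[3] = wLen & 0xff; out[4] = (wLen >>> 8) & 0xff; out[5] = (wLen >>> 16) & 0xff;
      out[6] = zPages & 0xff; out[7] = (zPages >>> 8) & 0xff;
      out[8] = stackSize & 0xff; out[9] = (stackSize >>> 8) & 0xff; out[10] = (stackSize >>> 16) & 0xff;
      return out;
    }
    // spiHeader(2, 0, 1, 4096) would then be followed by o, w, E_4(|c|) and c to form a full program blob.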
@@ -16567,14 +16651,14 @@ function decodeStandardProgram(program, args) {
  const stackStart = STACK_SEGMENT - alignToPageSize(stackSize);
  const stackEnd = STACK_SEGMENT;
  const argsStart = ARGS_SEGMENT;
- const argsEnd = argsStart + alignToPageSize(argsLength);
- const argsZerosEnd = argsEnd + alignToPageSize(argsLength);
+ const argsEnd = argsStart + alignToPageSize(args.length);
+ const argsZerosEnd = argsEnd + alignToPageSize(args.length);
  function nonEmpty(s) {
  return s !== false;
  }
  const readableMemory = [
  readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
- argsLength > 0 && getMemorySegment(argsStart, argsEnd, args),
+ args.length > 0 && getMemorySegment(argsStart, argsEnd, args),
  argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
  ].filter(nonEmpty);
  const writeableMemory = [
@@ -16705,7 +16789,7 @@ class DebuggerAdapter {
  return this.pvm.nextStep() === Status.OK;
  }
  nSteps(steps) {
- check(steps >>> 0 > 0, `Expected a positive integer got ${steps}`);
+ check `${steps >>> 0 > 0} Expected a positive integer got ${steps}`;
  for (let i = 0; i < steps; i++) {
  const isOk = this.nextStep();
  if (!isOk) {
@@ -16789,7 +16873,6 @@ var index$3 = /*#__PURE__*/Object.freeze({
  clampU64ToU32: clampU64ToU32,
  createResults: createResults,
  decodeStandardProgram: decodeStandardProgram,
- ensure: ensure,
  extractCodeAndMetadata: extractCodeAndMetadata,
  getServiceId: getServiceId,
  getServiceIdOrCurrent: getServiceIdOrCurrent,
@@ -16816,7 +16899,7 @@ const ENTROPY_BYTES = 32;
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
  function fisherYatesShuffle(arr, entropy) {
- check(entropy.length === ENTROPY_BYTES, `Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`);
+ check `${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
  const n = arr.length;
  const randomNumbers = hashToNumberSequence(entropy, arr.length);
  const result = new Array(n);