@typeberry/jam 0.1.0 → 0.1.1-127cc86

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/importer/index.js CHANGED
@@ -4224,10 +4224,17 @@ async function initAll() {
  await init.ed25519();
  await init.reedSolomon();
  }
+ function initOnce(doInit) {
+ let ready = null;
+ return async () => {
+ if (ready === null) ready = doInit();
+ return await ready;
+ };
+ }
  const init = {
- bandersnatch: async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() }),
- ed25519: async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() }),
- reedSolomon: async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() })
+ bandersnatch: initOnce(async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() })),
+ ed25519: initOnce(async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() })),
+ reedSolomon: initOnce(async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() }))
  };

  //#endregion
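Note: the new `initOnce` wrapper caches the promise returned by the first invocation, so repeated or concurrent calls to `init.bandersnatch()` and friends share a single WASM instantiation instead of re-running it. A minimal behaviour sketch (the `doInit` body below is hypothetical, not from the package):

    const init = initOnce(async () => {
      console.log("expensive init runs once");
      return 42;
    });
    // Both callers await the same cached promise; the log line appears once.
    const [a, b] = await Promise.all([init(), init()]); // a === b === 42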
@@ -4327,29 +4334,17 @@ function isBrowser() {
  * We avoid using `node:assert` to keep compatibility with a browser environment.
  * Note the checks should not have any side effects, since we might decide
  * to remove all of them in a post-processing step.
- */
- function debug_check(condition, message) {
- if (!condition) {
- throw new Error(`Assertion failure: ${message ?? ""}`);
- }
- }
- function cast(_a, condition) {
- return condition;
- }
- /**
- * Yet another function to perform runtime assertions.
- * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
  *
- * In the post-processing step all usages of this functions should be replaced with simple casting. An example:
- * const x = checkAndType<number, CheckedNumber>(y);
- * should be replaced with:
- * const x = y as CheckedNumber;
+ * NOTE the function is intended to be used as tagged template string for the performance
+ * reasons.
  */
- function ensure(a, condition, message) {
- if (cast(a, condition)) {
- return a;
+ function debug_check(strings, condition, ...data) {
+ if (!condition) {
+ // add an empty value so that `data.length === strings.length`
+ data.unshift("");
+ const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
+ throw new Error(`Assertion failure:${message.join("")}`);
  }
- throw new Error(`Assertion failure: ${message ?? ""}`);
  }
  /**
  * The function can be used to make sure that a particular type is `never`
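Note: `debug_check` is now a tagged template, as the NOTE in the hunk above says. The engine hands the tag the literal chunks (`strings`) plus the interpolated values; the first interpolation is taken as the condition and the rest as message data, and the message is only assembled (the `map`/`join`) when the condition fails, so the happy path skips string concatenation. The interpolated expressions themselves are still evaluated eagerly. A sketch of the failure path (hypothetical values):

    const len = 300;
    debug_check `${len < 256} length must fit in one byte, got ${len}`;
    // throws Error("Assertion failure: length must fit in one byte, got 300")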
@@ -4519,7 +4514,7 @@ function resultToString(res) {
  const result_Result = {
  /** Create new [`Result`] with `Ok` status. */
  ok: (ok) => {
- debug_check(ok !== undefined, "`Ok` type cannot be undefined.");
+ debug_check `${ok !== undefined} 'ok' type cannot be undefined.`;
  return {
  isOk: true,
  isError: false,
@@ -4528,7 +4523,7 @@ const result_Result = {
  },
  /** Create new [`Result`] with `Error` status. */
  error: (error, details = "") => {
- debug_check(error !== undefined, "`Error` type cannot be undefined.");
+ debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
  return {
  isOk: false,
  isError: true,
@@ -4812,7 +4807,10 @@ class BitVec {
  constructor(data, bitLength) {
  this.data = data;
  this.bitLength = bitLength;
- debug_check(data.length * 8 >= bitLength, `Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.`);
+ debug_check `
+ ${data.length * 8 >= bitLength}
+ Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
+ `;
  this.byteLength = Math.ceil(bitLength / 8);
  }
  /** Return a raw in-memory representation of this [`BitVec`]. */
@@ -4821,7 +4819,10 @@ class BitVec {
  }
  /** Perform OR operation on all bits in place. */
  sumWith(other) {
- debug_check(other.bitLength === this.bitLength, `Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}`);
+ debug_check `
+ ${other.bitLength === this.bitLength}
+ Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
+ `;
  const otherRaw = other.raw;
  for (let i = 0; i < this.byteLength; i++) {
  this.data[i] |= otherRaw[i];
@@ -4831,7 +4832,7 @@ class BitVec {
  * Set the bit at index `idx` to value `val`.
  */
  setBit(idx, val) {
- debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
+ debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
@@ -4846,7 +4847,7 @@ class BitVec {
  * Return `true` if the bit at index `idx` is set.
  */
  isSet(idx) {
- debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
+ debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
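Note: besides the tagged-template conversion, the guards in `setBit` and `isSet` gain an `idx >= 0` clause; the old `idx < this.bitLength` check let negative indices through to the `Math.floor(idx / 8)` byte lookup. For example (hypothetical `bits` instance):

    bits.isSet(-1); // previously passed the check; now trips the debug_check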
@@ -5013,7 +5014,7 @@ class bytes_BytesBlob {
  }
  /** Create a new [`BytesBlob`] from an array of bytes. */
  static blobFromNumbers(v) {
- debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "BytesBlob.blobFromNumbers used with non-byte number array.");
+ debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} BytesBlob.blobFromNumbers used with non-byte number array.`;
  const arr = new Uint8Array(v);
  return new bytes_BytesBlob(arr);
  }
@@ -5057,7 +5058,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  length;
  constructor(raw, len) {
  super(raw);
- debug_check(raw.byteLength === len, `Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`);
+ debug_check `${raw.byteLength === len} Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`;
  this.length = len;
  }
  /** Create new [`Bytes<X>`] given a backing buffer and it's length. */
@@ -5066,7 +5067,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Create new [`Bytes<X>`] given an array of bytes and it's length. */
  static fromNumbers(v, len) {
- debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "Bytes.fromNumbers used with non-byte number array.");
+ debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} Bytes.fromNumbers used with non-byte number array.`;
  const x = new Uint8Array(v);
  return new bytes_Bytes(x, len);
  }
@@ -5077,7 +5078,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
  /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
  static fill(len, input) {
- debug_check((input & 0xff) === input, "Input has to be a byte.");
+ debug_check `${(input & 0xff) === input} Input has to be a byte.`;
  const bytes = bytes_Bytes.zero(len);
  bytes.raw.fill(input, 0, len);
  return bytes;
@@ -5100,7 +5101,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Compare the sequence to another one. */
  isEqualTo(other) {
- debug_check(this.length === other.length, "Comparing incorrectly typed bytes!");
+ debug_check `${this.length === other.length} Comparing incorrectly typed bytes!`;
  return u8ArraySameLengthEqual(this.raw, other.raw);
  }
  /** Converts current type into some opaque extension. */
@@ -5109,7 +5110,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  }
  function byteFromString(s) {
- debug_check(s.length === 2, "Two-character string expected");
+ debug_check `${s.length === 2} Two-character string expected`;
  const a = numberFromCharCode(s.charCodeAt(0));
  const b = numberFromCharCode(s.charCodeAt(1));
  return (a << 4) | b;
@@ -5178,7 +5179,7 @@ const BLS_KEY_BYTES = 144;
  /** Derive a Bandersnatch public key from a seed. */
  function bandersnatch_publicKey(seed) {
  const key = bandersnatch.derive_public_key(seed);
- check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
+ check `${key[0] === 0} Invalid Bandersnatch public key derived from seed`;
  return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
  }

@@ -5666,7 +5667,7 @@ async function ed25519_verify(input) {
  data.set(signature.raw, offset);
  offset += ED25519_SIGNATURE_BYTES;
  const messageLength = message.length;
- debug_check(messageLength < 256, `Message needs to be shorter than 256 bytes. Got: ${messageLength}`);
+ debug_check `${messageLength < 256} Message needs to be shorter than 256 bytes. Got: ${messageLength}`;
  data[offset] = messageLength;
  offset += 1;
  data.set(message.raw, offset);
@@ -5695,6 +5696,7 @@ async function verifyBatch(input) {

  ;// CONCATENATED MODULE: ./packages/core/hash/hash.ts

+
  /**
  * Size of the output of the hash functions.
  *
@@ -5704,6 +5706,7 @@ async function verifyBatch(input) {
  const hash_HASH_SIZE = 32;
  /** A hash without last byte (useful for trie representation). */
  const TRUNCATED_HASH_SIZE = 31;
+ const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE);
  /**
  * Container for some object with a hash that is related to this object.
  *
@@ -5748,7 +5751,7 @@ class PageAllocator {
  // TODO [ToDr] Benchmark the performance!
  constructor(hashesPerPage) {
  this.hashesPerPage = hashesPerPage;
- check(hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage, "Expected a non-zero integer.");
+ check `${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
  this.resetPage();
  }
  resetPage() {
@@ -5842,42 +5845,53 @@ function keccak_hashBlobs(hasher, blobs) {

  ;// CONCATENATED MODULE: ./packages/core/numbers/index.ts

- const asWithBytesRepresentation = (v) => v;
+ const asTypedNumber = (v) => v;
  const MAX_VALUE_U8 = 0xff;
  const MAX_VALUE_U16 = 0xffff;
  const MAX_VALUE_U32 = 0xffff_ffff;
  const MAX_VALUE_U64 = 0xffffffffffffffffn;
  /** Attempt to cast an input number into U8. */
- const numbers_tryAsU8 = (v) => ensure(v, isU8(v), `input must have one-byte representation, got ${v}`);
+ const numbers_tryAsU8 = (v) => {
+ debug_check `${isU8(v)} input must have one-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U8 number. */
  const isU8 = (v) => (v & MAX_VALUE_U8) === v;
  /** Attempt to cast an input number into U16. */
- const numbers_tryAsU16 = (v) => ensure(v, isU16(v), `input must have two-byte representation, got ${v}`);
+ const numbers_tryAsU16 = (v) => {
+ debug_check `${isU16(v)} input must have two-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U16 number. */
  const isU16 = (v) => (v & MAX_VALUE_U16) === v;
  /** Attempt to cast an input number into U32. */
- const numbers_tryAsU32 = (v) => ensure(v, isU32(v), `input must have four-byte representation, got ${v}`);
+ const numbers_tryAsU32 = (v) => {
+ debug_check `${isU32(v)} input must have four-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U32 number. */
  const isU32 = (v) => (v & MAX_VALUE_U32) >>> 0 === v;
  /** Attempt to cast an input number into U64. */
  const numbers_tryAsU64 = (x) => {
  const v = BigInt(x);
- return ensure(v, isU64(v), `input must have eight-byte representation, got ${x}`);
+ debug_check `${isU64(v)} input must have eight-byte representation, got ${x}`;
+ return asTypedNumber(v);
  };
  /** Check if given number is a valid U64 number. */
  const isU64 = (v) => (v & MAX_VALUE_U64) === v;
  /** Collate two U32 parts into one U64. */
  const u64FromParts = ({ lower, upper }) => {
  const val = (BigInt(upper) << 32n) + BigInt(lower);
- return asWithBytesRepresentation(val);
+ return asTypedNumber(val);
  };
  /** Split U64 into lower & upper parts. */
  const u64IntoParts = (v) => {
- const lower = v & (2n ** 32n - 1n);
- const upper = v >> 32n;
+ // Number(...) safe: both parts are <= 0xffffffff
+ const lower = Number(v & (2n ** 32n - 1n));
+ const upper = Number(v >> 32n);
  return {
- lower: asWithBytesRepresentation(Number(lower)),
- upper: asWithBytesRepresentation(Number(upper)),
+ lower: asTypedNumber(lower),
+ upper: asTypedNumber(upper),
  };
  };
  /**
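Note: `u64IntoParts` now performs the `Number(...)` conversion where the parts are computed (safe, as the new comment says, because both parts fit in 32 bits), and `asWithBytesRepresentation` is renamed to `asTypedNumber`; the observable behaviour is unchanged. A round-trip sketch using the webpack-local names from this hunk:

    const { lower, upper } = u64IntoParts(0x1_2345_6789n); // lower === 0x23456789, upper === 0x1
    u64FromParts({ lower, upper });                        // 0x1_2345_6789n again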
@@ -5917,8 +5931,8 @@ function numbers_u32AsLeBytes(value) {
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
  */
  function leBytesAsU32(uint8Array) {
- debug_check(uint8Array.length === 4, "Input must be a Uint8Array of length 4");
- return asWithBytesRepresentation(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
+ debug_check `${uint8Array.length === 4} Input must be a Uint8Array of length 4`;
+ return asTypedNumber(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
  }
  /** Get the smallest value between U64 a and values given as input parameters. */
  const minU64 = (a, ...values) => values.reduce((min, value) => (value > min ? min : value), a);
@@ -6266,7 +6280,7 @@ class decoder_Decoder {
  this.skip(newOffset - this.offset);
  }
  else {
- debug_check(newOffset >= 0, "The offset has to be positive");
+ debug_check `${newOffset >= 0} The offset has to be positive`;
  this.offset = newOffset;
  }
  }
@@ -6294,7 +6308,7 @@ class decoder_Decoder {
  return num;
  }
  ensureHasBytes(bytes) {
- debug_check(bytes >= 0, "Negative number of bytes given.");
+ debug_check `${bytes >= 0} Negative number of bytes given.`;
  if (this.offset + bytes > this.source.length) {
  throw new Error(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
  }
@@ -6302,7 +6316,7 @@ class decoder_Decoder {
  }
  const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
  function decodeVariableLengthExtraBytes(firstByte) {
- debug_check(firstByte >= 0 && firstByte < 256, `Incorrect byte value: ${firstByte}`);
+ debug_check `${firstByte >= 0 && firstByte < 256} Incorrect byte value: ${firstByte}`;
  for (let i = 0; i < MASKS.length; i++) {
  if (firstByte >= MASKS[i]) {
  return 8 - i;
@@ -6457,7 +6471,7 @@ class descriptor_Descriptor {


  function tryAsExactBytes(a) {
- debug_check(a.isExact, "The value is not exact size estimation!");
+ debug_check `${a.isExact} The value is not exact size estimation!`;
  return a.bytes;
  }
  function addSizeHints(a, b) {
@@ -6564,8 +6578,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- debug_check(num < maxNum, "Only for numbers up to 2**64 - 1");
- debug_check(-num <= maxNum / 2n, "Only for numbers down to -2**63");
+ debug_check `${num < maxNum} Only for numbers up to 2**64 - 1`;
+ debug_check `${-num <= maxNum / 2n} Only for numbers down to -2**63`;
  this.ensureBigEnough(8);
  this.dataView.setBigInt64(this.offset, num, true);
  this.offset += 8;
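Note: as the comment in this hunk explains, the two bounds deliberately accept both spellings of one 64-bit pattern, the negative value and its two's complement. Assuming `maxNum` is `2n ** 64n` (its definition is outside the hunk), both of these pass the checks and `setBigInt64` stores the same little-endian bytes (`encode` stands in for the enclosing method, whose name is also outside the hunk):

    encode(-1n);                     // negative form
    encode(0xffff_ffff_ffff_ffffn); // same bit pattern, already two's complement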
@@ -6629,8 +6643,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- debug_check(num < maxNum, `Only for numbers up to 2**${BITS * bytesToEncode} - 1`);
- debug_check(-num <= maxNum / 2, `Only for numbers down to -2**${BITS * bytesToEncode - 1}`);
+ debug_check `${num < maxNum} Only for numbers up to 2**${BITS * bytesToEncode} - 1`;
+ debug_check `${-num <= maxNum / 2} Only for numbers down to -2**${BITS * bytesToEncode - 1}`;
  this.ensureBigEnough(bytesToEncode);
  }
  /**
@@ -6641,8 +6655,8 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
  */
  varU32(num) {
- debug_check(num >= 0, "Only for natural numbers.");
- debug_check(num < 2 ** 32, "Only for numbers up to 2**32");
+ debug_check `${num >= 0} Only for natural numbers.`;
+ debug_check `${num < 2 ** 32} Only for numbers up to 2**32`;
  this.varU64(BigInt(num));
  }
  /**
@@ -6793,7 +6807,7 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
  */
  sequenceVarLen(encode, elements) {
- debug_check(elements.length <= 2 ** 32, "Wow, that's a nice long sequence you've got here.");
+ debug_check `${elements.length <= 2 ** 32} Wow, that's a nice long sequence you've got here.`;
  this.varU32(numbers_tryAsU32(elements.length));
  this.sequenceFixLen(encode, elements);
  }
@@ -6814,7 +6828,7 @@ class encoder_Encoder {
  * anyway, so if we really should throw we will.
  */
  ensureBigEnough(length, options = { silent: false }) {
- debug_check(length >= 0, "Negative length given");
+ debug_check `${length >= 0} Negative length given`;
  const newLength = this.offset + length;
  if (newLength > MAX_LENGTH) {
  if (options.silent) {
@@ -6950,10 +6964,12 @@ class ObjectView {
  decodeUpTo(field) {
  const index = this.descriptorsKeys.indexOf(field);
  const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
- debug_check(this.lastDecodedFieldIdx < index, `Unjustified call to 'decodeUpTo' -
+ debug_check `
+ ${this.lastDecodedFieldIdx < index}
+ Unjustified call to 'decodeUpTo' -
  the index (${index}, ${String(field)})
  is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
- `);
+ `;
  let lastItem = this.cache.get(lastField);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -6969,8 +6985,10 @@ class ObjectView {
  this.cache.set(field, lastItem);
  this.lastDecodedFieldIdx = i;
  }
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
- return last;
+ if (lastItem === undefined) {
+ throw new Error("Last item must be set, since the loop turns at least once.");
+ }
+ return lastItem;
  }
  }
  /**
@@ -7003,8 +7021,10 @@ class SequenceView {
  *[Symbol.iterator]() {
  for (let i = 0; i < this.length; i++) {
  const val = this.get(i);
- const v = ensure(val, val !== undefined, "We are within 0..this.length so all items are defined.");
- yield v;
+ if (val === undefined) {
+ throw new Error("We are within 0..this.length so all items are defined.");
+ }
+ yield val;
  }
  }
  /** Create an array of all views mapped to some particular value. */
@@ -7047,7 +7067,10 @@ class SequenceView {
  return bytes_BytesBlob.blobFrom(this.decoder.source.subarray(this.initialDecoderOffset, this.decoder.bytesRead()));
  }
  decodeUpTo(index) {
- debug_check(this.lastDecodedIdx < index, `Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).`);
+ debug_check `
+ ${this.lastDecodedIdx < index}
+ Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).
+ `;
  let lastItem = this.cache.get(this.lastDecodedIdx);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -7062,8 +7085,10 @@ class SequenceView {
  this.cache.set(i, lastItem);
  this.lastDecodedIdx = i;
  }
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
- return last;
+ if (lastItem === undefined) {
+ throw new Error("Last item must be set, since the loop turns at least once.");
+ }
+ return lastItem;
  }
  }

@@ -7096,7 +7121,10 @@ const TYPICAL_DICTIONARY_LENGTH = 32;
  */
  function readonlyArray(desc) {
  return desc.convert((x) => {
- debug_check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
+ debug_check `
+ ${Array.isArray(x)}
+ Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
+ `;
  // NOTE [ToDr] This assumption is incorrect in general, but it's documented
  // in the general note. We avoid `.slice()` the array for performance reasons.
  return x;
@@ -7544,8 +7572,8 @@ class MultiMap {
  * if needed.
  */
  constructor(keysLength, keyMappers) {
- check(keysLength > 0, "Keys cannot be empty.");
- check(keyMappers === undefined || keyMappers.length === keysLength, "Incorrect number of key mappers given!");
+ check `${keysLength > 0} Keys cannot be empty.`;
+ check `${keyMappers === undefined || keyMappers.length === keysLength} Incorrect number of key mappers given!`;
  this.data = new Map();
  this.keyMappers = keyMappers === undefined ? Array(keysLength).fill(null) : keyMappers;
  }
@@ -7646,7 +7674,7 @@ class sized_array_FixedSizeArray extends Array {
  this.fixedLength = this.length;
  }
  static new(data, len) {
- debug_check(data.length === len, `Expected an array of size: ${len}, got: ${data.length}`);
+ debug_check `${data.length === len} Expected an array of size: ${len}, got: ${data.length}`;
  const arr = new sized_array_FixedSizeArray(len);
  for (let i = 0; i < len; i++) {
  arr[i] = data[i];
@@ -7780,7 +7808,7 @@ class SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
+ debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
  const comparator = first.comparator;
  const arr1 = first.array;
  const arr1Length = arr1.length;
@@ -7900,7 +7928,7 @@ class SortedSet extends SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
+ debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
  const comparator = first.comparator;
  if (first.length === 0) {
  return SortedSet.fromSortedArray(comparator, second.array);
@@ -8536,9 +8564,12 @@ const common_tryAsServiceGas = (v) => opaque_asOpaqueType(numbers_tryAsU64(v));
  /** Attempt to convert a number into `CoreIndex`. */
  const common_tryAsCoreIndex = (v) => opaque_asOpaqueType(numbers_tryAsU16(v));
  /** Attempt to convert a number into `Epoch`. */
- const tryAsEpoch = (v) => opaque_asOpaqueType(numbers_tryAsU32(v));
+ const tryAsEpoch = (v) => asOpaqueType(tryAsU32(v));
  function tryAsPerValidator(array, spec) {
- debug_check(array.length === spec.validatorsCount, `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`);
+ debug_check `
+ ${array.length === spec.validatorsCount}
+ Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
+ `;
  return sized_array_asKnownSize(array);
  }
  const codecPerValidator = (val) => codecWithContext((context) => {
@@ -8547,7 +8578,10 @@ const codecPerValidator = (val) => codecWithContext((context) => {
  });
  });
  function tryAsPerEpochBlock(array, spec) {
- debug_check(array.length === spec.epochLength, `Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}`);
+ debug_check `
+ ${array.length === spec.epochLength}
+ Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
+ `;
  return sized_array_asKnownSize(array);
  }
  const codecPerEpochBlock = (val) => codecWithContext((context) => {
@@ -8818,9 +8852,14 @@ class WorkItem extends WithDebug {



+
  /** Verify the value is within the `WorkItemsCount` bounds. */
  function work_package_tryAsWorkItemsCount(len) {
- return ensure(len, len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS, `WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}`);
+ debug_check `
+ ${len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS}
+ WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}
+ `;
+ return numbers_tryAsU8(len);
  }
  /** Minimal number of work items in the work package or results in work report. */
  const MIN_NUMBER_OF_WORK_ITEMS = 1;
@@ -10063,7 +10102,10 @@ class AvailabilityAssignment extends WithDebug {

  /** Check if given array has correct length before casting to the opaque type. */
  function tryAsPerCore(array, spec) {
- debug_check(array.length === spec.coresCount, `Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}`);
+ debug_check `
+ ${array.length === spec.coresCount}
+ Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
+ `;
  return opaque_asOpaqueType(array);
  }
  const codecPerCore = (val) => codecWithContext((context) => {
@@ -11314,7 +11356,7 @@ class InMemoryState extends WithDebug {
  }
  removeServices(servicesRemoved) {
  for (const serviceId of servicesRemoved ?? []) {
- debug_check(this.services.has(serviceId), `Attempting to remove non-existing service: ${serviceId}`);
+ debug_check `${this.services.has(serviceId)} Attempting to remove non-existing service: ${serviceId}`;
  this.services.delete(serviceId);
  }
  }
@@ -11331,7 +11373,10 @@ class InMemoryState extends WithDebug {
  }
  else if (kind === UpdateStorageKind.Remove) {
  const { key } = action;
- debug_check(service.data.storage.has(key.toString()), `Attempting to remove non-existing storage item at ${serviceId}: ${action.key}`);
+ debug_check `
+ ${service.data.storage.has(key.toString())}
+ Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+ `;
  service.data.storage.delete(key.toString());
  }
  else {
@@ -12014,12 +12059,12 @@ class TrieNode {
  }
  /** View this node as a branch node */
  asBranchNode() {
- debug_check(this.getNodeType() === NodeType.Branch);
+ debug_check `${this.getNodeType() === NodeType.Branch} not a branch!`;
  return new BranchNode(this);
  }
  /** View this node as a leaf node */
  asLeafNode() {
- debug_check(this.getNodeType() !== NodeType.Branch);
+ debug_check `${this.getNodeType() !== NodeType.Branch} not a leaf!`;
  return new LeafNode(this);
  }
  toString() {
@@ -12507,7 +12552,7 @@ function createSubtreeForBothLeaves(traversedPath, nodes, leafToReplace, leaf) {
  * Return a single bit from `key` located at `bitIndex`.
  */
  function getBit(key, bitIndex) {
- debug_check(bitIndex < TRUNCATED_KEY_BITS);
+ debug_check `${bitIndex < TRUNCATED_KEY_BITS} invalid bit index passed ${bitIndex}`;
  const byte = bitIndex >>> 3;
  const bit = bitIndex - (byte << 3);
  const mask = 0b10_00_00_00 >>> bit;
@@ -13832,7 +13877,7 @@ class TypedPort {
  * Send a response given the worker that has previously requested something.
  */
  respond(localState, request, data, transferList) {
- debug_check(request.kind === "request");
+ debug_check `${request.kind === "request"}`;
  this.postMessage({
  kind: "response",
  id: request.id,
@@ -13863,10 +13908,11 @@ class TypedPort {
  throw new Error(`Invalid message: ${JSON.stringify(msg)}.`);
  }
  switch (msg.kind) {
- case "response":
- debug_check(this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1);
+ case "response": {
+ debug_check `${this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1}`;
  this.responseListeners.emit(reqEvent(msg.id), null, msg.data, msg.name, msg.localState, msg);
  break;
+ }
  case "signal":
  this.listeners.emit("signal", msg.name, msg.data, msg.localState, msg);
  break;
@@ -14081,9 +14127,9 @@ class channel_MessageChannelStateMachine {
  const promise = new Promise((resolve, reject) => {
  parentPort.once("message", (value) => {
  try {
- debug_check(value.kind === "request", "The initial message should be a request with channel.");
- debug_check(value.name === CHANNEL_MESSAGE);
- debug_check(value.data instanceof external_node_worker_threads_namespaceObject.MessagePort);
+ debug_check `${value.kind === "request"} The initial message should be a request with channel.`;
+ debug_check `${value.name === CHANNEL_MESSAGE}`;
+ debug_check `${value.data instanceof external_node_worker_threads_namespaceObject.MessagePort}`;
  const port = new TypedPort(value.data);
  port.respond(machine.currentState().stateName, value, Ok);
  resolve(port);
@@ -14163,7 +14209,7 @@ class machine_StateMachine {
  /** Get state object by name. */
  getState(name) {
  const state = this.allStates.get(name);
- debug_check(state !== undefined, `Unable to retrieve state object for ${name}.`);
+ debug_check `${state !== undefined} Unable to retrieve state object for ${name}.`;
  return state;
  }
  /** Get the currently active state object. */
@@ -14428,19 +14474,22 @@ class Preimages {

  const NO_OF_REGISTERS = 13;
  const REGISTER_SIZE_SHIFT = 3;
- const tryAsRegisterIndex = (index) => ensure(index, index >= 0 && index <= NO_OF_REGISTERS, `Incorrect register index: ${index}!`);
+ const tryAsRegisterIndex = (index) => {
+ debug_check `${index >= 0 && index < NO_OF_REGISTERS} Incorrect register index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
  class Registers {
  bytes;
  asSigned;
  asUnsigned;
  constructor(bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
  this.bytes = bytes;
- debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
+ debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
  }
  static fromBytes(bytes) {
- debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
+ debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  return new Registers(bytes);
  }
  getBytesAsLittleEndian(index, len) {
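Note: the rewrite of `tryAsRegisterIndex` also fixes an off-by-one bound: the old `index <= NO_OF_REGISTERS` accepted 13, while with `NO_OF_REGISTERS = 13` the valid indices are 0 through 12. The new `index < NO_OF_REGISTERS` rejects it:

    tryAsRegisterIndex(12); // ok, last valid register
    tryAsRegisterIndex(13); // accepted before this change, now fails the debug_check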
@@ -14586,7 +14635,7 @@ class Mask {
  return this.lookupTableForward[index] === 0;
  }
  getNoOfBytesToNextInstruction(index) {
- debug_check(index >= 0, `index (${index}) cannot be a negative number`);
+ debug_check `${index >= 0} index (${index}) cannot be a negative number`;
  return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
  }
  buildLookupTableForward(mask) {
@@ -15586,7 +15635,7 @@ const PAGE_SIZE_SHIFT = 12;
  const PAGE_SIZE = 1 << PAGE_SIZE_SHIFT;
  const MIN_ALLOCATION_SHIFT = (() => {
  const MIN_ALLOCATION_SHIFT = 7;
- debug_check(MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT, "incorrect minimal allocation shift");
+ debug_check `${MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
  return MIN_ALLOCATION_SHIFT;
  })();
  const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
@@ -15599,16 +15648,28 @@ const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE;
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts


- const tryAsMemoryIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX, `Incorrect memory index: ${index}!`);
- const tryAsSbrkIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX + 1, `Incorrect sbrk index: ${index}!`);
+ const tryAsMemoryIndex = (index) => {
+ debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
+ const tryAsSbrkIndex = (index) => {
+ debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX + 1} Incorrect sbrk index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };

  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts


  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
- const tryAsPageIndex = (index) => ensure(index, index >= 0 && index < PAGE_SIZE, `Incorect page index: ${index}!`);
+ const tryAsPageIndex = (index) => {
+ debug_check `${index >= 0 && index < PAGE_SIZE}, Incorect page index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
  /** Ensure that given `index` represents an index of one of the pages. */
- const tryAsPageNumber = (index) => ensure(index, index >= 0 && index <= LAST_PAGE_NUMBER, `Incorrect page number: ${index}!`);
+ const tryAsPageNumber = (index) => {
+ debug_check `${index >= 0 && index <= LAST_PAGE_NUMBER}, Incorect page number: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
  /**
  * Get the next page number and wrap the result if it is bigger than LAST_PAGE_NUMBER
  *
@@ -16140,10 +16201,10 @@ class MemoryBuilder {
  */
  setReadablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- debug_check(start < end, "end has to be bigger than start");
- debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(data.length <= end - start, "the initial data is longer than address range");
+ debug_check `${start < end} end has to be bigger than start`;
+ debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${data.length <= end - start} the initial data is longer than address range`;
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -16168,10 +16229,10 @@ class MemoryBuilder {
  */
  setWriteablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- debug_check(start < end, "end has to be bigger than start");
- debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(data.length <= end - start, "the initial data is longer than address range");
+ debug_check `${start < end} end has to be bigger than start`;
+ debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${data.length <= end - start} the initial data is longer than address range`;
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -16193,7 +16254,7 @@ class MemoryBuilder {
  this.ensureNotFinalized();
  const pageOffset = start % PAGE_SIZE;
  const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
- debug_check(data.length <= remainingSpaceOnPage, "The data has to fit into a single page.");
+ debug_check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
  const length = data.length;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -16207,7 +16268,10 @@ class MemoryBuilder {
  return this;
  }
  finalize(startHeapIndex, endHeapIndex) {
- debug_check(startHeapIndex <= endHeapIndex, `startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})`);
+ debug_check `
+ ${startHeapIndex <= endHeapIndex}
+ startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
+ `;
  this.ensureNotFinalized();
  const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
  const pages = PageRange.fromMemoryRange(range);
@@ -16445,7 +16509,7 @@ function mulU64(a, b) {
  *
  * The result of multiplication is a 64-bits number and we are only interested in the part that lands in the upper 32-bits.
  * For example if we multiply `0xffffffff * 0xffffffff`, we get:
-
+ 
  * | 64-bits | 64-bits |
  * +--------------------+--------------------+
  * | upper | lower |
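Note: the comment's example is easy to verify with BigInt arithmetic; a sketch of the idea (hypothetical helper, not the package's exported API):

    const mulUpperU32 = (a, b) => Number((BigInt(a) * BigInt(b)) >> 32n);
    mulUpperU32(0xffff_ffff, 0xffff_ffff); // 0xffff_fffe, the upper 32 bits of the product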
@@ -16481,7 +16545,7 @@ function mulUpperSS(a, b) {
  return interpretAsSigned(resultLimitedTo64Bits);
  }
  function unsignedRightShiftBigInt(value, shift) {
- debug_check(shift >= 0, "Shift count must be non-negative");
+ debug_check `${shift >= 0} Shift count must be non-negative`;
  const fillBit = value < 0 ? "1" : "0";
  // Convert the BigInt to its binary representation
  const binaryRepresentation = value.toString(2).padStart(64, fillBit);
@@ -17897,7 +17961,10 @@ class TwoRegsTwoImmsDispatcher {
  class JumpTable {
  indices;
  constructor(itemByteLength, bytes) {
- debug_check(itemByteLength === 0 || bytes.length % itemByteLength === 0, `Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!`);
+ debug_check `
+ ${itemByteLength === 0 || bytes.length % itemByteLength === 0}
+ Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!
+ `;
  const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;
  this.indices = new Uint32Array(length);
  for (let i = 0; i < length; i++) {
@@ -18341,7 +18408,10 @@ class ReturnValue {
  this.consumedGas = consumedGas;
  this.status = status;
  this.memorySlice = memorySlice;
- debug_check((status === null && memorySlice !== null) || (status !== null && memorySlice === null), "`status` and `memorySlice` must not both be null or both be non-null — exactly one must be provided");
+ debug_check `
+ ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
+ 'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
+ `;
  }
  static fromStatus(consumedGas, status) {
  return new ReturnValue(consumedGas, status, null);
@@ -18390,7 +18460,10 @@ class HostCalls {
  if (status !== status_Status.HOST) {
  return this.getReturnValue(status, pvmInstance);
  }
- debug_check(pvmInstance.getExitParam() !== null, "We know that the exit param is not null, because the status is `Status.HOST`");
+ debug_check `
+ ${pvmInstance.getExitParam() !== null}
+ "We know that the exit param is not null, because the status is 'Status.HOST'
+ `;
  const hostCallIndex = pvmInstance.getExitParam() ?? -1;
  const gas = pvmInstance.getGasCounter();
  const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -18450,7 +18523,7 @@ class host_calls_manager_HostCallsManager {
  constructor({ missing, handlers = [], }) {
  this.missing = missing;
  for (const handler of handlers) {
- debug_check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
+ debug_check `${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
  this.hostCalls.set(handler.index, handler);
  }
  }
@@ -18573,7 +18646,7 @@ function getServiceId(serviceId) {
  return null;
  }
  function writeServiceIdAsLeBytes(serviceId, destination) {
- debug_check(destination.length >= SERVICE_ID_BYTES, "Not enough space in the destination.");
+ debug_check `${destination.length >= SERVICE_ID_BYTES} Not enough space in the destination.`;
  destination.set(numbers_u32AsLeBytes(serviceId));
  }
  /** Clamp a U64 to the maximum value of a 32-bit unsigned integer. */
@@ -18662,13 +18735,27 @@ class SpiProgram extends WithDebug {
  this.registers = registers;
  }
  }
+ /**
+ * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
+ *
+ * E_n - little endian encoding, n - length
+ * o - initial read only data
+ * w - initial heap
+ * z - heap pages filled with zeros
+ * s - stack size
+ * c - program code
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
+ */
  function decodeStandardProgram(program, args) {
  const decoder = decoder_Decoder.fromBlob(program);
  const oLength = decoder.u24();
  const wLength = decoder.u24();
- const argsLength = ensure(args.length, args.length <= DATA_LEGNTH, "Incorrect arguments length");
- const readOnlyLength = ensure(oLength, oLength <= DATA_LEGNTH, "Incorrect readonly segment length");
- const heapLength = ensure(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
+ debug_check `${args.length <= DATA_LEGNTH} Incorrect arguments length`;
+ debug_check `${oLength <= DATA_LEGNTH} Incorrect readonly segment length`;
+ const readOnlyLength = oLength;
+ debug_check `${wLength <= DATA_LEGNTH} Incorrect heap segment length`;
+ const heapLength = wLength;
  const noOfHeapZerosPages = decoder.u16();
  const stackSize = decoder.u24();
  const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
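Note: the new doc comment pins down the standard-program container decoded below. A hypothetical encoder sketch for that layout (illustration only; assumes all lengths stay below 2**31 so the shifts remain exact):

    function encodeSpi(o, w, z, s, c) {
      // E_n: n-byte little-endian encoding of a number
      const le = (value, n) => Array.from({ length: n }, (_, i) => (value >> (8 * i)) & 0xff);
      return Uint8Array.from([
        ...le(o.length, 3), // E_3(|o|)
        ...le(w.length, 3), // E_3(|w|)
        ...le(z, 2),        // E_2(z)
        ...le(s, 3),        // E_3(s)
        ...o, ...w,
        ...le(c.length, 4), // E_4(|c|)
        ...c,
      ]);
    }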
@@ -18684,14 +18771,14 @@ function decodeStandardProgram(program, args) {
  const stackStart = STACK_SEGMENT - memory_utils_alignToPageSize(stackSize);
  const stackEnd = STACK_SEGMENT;
  const argsStart = ARGS_SEGMENT;
- const argsEnd = argsStart + memory_utils_alignToPageSize(argsLength);
- const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(argsLength);
+ const argsEnd = argsStart + memory_utils_alignToPageSize(args.length);
+ const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(args.length);
  function nonEmpty(s) {
  return s !== false;
  }
  const readableMemory = [
  readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
- argsLength > 0 && getMemorySegment(argsStart, argsEnd, args),
+ args.length > 0 && getMemorySegment(argsStart, argsEnd, args),
  argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
  ].filter(nonEmpty);
  const writeableMemory = [
@@ -18920,88 +19007,6 @@ class PvmExecutor {



- ;// CONCATENATED MODULE: ./workers/importer/import-queue.ts
-
-
-
-
-
- class ImportQueue {
- spec;
- importer;
- toImport = SortedArray.fromSortedArray((a, b) => {
- const diff = a.timeSlot - b.timeSlot;
- if (diff < 0) {
- return Ordering.Greater;
- }
- if (diff > 0) {
- return Ordering.Less;
- }
- return Ordering.Equal;
- });
- queuedBlocks = HashSet.new();
- lastEpoch = tryAsEpoch(2 ** 32 - 1);
- constructor(spec, importer) {
- this.spec = spec;
- this.importer = importer;
- }
- isCurrentEpoch(timeSlot) {
- const epoch = Math.floor(timeSlot / this.spec.epochLength);
- return this.lastEpoch === epoch;
- }
- startPreverification() {
- for (const entry of this.toImport) {
- if (this.isCurrentEpoch(entry.timeSlot)) {
- entry.seal = this.importer.preverifySeal(entry.timeSlot, entry.block);
- }
- }
- }
- static getBlockDetails(block) {
- let encodedHeader;
- let timeSlot;
- try {
- encodedHeader = block.header.encoded();
- timeSlot = block.header.view().timeSlotIndex.materialize();
- }
- catch {
- return result_Result.error("invalid");
- }
- const headerHash = hashBytes(encodedHeader).asOpaque();
- return result_Result.ok(new WithHash(headerHash, { block, timeSlot }));
- }
- push(details) {
- const headerHash = details.hash;
- if (this.queuedBlocks.has(headerHash)) {
- return result_Result.error("already queued");
- }
- const { timeSlot, block } = details.data;
- const entry = {
- headerHash,
- timeSlot,
- block,
- seal: this.isCurrentEpoch(timeSlot) ? this.importer.preverifySeal(timeSlot, block) : Promise.resolve(null),
- };
- this.toImport.insert(entry);
- this.queuedBlocks.insert(headerHash);
- return result_Result.ok(result_OK);
- }
- shift() {
- const entry = this.toImport.pop();
- if (entry !== undefined) {
- this.queuedBlocks.delete(entry.headerHash);
- const blockEpoch = Math.floor(entry.timeSlot / this.spec.epochLength);
- const hasEpochChanged = this.lastEpoch !== blockEpoch;
- this.lastEpoch = tryAsEpoch(blockEpoch);
- // currently removed block is changing the epoch, so fire up
- // preverifcation for the following blocks.
- if (hasEpochChanged) {
- this.startPreverification();
- }
- }
- return entry;
- }
- }
-
  ;// CONCATENATED MODULE: ./packages/jam/transition/block-verifier.ts

@@ -19015,7 +19020,7 @@ var BlockVerifierError;
  BlockVerifierError[BlockVerifierError["InvalidStateRoot"] = 4] = "InvalidStateRoot";
  BlockVerifierError[BlockVerifierError["AlreadyImported"] = 5] = "AlreadyImported";
  })(BlockVerifierError || (BlockVerifierError = {}));
- const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
+ const block_verifier_ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
  class BlockVerifier {
  hasher;
  blocks;
@@ -19035,7 +19040,7 @@ class BlockVerifier {
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c9d000c9d00?v=0.6.5
  const parentHash = headerView.parentHeaderHash.materialize();
  // importing genesis block
- if (!parentHash.isEqualTo(ZERO_HASH)) {
+ if (!parentHash.isEqualTo(block_verifier_ZERO_HASH)) {
  const parentBlock = this.blocks.getHeader(parentHash);
  if (parentBlock === null) {
  return result_Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
@@ -19548,304 +19553,22 @@ async function verifyTickets(bandersnatch, numberOfValidators, epochRoot, ticket
19548
19553
  }));
19549
19554
  }
19550
19555
 
19551
- ;// CONCATENATED MODULE: external "node:os"
19552
- const external_node_os_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:os");
19553
- var external_node_os_default = /*#__PURE__*/__nccwpck_require__.n(external_node_os_namespaceObject);
19554
- ;// CONCATENATED MODULE: ./packages/core/concurrent/parent.ts
19555
-
19556
-
19557
- // Amount of tasks in the queue that will trigger creation of new worker thread.
19558
- // NOTE this might need to be configurable in the future.
19559
- const QUEUE_SIZE_WORKER_THRESHOLD = 5;
- /** Execution pool manager. */
- class Executor {
-     workers;
-     maxWorkers;
-     workerPath;
-     /** Initialize a new concurrent executor given a path to the worker. */
-     static async initialize(workerPath, options) {
-         debug_check(options.maxWorkers > 0, "Max workers has to be positive.");
-         debug_check(options.minWorkers <= options.maxWorkers, "Min workers has to be lower or equal to max workers.");
-         const workers = [];
-         for (let i = 0; i < options.minWorkers; i++) {
-             workers.push(await initWorker(workerPath));
-         }
-         return new Executor(workers, options.maxWorkers, workerPath);
-     }
-     // keeps track of the indices of worker threads that are currently free and available to execute tasks
-     freeWorkerIndices = [];
-     taskQueue = [];
-     isDestroyed = false;
-     isWorkerInitializing = false;
-     constructor(workers, maxWorkers, workerPath) {
-         this.workers = workers;
-         this.maxWorkers = maxWorkers;
-         this.workerPath = workerPath;
-         // initial free workers.
-         for (let i = 0; i < workers.length; i++) {
-             this.freeWorkerIndices.push(i);
-         }
-     }
-     /** Attempt to initialize a new worker. */
-     async initNewWorker(onSuccess = () => { }) {
-         if (this.workers.length >= this.maxWorkers) {
-             // biome-ignore lint/suspicious/noConsole: warning
-             console.warn(`Task queue has ${this.taskQueue.length} pending items and we can't init any more workers.`);
-             return;
-         }
-         if (this.isWorkerInitializing) {
-             return;
-         }
-         this.isWorkerInitializing = true;
-         this.workers.push(await initWorker(this.workerPath));
-         this.freeWorkerIndices.push(this.workers.length - 1);
-         this.isWorkerInitializing = false;
-         onSuccess();
-     }
-     /** Terminate all workers and clear the executor. */
-     async destroy() {
-         for (const worker of this.workers) {
-             worker.port.close();
-             await worker.worker.terminate();
-         }
-         this.workers.length = 0;
-         this.isDestroyed = true;
-     }
-     /** Execute a task with given parameters. */
-     async run(params) {
-         return new Promise((resolve, reject) => {
-             if (this.isDestroyed) {
-                 reject("pool destroyed");
-                 return;
-             }
-             this.taskQueue.push({
-                 params,
-                 resolve,
-                 reject,
-             });
-             this.processEntryFromTaskQueue();
-         });
-     }
-     /** Process single element from the task queue. */
-     processEntryFromTaskQueue() {
-         const freeWorker = this.freeWorkerIndices.pop();
-         // no free workers available currently,
-         // we will retry when one of the tasks completes.
-         if (freeWorker === undefined) {
-             if (this.taskQueue.length > QUEUE_SIZE_WORKER_THRESHOLD) {
-                 this.initNewWorker(() => {
-                     // process an entry in this newly initialized worker.
-                     this.processEntryFromTaskQueue();
-                 });
-             }
-             return;
-         }
-         const task = this.taskQueue.pop();
-         // no tasks in the queue
-         if (task === undefined) {
-             this.freeWorkerIndices.push(freeWorker);
-             return;
-         }
-         const worker = this.workers[freeWorker];
-         worker.runTask(task, () => {
-             // mark the worker as available again
-             this.freeWorkerIndices.push(freeWorker);
-             // and continue processing the queue
-             this.processEntryFromTaskQueue();
-         });
-     }
- }
- async function initWorker(workerPath) {
-     // create a worker and initialize communication channel
-     const { port1, port2 } = new MessageChannel();
-     const workerThread = new external_node_worker_threads_namespaceObject.Worker(workerPath, {});
-     workerThread.postMessage(port1, [port1]);
-     // wait for the worker to start
-     await new Promise((resolve, reject) => {
-         workerThread.once("message", resolve);
-         workerThread.once("error", reject);
-     });
-     // make sure the threads don't prevent the program from stopping.
-     workerThread.unref();
-     return new WorkerChannel(workerThread, port2);
- }
- class WorkerChannel {
-     worker;
-     port;
-     constructor(worker, port) {
-         this.worker = worker;
-         this.port = port;
-     }
-     runTask(task, onFinish) {
-         const message = {
-             params: task.params,
-         };
-         // when we receive a response, make sure to process it
-         this.port.once("message", (e) => {
-             if (e.isOk) {
-                 task.resolve(e.ok);
-             }
-             else {
-                 task.reject(new Error(e.error));
-             }
-             onFinish();
-         });
-         // send the task to work on.
-         this.port.postMessage(message, message.params.getTransferList());
-     }
- }
-
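The removed Executor hands each task to whichever worker index is on top of the free list, and re-enters processEntryFromTaskQueue whenever a task finishes or a new worker spins up. One detail worth noting: tasks are both pushed and popped from the end of taskQueue, so the removed "queue" actually drained LIFO. A minimal sketch of the same free-list scheduling idea, with illustrative names (not package APIs) and a FIFO queue:

    // MiniPool: hypothetical reduction of the free-list scheduler above.
    // `workers` is an array of async functions, one per worker slot.
    class MiniPool {
        constructor(workers) {
            this.workers = workers;
            this.freeIndices = workers.map((_, i) => i);
            this.tasks = [];
        }
        run(task) {
            this.tasks.push(task);
            this.pump();
        }
        pump() {
            const free = this.freeIndices.pop();
            if (free === undefined) return; // retried when a running task completes
            const task = this.tasks.shift(); // FIFO, unlike the pop() above
            if (task === undefined) {
                this.freeIndices.push(free);
                return;
            }
            this.workers[free](task).finally(() => {
                this.freeIndices.push(free); // the worker is available again
                this.pump(); // keep draining the queue
            });
        }
    }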
- ;// CONCATENATED MODULE: ./packages/core/concurrent/worker.ts
-
-
- /** An in-worker abstraction. */
- class ConcurrentWorker {
-     runInternal;
-     state;
-     static new(run, state) {
-         return new ConcurrentWorker(run, state);
-     }
-     constructor(runInternal, state) {
-         this.runInternal = runInternal;
-         this.state = state;
-     }
-     listenToParentPort() {
-         if (external_node_worker_threads_namespaceObject.parentPort === null) {
-             throw new Error("This method is meant to be run inside a worker thread!");
-         }
-         external_node_worker_threads_namespaceObject.parentPort.once("close", () => {
-             process.exit(0);
-         });
-         external_node_worker_threads_namespaceObject.parentPort.once("message", (port) => {
-             this.listenTo(port);
-             // send back readiness signal.
-             external_node_worker_threads_namespaceObject.parentPort?.postMessage("ready");
-         });
-     }
-     listenTo(port) {
-         port.once("close", () => {
-             port.removeAllListeners();
-             process.exit(0);
-         });
-         port.on("message", (ev) => {
-             const { params } = ev;
-             this.run(params)
-                 .then((result) => {
-                     const response = result_Result.ok(result);
-                     port.postMessage(response, result.getTransferList());
-                 })
-                 .catch((e) => {
-                     const response = result_Result.error(`${e}`);
-                     port.postMessage(response, []);
-                 });
-         });
-     }
-     async run(params) {
-         return await this.runInternal(params, this.state);
-     }
-     async destroy() { }
- }
-
- ;// CONCATENATED MODULE: ./packages/core/concurrent/index.ts
-
-
-
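initWorker and ConcurrentWorker.listenToParentPort were the two halves of one handshake: the parent creates a MessageChannel, transfers port1 to the worker in the very first postMessage, and waits until the worker answers "ready"; afterwards all traffic flows over the dedicated port. A self-contained sketch of that handshake using only node:worker_threads (file names are illustrative):

    // parent.mjs
    import { Worker, MessageChannel } from "node:worker_threads";
    const { port1, port2 } = new MessageChannel();
    const worker = new Worker(new URL("./child.mjs", import.meta.url));
    worker.postMessage(port1, [port1]); // the port is transferred, not copied
    await new Promise((resolve, reject) => {
        worker.once("message", resolve); // the "ready" signal
        worker.once("error", reject);
    });
    port2.on("message", (result) => console.log(result));
    port2.postMessage({ params: "job" });

    // child.mjs
    import { parentPort } from "node:worker_threads";
    parentPort.once("message", (port) => {
        port.on("message", (job) => port.postMessage(`done: ${job.params}`));
        parentPort.postMessage("ready");
    });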
- ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/params.ts
- var Method;
- (function (Method) {
-     Method[Method["RingCommitment"] = 0] = "RingCommitment";
-     Method[Method["BatchVerifyTickets"] = 1] = "BatchVerifyTickets";
-     Method[Method["VerifySeal"] = 2] = "VerifySeal";
- })(Method || (Method = {}));
- class params_Response {
-     data;
-     constructor(data) {
-         this.data = data;
-     }
-     getTransferList() {
-         return [this.data.buffer];
-     }
- }
- class Params {
-     params;
-     constructor(params) {
-         this.params = params;
-     }
-     getTransferList() {
-         return [];
-     }
- }
-
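getTransferList fed the second argument of postMessage: every ArrayBuffer listed there is moved to the receiving thread rather than structured-cloned, which is why params_Response transferred this.data.buffer while Params transferred nothing (its fields were copied). The detach semantics in isolation:

    import { MessageChannel } from "node:worker_threads";
    const { port1, port2 } = new MessageChannel();
    const buf = new ArrayBuffer(32);
    port2.on("message", (msg) => console.log(msg.data.byteLength)); // 32
    port1.postMessage({ data: new Uint8Array(buf) }, [buf]);
    console.log(buf.byteLength); // 0 -- the buffer is detached, ownership moved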
- ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/worker.ts
-
-
-
-
- const worker = ConcurrentWorker.new(async (p) => {
-     await initAll();
-     const params = p.params;
-     const method = params.method;
-     if (method === Method.RingCommitment) {
-         return Promise.resolve(new params_Response(bandersnatch_exports.ring_commitment(params.keys)));
-     }
-     if (method === Method.BatchVerifyTickets) {
-         return Promise.resolve(new params_Response(bandersnatch_exports.batch_verify_tickets(params.ringSize, params.commitment, params.ticketsData, params.contextLength)));
-     }
-     if (method === Method.VerifySeal) {
-         return Promise.resolve(new params_Response(bandersnatch_exports.verify_seal(params.authorKey, params.signature, params.payload, params.auxData)));
-     }
-     debug_assertNever(method);
- }, null);
-
- ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/index.ts
-
+ ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm.ts
 
-
-
- const workerFile = __nccwpck_require__.ab + "bootstrap-bandersnatch.mjs";
  class BandernsatchWasm {
-     executor;
-     constructor(executor) {
-         this.executor = executor;
-     }
-     destroy() {
-         return this.executor.destroy();
-     }
-     static async new({ synchronous }) {
-         const workers = external_node_os_default().cpus().length;
-         return new BandernsatchWasm(!synchronous
-             ? await Executor.initialize(workerFile, {
-                 minWorkers: Math.max(1, Math.floor(workers / 2)),
-                 maxWorkers: workers,
-             })
-             : worker);
+     constructor() { }
+     static async new() {
+         await initAll();
+         return new BandernsatchWasm();
      }
      async verifySeal(authorKey, signature, payload, auxData) {
-         const x = await this.executor.run(new Params({
-             method: Method.VerifySeal,
-             authorKey,
-             signature,
-             payload,
-             auxData,
-         }));
-         return x.data;
+         return bandersnatch_exports.verify_seal(authorKey, signature, payload, auxData);
      }
      async getRingCommitment(keys) {
-         const x = await this.executor.run(new Params({
-             method: Method.RingCommitment,
-             keys,
-         }));
-         return x.data;
+         return bandersnatch_exports.ring_commitment(keys);
      }
      async batchVerifyTicket(ringSize, commitment, ticketsData, contextLength) {
-         const x = await this.executor.run(new Params({
-             method: Method.BatchVerifyTickets,
-             ringSize,
-             commitment,
-             ticketsData,
-             contextLength,
-         }));
-         return x.data;
+         return bandersnatch_exports.batch_verify_tickets(ringSize, commitment, ticketsData, contextLength);
      }
  }
 
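After this change BandernsatchWasm.new() just awaits the WASM initialization and every method calls straight into the bandersnatch exports on the calling thread; there is no executor to destroy and no worker bootstrap file to ship. A usage sketch (the byte-level argument encodings are package-internal, so the values below are placeholders):

    const wasm = await BandernsatchWasm.new(); // one-time WASM init
    // Both calls now run synchronously inside the WASM module on this thread,
    // instead of being serialized into Params and shipped to a worker:
    const commitment = await wasm.getRingCommitment(ringKeysBytes);
    const sealOutput = await wasm.verifySeal(authorKey, signature, payload, auxData);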
@@ -19888,7 +19611,7 @@ class Safrole {
      chainSpec;
      state;
      bandersnatch;
-     constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
+     constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new()) {
          this.chainSpec = chainSpec;
          this.state = state;
          this.bandersnatch = bandersnatch;
@@ -20266,7 +19989,7 @@ var SafroleSealError;
  const BANDERSNATCH_ZERO_KEY = bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque();
  class SafroleSeal {
      bandersnatch;
-     constructor(bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
+     constructor(bandersnatch = BandernsatchWasm.new()) {
          this.bandersnatch = bandersnatch;
      }
      /**
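Note that in both constructors the default value is the promise returned by the async BandernsatchWasm.new(), not a ready instance, so the bandersnatch field presumably gets awaited at each use site. The pattern in a hypothetical reduction (not package code):

    class Holder {
        constructor(wasm = BandernsatchWasm.new()) {
            this.wasm = wasm; // Promise<BandernsatchWasm>
        }
        async verify(...args) {
            const wasm = await this.wasm; // awaiting a settled promise is cheap
            return wasm.verifySeal(...args);
        }
    }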
@@ -20578,8 +20301,8 @@ class PartiallyUpdatedState {
          this.stateUpdate.services.preimages.push(newUpdate);
      }
      updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
-         debug_check(items >= 0, `storageUtilisationCount has to be a positive number, got: ${items}`);
-         debug_check(bytes >= 0, `storageUtilisationBytes has to be a positive number, got: ${bytes}`);
+         debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
+         debug_check `${bytes >= 0} storageUtilisationBytes has to be a positive number, got: ${bytes}`;
          const overflowItems = !isU32(items);
          const overflowBytes = !isU64(bytes);
          // TODO [ToDr] this is not specified in GP, but it seems sensible.
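From here on, every debug_check(condition, message) call site is rewritten into the tagged-template form, with the condition as the first interpolation and the failure message as the rest of the template. The shape of the change in isolation:

    // 0.1.0 -- the message template is evaluated into a string on every call
    debug_check(items >= 0, `storageUtilisationCount has to be a positive number, got: ${items}`);
    // 0.1.1 -- the template pieces are only joined into a message if the check fails
    debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;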
@@ -21004,7 +20727,7 @@ class AccumulateExternalities {
          }
          // TODO [ToDr] Not sure if we should update the service info in that case,
          // but for now we let that case fall-through.
-         debug_check(len === PreimageStatusKind.Unavailable);
+         debug_check `${len === PreimageStatusKind.Unavailable} preimage is not unavailable`;
      }
      // make sure we have enough balance for this update
      // https://graypaper.fluffylabs.dev/#/9a08063/381201381601?v=0.6.6
@@ -21500,7 +21223,7 @@ class Assurances {
              return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
          }
          prevValidatorIndex = assurance.validatorIndex;
-         debug_check(bitfield.bitLength === coresCount, `Invalid bitfield length of ${bitfield.bitLength}`);
+         debug_check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
          const setBits = bitfield.indicesOfSetBits();
          for (const idx of setBits) {
              perCoreAssurances[idx] += 1;
@@ -23824,7 +23547,7 @@ class DeferredTransfers {
          transferStatistics.set(serviceId, { count: numbers_tryAsU32(transfers.length), gasUsed: common_tryAsServiceGas(consumedGas) });
          const [updatedState, checkpointedState] = partialState.getStateUpdates();
          currentStateUpdate = updatedState;
-         debug_check(checkpointedState === null, "On transfer cannot invoke checkpoint.");
+         debug_check `${checkpointedState === null} On transfer cannot invoke checkpoint.`;
      }
      return result_Result.ok({
          // NOTE: we return only services, since it's impossible to update
@@ -24162,7 +23885,7 @@ const ENTROPY_BYTES = 32;
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
  function fisherYatesShuffle(arr, entropy) {
-     debug_check(entropy.length === ENTROPY_BYTES, `Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`);
+     debug_check `${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
      const n = arr.length;
      const randomNumbers = hashToNumberSequence(entropy, arr.length);
      const result = new Array(n);
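For context, a Fisher-Yates shuffle walks the array from the back, swapping each position with a uniformly chosen earlier one; here the randomness comes from a hash-derived number sequence rather than a PRNG. A generic sketch of that deterministic variant (typeberry's exact index derivation is specified by the Gray Paper and may differ; naive modulo also carries a slight bias):

    function shuffle(arr, randomNumbers) {
        const result = [...arr];
        for (let i = result.length - 1; i > 0; i--) {
            // pick j uniformly from [0, i] using the next number in the sequence
            const j = randomNumbers[result.length - 1 - i] % (i + 1);
            [result[i], result[j]] = [result[j], result[i]];
        }
        return result;
    }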
@@ -25009,7 +24732,7 @@ class Statistics {
      /** get statistics for the current epoch */
      const statistics = this.getStatistics(slot);
      const { current, cores, services } = statistics;
-     debug_check(current[authorIndex] !== undefined, "authorIndex is out of bounds");
+     debug_check `${current[authorIndex] !== undefined} authorIndex is out of bounds`;
      /** One validator can produce at most one block per timeslot */
      const newBlocksCount = current[authorIndex].blocks + 1;
      current[authorIndex].blocks = numbers_tryAsU32(newBlocksCount);
@@ -25209,11 +24932,11 @@ class OnChain {
      authorization;
      // chapter 13: https://graypaper.fluffylabs.dev/#/68eaa1f/18b60118b601?v=0.6.4
      statistics;
-     constructor(chainSpec, state, blocks, hasher, { enableParallelSealVerification }) {
+     constructor(chainSpec, state, blocks, hasher) {
          this.chainSpec = chainSpec;
          this.state = state;
          this.hasher = hasher;
-         const bandersnatch = BandernsatchWasm.new({ synchronous: !enableParallelSealVerification });
+         const bandersnatch = BandernsatchWasm.new();
          this.statistics = new Statistics(chainSpec, state);
          this.safrole = new Safrole(chainSpec, state, bandersnatch);
          this.safroleSeal = new SafroleSeal(bandersnatch);
@@ -25231,16 +24954,16 @@ class OnChain {
          const sealState = this.safrole.getSafroleSealState(timeSlot);
          return await this.safroleSeal.verifyHeaderSeal(block.header.view(), sealState);
      }
-     async transition(block, headerHash, preverifiedSeal = null, omitSealVerification = false) {
+     async transition(block, headerHash, omitSealVerification = false) {
          const headerView = block.header.view();
          const header = block.header.materialize();
          const timeSlot = header.timeSlotIndex;
          // safrole seal
-         let newEntropyHash = preverifiedSeal;
+         let newEntropyHash;
          if (omitSealVerification) {
              newEntropyHash = hashBytes(header.seal).asOpaque();
          }
-         if (newEntropyHash === null) {
+         else {
              const sealResult = await this.verifySeal(timeSlot, block);
              if (sealResult.isError) {
                  return stfError(StfErrorKind.SafroleSeal, sealResult);
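With preverifiedSeal gone, the three-way seal handling (pre-verified, skipped, verified inline) collapses into a plain two-way branch. Condensed from the hunk above, with the error handling kept and everything else elided:

    let newEntropyHash;
    if (omitSealVerification) {
        // trust the seal and only hash it to derive the entropy
        newEntropyHash = hashBytes(header.seal).asOpaque();
    } else {
        // verify inline; a bad seal aborts the whole transition
        const sealResult = await this.verifySeal(timeSlot, block);
        if (sealResult.isError) {
            return stfError(StfErrorKind.SafroleSeal, sealResult);
        }
        // ... the success path continues past the end of the hunk
    }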
@@ -25347,7 +25070,7 @@ class OnChain {
      assertEmpty(deferredTransfersRest);
      const accumulateRoot = await this.accumulateOutput.transition({ accumulationOutputLog });
      // recent history
-     const recentHistoryUpdate = await this.recentHistory.transition({
+     const recentHistoryUpdate = this.recentHistory.transition({
          partial: recentHistoryPartialUpdate,
          headerHash,
          accumulateRoot,
@@ -25427,6 +25150,7 @@ function checkOffendersMatch(offendersMark, headerOffendersMark) {
 
 
 
+
  var ImporterErrorKind;
  (function (ImporterErrorKind) {
      ImporterErrorKind[ImporterErrorKind["Verifier"] = 0] = "Verifier";
@@ -25454,29 +25178,28 @@ class Importer {
              throw new Error(`Unable to load best state from header hash: ${currentBestHeaderHash}.`);
          }
          this.verifier = new BlockVerifier(hasher, blocks);
-         this.stf = new OnChain(spec, state, blocks, hasher, { enableParallelSealVerification: true });
+         this.stf = new OnChain(spec, state, blocks, hasher);
          this.state = state;
          this.currentHash = currentBestHeaderHash;
          logger.info(`😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`);
      }
-     /** Attempt to pre-verify the seal to speed up importing. */
-     async preverifySeal(timeSlot, block) {
-         try {
-             const res = await this.stf.verifySeal(timeSlot, block);
-             if (res.isOk) {
-                 return res.ok;
-             }
-             this.logger.warn(`Unable to pre-verify the seal: ${resultToString(res)}`);
-             return null;
-         }
-         catch (e) {
-             this.logger.warn(`Error while trying to pre-verify the seal: ${e}`);
-             return null;
-         }
-     }
-     async importBlock(block, preverifiedSeal, omitSealVerification = false) {
+     async importBlock(block, omitSealVerification) {
+         const timer = measure("importBlock");
+         const timeSlot = extractTimeSlot(block);
+         const maybeBestHeader = await this.importBlockInternal(block, omitSealVerification);
+         if (maybeBestHeader.isOk) {
+             const bestHeader = maybeBestHeader.ok;
+             this.logger.info(`🧊 Best block: #${timeSlot} (${bestHeader.hash})`);
+             this.logger.log(timer());
+             return maybeBestHeader;
+         }
+         this.logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
+         this.logger.log(timer());
+         return maybeBestHeader;
+     }
+     async importBlockInternal(block, omitSealVerification = false) {
          const logger = this.logger;
-         logger.log(`🧱 Attempting to import a new block ${preverifiedSeal !== null ? "(seal preverified)" : ""}`);
+         logger.log("🧱 Attempting to import a new block");
          const timerVerify = measure("import:verify");
          const hash = await this.verifier.verifyBlock(block);
          logger.log(timerVerify());
@@ -25501,7 +25224,7 @@ class Importer {
          const headerHash = hash.ok;
          logger.log(`🧱 Verified block: Got hash ${headerHash} for block at slot ${timeSlot}.`);
          const timerStf = measure("import:stf");
-         const res = await this.stf.transition(block, headerHash, preverifiedSeal, omitSealVerification);
+         const res = await this.stf.transition(block, headerHash, omitSealVerification);
          logger.log(timerStf());
          if (res.isError) {
              return importerError(ImporterErrorKind.Stf, res);
@@ -25551,6 +25274,19 @@ class Importer {
          return stateEntries ?? null;
      }
  }
+ /**
+  * Attempt to safely extract the timeslot of a block.
+  *
+  * NOTE: decoding may fail if the block encoding is invalid; a max-u32 sentinel is returned in that case.
+  */
+ function extractTimeSlot(block) {
+     try {
+         return block.header.view().timeSlotIndex.materialize();
+     }
+     catch {
+         return tryAsTimeSlot(2 ** 32 - 1);
+     }
+ }
 
  ;// CONCATENATED MODULE: ./workers/generic/finished.ts
 
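extractTimeSlot exists so the new outer importBlock can log a slot number before the block has been validated; decoding a malformed block would otherwise throw inside a log statement. The 2 ** 32 - 1 fallback is the largest u32, presumably acting as an "unknown slot" marker. Expected behaviour (variable names are illustrative):

    extractTimeSlot(wellFormedBlockView); // => the decoded slot, e.g. 1234
    extractTimeSlot(malformedBlockView);  // => 4294967295, i.e. 2 ** 32 - 1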
@@ -25814,7 +25550,6 @@ class ImporterReady extends State {
              response: rootHash === null ? bytes_Bytes.zero(hash_HASH_SIZE).raw : rootHash.raw,
          };
      }
-     // NOTE [ToDr] This should rather be using the import queue, instead of going directly.
      async importBlock(block) {
          if (this.importer === null) {
              state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
@@ -25826,17 +25561,13 @@ class ImporterReady extends State {
          if (block instanceof Uint8Array) {
              const config = this.getConfig();
              const blockView = decoder_Decoder.decodeObject(Block.Codec.View, block, config.chainSpec);
-             const headerView = blockView.header.view();
-             const timeSlot = headerView.timeSlotIndex.materialize();
              let response;
              try {
-                 const res = await this.importer.importBlock(blockView, null, config.omitSealVerification);
+                 const res = await this.importer.importBlock(blockView, config.omitSealVerification);
                  if (res.isOk) {
-                     state_machine_logger.info(`🧊 Best block: #${timeSlot} (${res.ok.hash})`);
-                     response = result_Result.ok(this.importer.getBestStateRootHash() ?? bytes_Bytes.zero(hash_HASH_SIZE).asOpaque());
+                     response = result_Result.ok(this.importer.getBestStateRootHash() ?? ZERO_HASH.asOpaque());
                  }
                  else {
-                     state_machine_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(res)}`);
                      response = result_Result.error(resultToString(res));
                  }
              }
@@ -25884,8 +25615,6 @@ class ImporterReady extends State {
 
 
 
-
-
  const importer_logger = Logger.new(import.meta.filename, "importer");
  if (!external_node_worker_threads_namespaceObject.isMainThread) {
      Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
@@ -25902,7 +25631,6 @@ async function createImporter(config) {
      const importer = new Importer(config.chainSpec, hasher, importer_logger, blocks, states);
      return {
          lmdb,
-         blocks,
          importer,
      };
  }
@@ -25917,65 +25645,27 @@ async function main(channel) {
      importer_logger.info(`📥 Importer starting ${channel.currentState()}`);
      // Await the configuration object
      const ready = await channel.waitForState("ready(importer)");
+     let closeDb = async () => { };
      const finished = await ready.doUntil("finished", async (worker, port) => {
          const config = worker.getConfig();
-         const { blocks, importer } = await createImporter(config);
+         const { lmdb, importer } = await createImporter(config);
+         closeDb = async () => {
+             await lmdb.close();
+         };
          // TODO [ToDr] this is shit, since we have circular dependency.
          worker.setImporter(importer);
          importer_logger.info("📥 Importer waiting for blocks.");
-         // TODO [ToDr] back pressure?
-         let isProcessing = false;
-         const importingQueue = new ImportQueue(config.chainSpec, importer);
          worker.onBlock.on(async (block) => {
-             const details = ImportQueue.getBlockDetails(block);
-             // ignore invalid blocks.
-             if (details.isError) {
-                 importer_logger.trace("🧊 Ignoring invalid block.");
-                 return;
-             }
-             // ignore already known blocks
-             if (blocks.getHeader(details.ok.hash) !== null) {
-                 importer_logger.trace(`🧊 Already imported block: #${details.ok.data.timeSlot}.`);
-                 return;
-             }
-             const importResult = importingQueue.push(details.ok);
-             // ignore blocks that are already queued
-             if (importResult.isError) {
-                 importer_logger.trace(`🧊 Already queued block: #${details.ok.data.timeSlot}.`);
-                 return;
-             }
-             importer_logger.log(`🧊 Queued block: #${details.ok.data.timeSlot} (skip seal: ${config.omitSealVerification})`);
-             if (isProcessing) {
-                 return;
-             }
-             isProcessing = true;
-             try {
-                 for (;;) {
-                     const entry = importingQueue.shift();
-                     if (entry === undefined) {
-                         return;
-                     }
-                     const { block, seal, timeSlot } = entry;
-                     const timer = measure("importBlock");
-                     const maybeBestHeader = await importer.importBlock(block, await seal, config.omitSealVerification);
-                     if (maybeBestHeader.isOk) {
-                         const bestHeader = maybeBestHeader.ok;
-                         worker.announce(port, bestHeader);
-                         importer_logger.info(`🧊 Best block: #${bestHeader.data.timeSlotIndex.materialize()} (${bestHeader.hash})`);
-                     }
-                     else {
-                         importer_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
-                     }
-                     importer_logger.log(timer());
-                 }
-             }
-             finally {
-                 isProcessing = false;
+             const res = await importer.importBlock(block, config.omitSealVerification);
+             if (res.isOk) {
+                 worker.announce(port, res.ok);
              }
          });
          await wasmPromise;
      });
      importer_logger.info("📥 Importer finished. Closing channel.");
+     // close the database
+     await closeDb();
      // Close the comms to gracefully close the app.
      finished.currentState().close(channel);
  }
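The closeDb handling is a small pattern worth naming: the cleanup closure starts as a no-op and is reassigned only once the LMDB handle actually exists, so the await closeDb() after doUntil is safe whether or not the importer ever got as far as opening the database. Reduced to its essentials (doWork and open are stand-ins, not package APIs, and doUntil is assumed to resolve once the finished state is reached):

    let cleanup = async () => { }; // safe no-op until a resource exists
    await doWork(async () => {
        const resource = await open();
        cleanup = async () => {
            await resource.close();
        };
        // ... use the resource ...
    });
    await cleanup(); // closes the resource only if it was ever opened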