@typeberry/jam 0.1.0-b2d0b72 → 0.1.0-eb00e84

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -28332,17 +28332,29 @@ function isBrowser() {
  * We avoid using `node:assert` to keep compatibility with a browser environment.
  * Note the checks should not have any side effects, since we might decide
  * to remove all of them in a post-processing step.
- *
- * NOTE the function is intended to be used as tagged template string for the performance
- * reasons.
  */
- function debug_check(strings, condition, ...data) {
+ function debug_check(condition, message) {
  if (!condition) {
- // add an empty value so that `data.length === strings.length`
- data.unshift("");
- const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
- throw new Error(`Assertion failure:${message.join("")}`);
+ throw new Error(`Assertion failure: ${message ?? ""}`);
+ }
+ }
+ function cast(_a, condition) {
+ return condition;
+ }
+ /**
+ * Yet another function to perform runtime assertions.
+ * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
+ *
+ * In the post-processing step all usages of this functions should be replaced with simple casting. An example:
+ * const x = checkAndType<number, CheckedNumber>(y);
+ * should be replaced with:
+ * const x = y as CheckedNumber;
+ */
+ function ensure(a, condition, message) {
+ if (cast(a, condition)) {
+ return a;
  }
+ throw new Error(`Assertion failure: ${message ?? ""}`);
  }
  /**
  * The function can be used to make sure that a particular type is `never`
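The hunk above replaces the tagged-template assertion helper with a plain two-argument function, and introduces `ensure`, which asserts a condition and returns the checked value, so a check-then-cast pair collapses into a single expression. A minimal sketch of how the new helpers fit together, in TypeScript with a hypothetical `Checked<T>` brand (the brand type is illustrative, not part of the package):

    type Checked<T> = T & { readonly __checked: true };

    function debug_check(condition: boolean, message?: string): void {
      if (!condition) {
        throw new Error(`Assertion failure: ${message ?? ""}`);
      }
    }

    // Asserts the condition and returns the input, narrowed to the checked type.
    function ensure<T>(a: T, condition: boolean, message?: string): Checked<T> {
      debug_check(condition, message);
      return a as Checked<T>;
    }

    // Old call style:  debug_check`${x > 0} x must be positive`
    // New call style:  debug_check(x > 0, "x must be positive")
    const checked = ensure(42, 42 > 0, "x must be positive");

As the doc comment notes, a post-processing step may strip these helpers entirely, replacing each `ensure(y, cond, msg)` call with a bare cast.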
@@ -28512,7 +28524,7 @@ function resultToString(res) {
  const result_Result = {
  /** Create new [`Result`] with `Ok` status. */
  ok: (ok) => {
- debug_check `${ok !== undefined} 'ok' type cannot be undefined.`;
+ debug_check(ok !== undefined, "`Ok` type cannot be undefined.");
  return {
  isOk: true,
  isError: false,
@@ -28521,7 +28533,7 @@ const result_Result = {
  },
  /** Create new [`Result`] with `Error` status. */
  error: (error, details = "") => {
- debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
+ debug_check(error !== undefined, "`Error` type cannot be undefined.");
  return {
  isOk: false,
  isError: true,
@@ -28805,10 +28817,7 @@ class BitVec {
  constructor(data, bitLength) {
  this.data = data;
  this.bitLength = bitLength;
- debug_check `
- ${data.length * 8 >= bitLength}
- Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
- `;
+ debug_check(data.length * 8 >= bitLength, `Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.`);
  this.byteLength = Math.ceil(bitLength / 8);
  }
  /** Return a raw in-memory representation of this [`BitVec`]. */
@@ -28817,10 +28826,7 @@ class BitVec {
  }
  /** Perform OR operation on all bits in place. */
  sumWith(other) {
- debug_check `
- ${other.bitLength === this.bitLength}
- Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
- `;
+ debug_check(other.bitLength === this.bitLength, `Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}`);
  const otherRaw = other.raw;
  for (let i = 0; i < this.byteLength; i++) {
  this.data[i] |= otherRaw[i];
@@ -28830,7 +28836,7 @@ class BitVec {
  * Set the bit at index `idx` to value `val`.
  */
  setBit(idx, val) {
- debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
+ debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
@@ -28845,7 +28851,7 @@ class BitVec {
  * Return `true` if the bit at index `idx` is set.
  */
  isSet(idx) {
- debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
+ debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
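Both `setBit` and `isSet` locate a bit by splitting the index into a byte offset and an in-byte mask (note the rewritten checks keep only the upper bound, so negative indices are no longer asserted). A standalone sketch of the same LSB-first-within-byte addressing:

    // Set bit `idx` in a byte array, least-significant bit first within each byte.
    function setBit(data: Uint8Array, idx: number, val: boolean): void {
      const byteIndex = Math.floor(idx / 8);
      const mask = 1 << (idx % 8);
      if (val) {
        data[byteIndex] |= mask;  // turn the bit on
      } else {
        data[byteIndex] &= ~mask; // turn the bit off
      }
    }

    function isSet(data: Uint8Array, idx: number): boolean {
      return (data[Math.floor(idx / 8)] & (1 << (idx % 8))) !== 0;
    }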
@@ -29012,7 +29018,7 @@ class bytes_BytesBlob {
  }
  /** Create a new [`BytesBlob`] from an array of bytes. */
  static blobFromNumbers(v) {
- debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} BytesBlob.blobFromNumbers used with non-byte number array.`;
+ debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "BytesBlob.blobFromNumbers used with non-byte number array.");
  const arr = new Uint8Array(v);
  return new bytes_BytesBlob(arr);
  }
@@ -29056,7 +29062,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  length;
  constructor(raw, len) {
  super(raw);
- debug_check `${raw.byteLength === len} Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`;
+ debug_check(raw.byteLength === len, `Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`);
  this.length = len;
  }
  /** Create new [`Bytes<X>`] given a backing buffer and it's length. */
@@ -29065,7 +29071,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Create new [`Bytes<X>`] given an array of bytes and it's length. */
  static fromNumbers(v, len) {
- debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} Bytes.fromNumbers used with non-byte number array.`;
+ debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "Bytes.fromNumbers used with non-byte number array.");
  const x = new Uint8Array(v);
  return new bytes_Bytes(x, len);
  }
@@ -29076,7 +29082,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
  /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
  static fill(len, input) {
- debug_check `${(input & 0xff) === input} Input has to be a byte.`;
+ debug_check((input & 0xff) === input, "Input has to be a byte.");
  const bytes = bytes_Bytes.zero(len);
  bytes.raw.fill(input, 0, len);
  return bytes;
@@ -29099,7 +29105,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Compare the sequence to another one. */
  isEqualTo(other) {
- debug_check `${this.length === other.length} Comparing incorrectly typed bytes!`;
+ debug_check(this.length === other.length, "Comparing incorrectly typed bytes!");
  return u8ArraySameLengthEqual(this.raw, other.raw);
  }
  /** Converts current type into some opaque extension. */
@@ -29108,7 +29114,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  }
  function byteFromString(s) {
- debug_check `${s.length === 2} Two-character string expected`;
+ debug_check(s.length === 2, "Two-character string expected");
  const a = numberFromCharCode(s.charCodeAt(0));
  const b = numberFromCharCode(s.charCodeAt(1));
  return (a << 4) | b;
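`byteFromString` packs two hexadecimal characters into one byte: the first character becomes the high nibble, the second the low nibble. A self-contained sketch of the same idea, substituting `parseInt` for the package's `numberFromCharCode`:

    // "ff" -> 255, "0a" -> 10
    function byteFromHexPair(s: string): number {
      const a = parseInt(s[0], 16); // high nibble
      const b = parseInt(s[1], 16); // low nibble
      return (a << 4) | b;
    }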
@@ -29162,53 +29168,42 @@ const bytesBlobComparator = (a, b) => a.compare(b);
 
  ;// CONCATENATED MODULE: ./packages/core/numbers/index.ts
 
- const asTypedNumber = (v) => v;
+ const asWithBytesRepresentation = (v) => v;
  const MAX_VALUE_U8 = 0xff;
  const MAX_VALUE_U16 = 0xffff;
  const MAX_VALUE_U32 = 0xffff_ffff;
  const MAX_VALUE_U64 = 0xffffffffffffffffn;
  /** Attempt to cast an input number into U8. */
- const tryAsU8 = (v) => {
- debug_check `${isU8(v)} input must have one-byte representation, got ${v}`;
- return asTypedNumber(v);
- };
+ const tryAsU8 = (v) => ensure(v, isU8(v), `input must have one-byte representation, got ${v}`);
  /** Check if given number is a valid U8 number. */
  const isU8 = (v) => (v & MAX_VALUE_U8) === v;
  /** Attempt to cast an input number into U16. */
- const numbers_tryAsU16 = (v) => {
- debug_check `${isU16(v)} input must have two-byte representation, got ${v}`;
- return asTypedNumber(v);
- };
+ const numbers_tryAsU16 = (v) => ensure(v, isU16(v), `input must have two-byte representation, got ${v}`);
  /** Check if given number is a valid U16 number. */
  const isU16 = (v) => (v & MAX_VALUE_U16) === v;
  /** Attempt to cast an input number into U32. */
- const numbers_tryAsU32 = (v) => {
- debug_check `${isU32(v)} input must have four-byte representation, got ${v}`;
- return asTypedNumber(v);
- };
+ const numbers_tryAsU32 = (v) => ensure(v, isU32(v), `input must have four-byte representation, got ${v}`);
  /** Check if given number is a valid U32 number. */
  const isU32 = (v) => (v & MAX_VALUE_U32) >>> 0 === v;
  /** Attempt to cast an input number into U64. */
  const numbers_tryAsU64 = (x) => {
  const v = BigInt(x);
- debug_check `${isU64(v)} input must have eight-byte representation, got ${x}`;
- return asTypedNumber(v);
+ return ensure(v, isU64(v), `input must have eight-byte representation, got ${x}`);
  };
  /** Check if given number is a valid U64 number. */
  const isU64 = (v) => (v & MAX_VALUE_U64) === v;
  /** Collate two U32 parts into one U64. */
  const u64FromParts = ({ lower, upper }) => {
  const val = (BigInt(upper) << 32n) + BigInt(lower);
- return asTypedNumber(val);
+ return asWithBytesRepresentation(val);
  };
  /** Split U64 into lower & upper parts. */
  const u64IntoParts = (v) => {
- // Number(...) safe: both parts are <= 0xffffffff
- const lower = Number(v & (2n ** 32n - 1n));
- const upper = Number(v >> 32n);
+ const lower = v & (2n ** 32n - 1n);
+ const upper = v >> 32n;
  return {
- lower: asTypedNumber(lower),
- upper: asTypedNumber(upper),
+ lower: asWithBytesRepresentation(Number(lower)),
+ upper: asWithBytesRepresentation(Number(upper)),
  };
  };
  /**
@@ -29248,8 +29243,8 @@ function numbers_u32AsLeBytes(value) {
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
  */
  function leBytesAsU32(uint8Array) {
- debug_check `${uint8Array.length === 4} Input must be a Uint8Array of length 4`;
- return asTypedNumber(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
+ debug_check(uint8Array.length === 4, "Input must be a Uint8Array of length 4");
+ return asWithBytesRepresentation(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
  }
  /** Get the smallest value between U64 a and values given as input parameters. */
  const minU64 = (a, ...values) => values.reduce((min, value) => (value > min ? min : value), a);
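The `u64IntoParts` rewrite keeps intermediate values as `bigint` and converts to `number` only at the return, but the arithmetic is unchanged: a U64 splits into two U32 halves and recombines by shifting the upper half back. A quick round-trip sketch:

    const MASK_32 = (1n << 32n) - 1n;

    // Split a U64 into two U32 halves; Number(...) is safe since each half fits in 32 bits.
    function u64IntoParts(v: bigint): { lower: number; upper: number } {
      return { lower: Number(v & MASK_32), upper: Number(v >> 32n) };
    }

    // Recombine: shift the upper half back and add the lower half.
    function u64FromParts(lower: number, upper: number): bigint {
      return (BigInt(upper) << 32n) + BigInt(lower);
    }

    // Round trip: u64FromParts(p.lower, p.upper) === x for any 0n <= x < 2n ** 64n.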
@@ -29540,7 +29535,7 @@ class decoder_Decoder {
  this.skip(newOffset - this.offset);
  }
  else {
- debug_check `${newOffset >= 0} The offset has to be positive`;
+ debug_check(newOffset >= 0, "The offset has to be positive");
  this.offset = newOffset;
  }
  }
@@ -29568,7 +29563,7 @@ class decoder_Decoder {
  return num;
  }
  ensureHasBytes(bytes) {
- debug_check `${bytes >= 0} Negative number of bytes given.`;
+ debug_check(bytes >= 0, "Negative number of bytes given.");
  if (this.offset + bytes > this.source.length) {
  throw new Error(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
  }
@@ -29576,7 +29571,7 @@ class decoder_Decoder {
  }
  const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
  function decodeVariableLengthExtraBytes(firstByte) {
- debug_check `${firstByte >= 0 && firstByte < 256} Incorrect byte value: ${firstByte}`;
+ debug_check(firstByte >= 0 && firstByte < 256, `Incorrect byte value: ${firstByte}`);
  for (let i = 0; i < MASKS.length; i++) {
  if (firstByte >= MASKS[i]) {
  return 8 - i;
@@ -29731,7 +29726,7 @@ class descriptor_Descriptor {
 
 
  function tryAsExactBytes(a) {
- debug_check `${a.isExact} The value is not exact size estimation!`;
+ debug_check(a.isExact, "The value is not exact size estimation!");
  return a.bytes;
  }
  function addSizeHints(a, b) {
@@ -29838,8 +29833,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- debug_check `${num < maxNum} Only for numbers up to 2**64 - 1`;
- debug_check `${-num <= maxNum / 2n} Only for numbers down to -2**63`;
+ debug_check(num < maxNum, "Only for numbers up to 2**64 - 1");
+ debug_check(-num <= maxNum / 2n, "Only for numbers down to -2**63");
  this.ensureBigEnough(8);
  this.dataView.setBigInt64(this.offset, num, true);
  this.offset += 8;
@@ -29903,8 +29898,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two-complement already.
- debug_check `${num < maxNum} Only for numbers up to 2**${BITS * bytesToEncode} - 1`;
- debug_check `${-num <= maxNum / 2} Only for numbers down to -2**${BITS * bytesToEncode - 1}`;
+ debug_check(num < maxNum, `Only for numbers up to 2**${BITS * bytesToEncode} - 1`);
+ debug_check(-num <= maxNum / 2, `Only for numbers down to -2**${BITS * bytesToEncode - 1}`);
  this.ensureBigEnough(bytesToEncode);
  }
  /**
@@ -29915,8 +29910,8 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
  */
  varU32(num) {
- debug_check `${num >= 0} Only for natural numbers.`;
- debug_check `${num < 2 ** 32} Only for numbers up to 2**32`;
+ debug_check(num >= 0, "Only for natural numbers.");
+ debug_check(num < 2 ** 32, "Only for numbers up to 2**32");
  this.varU64(BigInt(num));
  }
  /**
@@ -30067,7 +30062,7 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
  */
  sequenceVarLen(encode, elements) {
- debug_check `${elements.length <= 2 ** 32} Wow, that's a nice long sequence you've got here.`;
+ debug_check(elements.length <= 2 ** 32, "Wow, that's a nice long sequence you've got here.");
  this.varU32(numbers_tryAsU32(elements.length));
  this.sequenceFixLen(encode, elements);
  }
@@ -30088,7 +30083,7 @@ class encoder_Encoder {
  * anyway, so if we really should throw we will.
  */
  ensureBigEnough(length, options = { silent: false }) {
- debug_check `${length >= 0} Negative length given`;
+ debug_check(length >= 0, "Negative length given");
  const newLength = this.offset + length;
  if (newLength > MAX_LENGTH) {
  if (options.silent) {
@@ -30224,12 +30219,10 @@ class ObjectView {
  decodeUpTo(field) {
  const index = this.descriptorsKeys.indexOf(field);
  const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
- debug_check `
- ${this.lastDecodedFieldIdx < index}
- Unjustified call to 'decodeUpTo' -
+ debug_check(this.lastDecodedFieldIdx < index, `Unjustified call to 'decodeUpTo' -
  the index (${index}, ${String(field)})
  is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
- `;
+ `);
  let lastItem = this.cache.get(lastField);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -30245,10 +30238,8 @@ class ObjectView {
  this.cache.set(field, lastItem);
  this.lastDecodedFieldIdx = i;
  }
- if (lastItem === undefined) {
- throw new Error("Last item must be set, since the loop turns at least once.");
- }
- return lastItem;
+ const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
+ return last;
  }
  }
  /**
@@ -30281,10 +30272,8 @@ class SequenceView {
  *[Symbol.iterator]() {
  for (let i = 0; i < this.length; i++) {
  const val = this.get(i);
- if (val === undefined) {
- throw new Error("We are within 0..this.length so all items are defined.");
- }
- yield val;
+ const v = ensure(val, val !== undefined, "We are within 0..this.length so all items are defined.");
+ yield v;
  }
  }
  /** Create an array of all views mapped to some particular value. */
@@ -30327,10 +30316,7 @@ class SequenceView {
  return bytes_BytesBlob.blobFrom(this.decoder.source.subarray(this.initialDecoderOffset, this.decoder.bytesRead()));
  }
  decodeUpTo(index) {
- debug_check `
- ${this.lastDecodedIdx < index}
- Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).
- `;
+ debug_check(this.lastDecodedIdx < index, `Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).`);
  let lastItem = this.cache.get(this.lastDecodedIdx);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -30345,10 +30331,8 @@ class SequenceView {
  this.cache.set(i, lastItem);
  this.lastDecodedIdx = i;
  }
- if (lastItem === undefined) {
- throw new Error("Last item must be set, since the loop turns at least once.");
- }
- return lastItem;
+ const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
+ return last;
  }
  }
 
@@ -30381,10 +30365,7 @@ const TYPICAL_DICTIONARY_LENGTH = 32;
  */
  function readonlyArray(desc) {
  return desc.convert((x) => {
- debug_check `
- ${Array.isArray(x)}
- Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
- `;
+ debug_check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
  // NOTE [ToDr] This assumption is incorrect in general, but it's documented
  // in the general note. We avoid `.slice()` the array for performance reasons.
  return x;
@@ -31339,17 +31320,10 @@ async function initAll() {
  await init.ed25519();
  await init.reedSolomon();
  }
- function initOnce(doInit) {
- let ready = null;
- return async () => {
- if (ready === null) ready = doInit();
- return await ready;
- };
- }
  const init = {
- bandersnatch: initOnce(async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() })),
- ed25519: initOnce(async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() })),
- reedSolomon: initOnce(async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() }))
+ bandersnatch: async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() }),
+ ed25519: async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() }),
+ reedSolomon: async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() })
  };
 
  //#endregion
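The removed `initOnce` wrapper memoized the initialization promise, so repeated calls to `init.bandersnatch()` and friends shared a single WASM instantiation; after this change every call re-runs its initializer. The dropped pattern, as a self-contained sketch:

    // Memoize an async initializer: the first call starts it, later calls share the promise.
    function initOnce<T>(doInit: () => Promise<T>): () => Promise<T> {
      let ready: Promise<T> | null = null;
      return async () => {
        if (ready === null) ready = doInit();
        return await ready;
      };
    }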
@@ -31371,7 +31345,7 @@ const BLS_KEY_BYTES = 144;
  /** Derive a Bandersnatch public key from a seed. */
  function bandersnatch_publicKey(seed) {
  const key = bandersnatch.derive_public_key(seed);
- check `${key[0] === 0} Invalid Bandersnatch public key derived from seed`;
+ check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
  return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
  }
 
@@ -31859,7 +31833,7 @@ async function ed25519_verify(input) {
  data.set(signature.raw, offset);
  offset += ED25519_SIGNATURE_BYTES;
  const messageLength = message.length;
- debug_check `${messageLength < 256} Message needs to be shorter than 256 bytes. Got: ${messageLength}`;
+ debug_check(messageLength < 256, `Message needs to be shorter than 256 bytes. Got: ${messageLength}`);
  data[offset] = messageLength;
  offset += 1;
  data.set(message.raw, offset);
@@ -31888,7 +31862,6 @@ async function verifyBatch(input) {
 
  ;// CONCATENATED MODULE: ./packages/core/hash/hash.ts
 
-
  /**
  * Size of the output of the hash functions.
  *
@@ -31898,7 +31871,6 @@ async function verifyBatch(input) {
  const hash_HASH_SIZE = 32;
  /** A hash without last byte (useful for trie representation). */
  const TRUNCATED_HASH_SIZE = 31;
- const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE);
  /**
  * Container for some object with a hash that is related to this object.
  *
@@ -31943,7 +31915,7 @@ class PageAllocator {
  // TODO [ToDr] Benchmark the performance!
  constructor(hashesPerPage) {
  this.hashesPerPage = hashesPerPage;
- check `${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
+ check(hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage, "Expected a non-zero integer.");
  this.resetPage();
  }
  resetPage() {
@@ -32242,8 +32214,8 @@ class MultiMap {
  * if needed.
  */
  constructor(keysLength, keyMappers) {
- check `${keysLength > 0} Keys cannot be empty.`;
- check `${keyMappers === undefined || keyMappers.length === keysLength} Incorrect number of key mappers given!`;
+ check(keysLength > 0, "Keys cannot be empty.");
+ check(keyMappers === undefined || keyMappers.length === keysLength, "Incorrect number of key mappers given!");
  this.data = new Map();
  this.keyMappers = keyMappers === undefined ? Array(keysLength).fill(null) : keyMappers;
  }
@@ -32344,7 +32316,7 @@ class sized_array_FixedSizeArray extends Array {
  this.fixedLength = this.length;
  }
  static new(data, len) {
- debug_check `${data.length === len} Expected an array of size: ${len}, got: ${data.length}`;
+ debug_check(data.length === len, `Expected an array of size: ${len}, got: ${data.length}`);
  const arr = new sized_array_FixedSizeArray(len);
  for (let i = 0; i < len; i++) {
  arr[i] = data[i];
@@ -32478,7 +32450,7 @@ class SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
+ debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
  const comparator = first.comparator;
  const arr1 = first.array;
  const arr1Length = arr1.length;
@@ -32598,7 +32570,7 @@ class SortedSet extends SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
+ debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
  const comparator = first.comparator;
  if (first.length === 0) {
  return SortedSet.fromSortedArray(comparator, second.array);
@@ -33617,12 +33589,9 @@ const common_tryAsServiceGas = (v) => opaque_asOpaqueType(numbers_tryAsU64(v));
  /** Attempt to convert a number into `CoreIndex`. */
  const common_tryAsCoreIndex = (v) => opaque_asOpaqueType(numbers_tryAsU16(v));
  /** Attempt to convert a number into `Epoch`. */
- const tryAsEpoch = (v) => asOpaqueType(tryAsU32(v));
+ const tryAsEpoch = (v) => opaque_asOpaqueType(numbers_tryAsU32(v));
  function tryAsPerValidator(array, spec) {
- debug_check `
- ${array.length === spec.validatorsCount}
- Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
- `;
+ debug_check(array.length === spec.validatorsCount, `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`);
  return sized_array_asKnownSize(array);
  }
  const codecPerValidator = (val) => codecWithContext((context) => {
@@ -33631,10 +33600,7 @@ const codecPerValidator = (val) => codecWithContext((context) => {
  });
  });
  function tryAsPerEpochBlock(array, spec) {
- debug_check `
- ${array.length === spec.epochLength}
- Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
- `;
+ debug_check(array.length === spec.epochLength, `Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}`);
  return sized_array_asKnownSize(array);
  }
  const codecPerEpochBlock = (val) => codecWithContext((context) => {
@@ -33905,14 +33871,9 @@ class WorkItem extends WithDebug {
 
 
 
-
  /** Verify the value is within the `WorkItemsCount` bounds. */
  function work_package_tryAsWorkItemsCount(len) {
- debug_check `
- ${len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS}
- WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}
- `;
- return tryAsU8(len);
+ return ensure(len, len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS, `WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}`);
  }
  /** Minimal number of work items in the work package or results in work report. */
  const MIN_NUMBER_OF_WORK_ITEMS = 1;
@@ -35886,10 +35847,7 @@ class AvailabilityAssignment extends WithDebug {
 
  /** Check if given array has correct length before casting to the opaque type. */
  function tryAsPerCore(array, spec) {
- debug_check `
- ${array.length === spec.coresCount}
- Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
- `;
+ debug_check(array.length === spec.coresCount, `Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}`);
  return opaque_asOpaqueType(array);
  }
  const codecPerCore = (val) => codecWithContext((context) => {
@@ -37140,7 +37098,7 @@ class InMemoryState extends WithDebug {
  }
  removeServices(servicesRemoved) {
  for (const serviceId of servicesRemoved ?? []) {
- debug_check `${this.services.has(serviceId)} Attempting to remove non-existing service: ${serviceId}`;
+ debug_check(this.services.has(serviceId), `Attempting to remove non-existing service: ${serviceId}`);
  this.services.delete(serviceId);
  }
  }
@@ -37157,10 +37115,7 @@ class InMemoryState extends WithDebug {
  }
  else if (kind === UpdateStorageKind.Remove) {
  const { key } = action;
- debug_check `
- ${service.data.storage.has(key.toString())}
- Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
- `;
+ debug_check(service.data.storage.has(key.toString()), `Attempting to remove non-existing storage item at ${serviceId}: ${action.key}`);
  service.data.storage.delete(key.toString());
  }
  else {
@@ -37843,12 +37798,12 @@ class TrieNode {
  }
  /** View this node as a branch node */
  asBranchNode() {
- debug_check `${this.getNodeType() === NodeType.Branch} not a branch!`;
+ debug_check(this.getNodeType() === NodeType.Branch);
  return new BranchNode(this);
  }
  /** View this node as a leaf node */
  asLeafNode() {
- debug_check `${this.getNodeType() !== NodeType.Branch} not a leaf!`;
+ debug_check(this.getNodeType() !== NodeType.Branch);
  return new LeafNode(this);
  }
  toString() {
@@ -38336,7 +38291,7 @@ function createSubtreeForBothLeaves(traversedPath, nodes, leafToReplace, leaf) {
  * Return a single bit from `key` located at `bitIndex`.
  */
  function getBit(key, bitIndex) {
- debug_check `${bitIndex < TRUNCATED_KEY_BITS} invalid bit index passed ${bitIndex}`;
+ debug_check(bitIndex < TRUNCATED_KEY_BITS);
  const byte = bitIndex >>> 3;
  const bit = bitIndex - (byte << 3);
  const mask = 0b10_00_00_00 >>> bit;
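`getBit` addresses trie key bits most-significant-first within each byte (mask `0b10_00_00_00 >>> bit`), the opposite of `BitVec` above, which is least-significant-first. A standalone sketch of the same extraction:

    // Return bit `bitIndex` of `key`, counting from the MSB of byte 0.
    function getBit(key: Uint8Array, bitIndex: number): 0 | 1 {
      const byte = bitIndex >>> 3;        // bitIndex / 8
      const bit = bitIndex - (byte << 3); // bitIndex % 8
      const mask = 0b10_00_00_00 >>> bit;
      return (key[byte] & mask) === 0 ? 0 : 1;
    }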
@@ -39496,7 +39451,7 @@ class TypedPort {
  * Send a response given the worker that has previously requested something.
  */
  respond(localState, request, data, transferList) {
- debug_check `${request.kind === "request"}`;
+ debug_check(request.kind === "request");
  this.postMessage({
  kind: "response",
  id: request.id,
@@ -39527,11 +39482,10 @@ class TypedPort {
  throw new Error(`Invalid message: ${JSON.stringify(msg)}.`);
  }
  switch (msg.kind) {
- case "response": {
- debug_check `${this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1}`;
+ case "response":
+ debug_check(this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1);
  this.responseListeners.emit(reqEvent(msg.id), null, msg.data, msg.name, msg.localState, msg);
  break;
- }
  case "signal":
  this.listeners.emit("signal", msg.name, msg.data, msg.localState, msg);
  break;
@@ -39746,9 +39700,9 @@ class MessageChannelStateMachine {
  const promise = new Promise((resolve, reject) => {
  parentPort.once("message", (value) => {
  try {
- debug_check `${value.kind === "request"} The initial message should be a request with channel.`;
- debug_check `${value.name === CHANNEL_MESSAGE}`;
- debug_check `${value.data instanceof external_node_worker_threads_namespaceObject.MessagePort}`;
+ debug_check(value.kind === "request", "The initial message should be a request with channel.");
+ debug_check(value.name === CHANNEL_MESSAGE);
+ debug_check(value.data instanceof external_node_worker_threads_namespaceObject.MessagePort);
  const port = new TypedPort(value.data);
  port.respond(machine.currentState().stateName, value, Ok);
  resolve(port);
@@ -39828,7 +39782,7 @@ class StateMachine {
  /** Get state object by name. */
  getState(name) {
  const state = this.allStates.get(name);
- debug_check `${state !== undefined} Unable to retrieve state object for ${name}.`;
+ debug_check(state !== undefined, `Unable to retrieve state object for ${name}.`);
  return state;
  }
  /** Get the currently active state object. */
@@ -40223,6 +40177,7 @@ class ImporterReady extends State {
  response: rootHash === null ? bytes_Bytes.zero(hash_HASH_SIZE).raw : rootHash.raw,
  };
  }
+ // NOTE [ToDr] This should rather be using the import queue, instead of going directly.
  async importBlock(block) {
  if (this.importer === null) {
  state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
@@ -40234,13 +40189,17 @@
  if (block instanceof Uint8Array) {
  const config = this.getConfig();
  const blockView = decoder_Decoder.decodeObject(Block.Codec.View, block, config.chainSpec);
+ const headerView = blockView.header.view();
+ const timeSlot = headerView.timeSlotIndex.materialize();
  let response;
  try {
- const res = await this.importer.importBlock(blockView, config.omitSealVerification);
+ const res = await this.importer.importBlock(blockView, null, config.omitSealVerification);
  if (res.isOk) {
- response = result_Result.ok(this.importer.getBestStateRootHash() ?? ZERO_HASH.asOpaque());
+ state_machine_logger.info(`🧊 Best block: #${timeSlot} (${res.ok.hash})`);
+ response = result_Result.ok(this.importer.getBestStateRootHash() ?? bytes_Bytes.zero(hash_HASH_SIZE).asOpaque());
  }
  else {
+ state_machine_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(res)}`);
  response = result_Result.error(resultToString(res));
  }
  }
@@ -40561,22 +40520,19 @@ class Preimages {
 
  const NO_OF_REGISTERS = 13;
  const REGISTER_SIZE_SHIFT = 3;
- const tryAsRegisterIndex = (index) => {
- debug_check `${index >= 0 && index < NO_OF_REGISTERS} Incorrect register index: ${index}!`;
- return opaque_asOpaqueType(index);
- };
+ const tryAsRegisterIndex = (index) => ensure(index, index >= 0 && index < NO_OF_REGISTERS, `Incorrect register index: ${index}!`);
  class Registers {
  bytes;
  asSigned;
  asUnsigned;
  constructor(bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
  this.bytes = bytes;
- debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
+ debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
  }
  static fromBytes(bytes) {
- debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
+ debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
  return new Registers(bytes);
  }
  getBytesAsLittleEndian(index, len) {
@@ -40722,7 +40678,7 @@ class Mask {
  return this.lookupTableForward[index] === 0;
  }
  getNoOfBytesToNextInstruction(index) {
- debug_check `${index >= 0} index (${index}) cannot be a negative number`;
+ debug_check(index >= 0, `index (${index}) cannot be a negative number`);
  return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
  }
  buildLookupTableForward(mask) {
@@ -41722,7 +41678,7 @@ const PAGE_SIZE_SHIFT = 12;
  const PAGE_SIZE = 1 << PAGE_SIZE_SHIFT;
  const MIN_ALLOCATION_SHIFT = (() => {
  const MIN_ALLOCATION_SHIFT = 7;
- debug_check `${MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
+ debug_check(MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT, "incorrect minimal allocation shift");
  return MIN_ALLOCATION_SHIFT;
  })();
  const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
@@ -41735,28 +41691,16 @@ const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE;
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts
 
 
- const tryAsMemoryIndex = (index) => {
- debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
- return opaque_asOpaqueType(index);
- };
- const tryAsSbrkIndex = (index) => {
- debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX + 1} Incorrect sbrk index: ${index}!`;
- return opaque_asOpaqueType(index);
- };
+ const tryAsMemoryIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX, `Incorrect memory index: ${index}!`);
+ const tryAsSbrkIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX + 1, `Incorrect sbrk index: ${index}!`);
 
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts
 
 
  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
- const tryAsPageIndex = (index) => {
- debug_check `${index >= 0 && index < PAGE_SIZE}, Incorect page index: ${index}!`;
- return opaque_asOpaqueType(index);
- };
+ const tryAsPageIndex = (index) => ensure(index, index >= 0 && index < PAGE_SIZE, `Incorect page index: ${index}!`);
  /** Ensure that given `index` represents an index of one of the pages. */
- const tryAsPageNumber = (index) => {
- debug_check `${index >= 0 && index <= LAST_PAGE_NUMBER}, Incorect page number: ${index}!`;
- return opaque_asOpaqueType(index);
- };
+ const tryAsPageNumber = (index) => ensure(index, index >= 0 && index <= LAST_PAGE_NUMBER, `Incorrect page number: ${index}!`);
  /**
  * Get the next page number and wrap the result if it is bigger than LAST_PAGE_NUMBER
  *
@@ -42288,10 +42232,10 @@ class MemoryBuilder {
  */
  setReadablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- debug_check `${start < end} end has to be bigger than start`;
- debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
- debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
- debug_check `${data.length <= end - start} the initial data is longer than address range`;
+ debug_check(start < end, "end has to be bigger than start");
+ debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
+ debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
+ debug_check(data.length <= end - start, "the initial data is longer than address range");
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -42316,10 +42260,10 @@ class MemoryBuilder {
  */
  setWriteablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- debug_check `${start < end} end has to be bigger than start`;
- debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
- debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
- debug_check `${data.length <= end - start} the initial data is longer than address range`;
+ debug_check(start < end, "end has to be bigger than start");
+ debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
+ debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
+ debug_check(data.length <= end - start, "the initial data is longer than address range");
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -42341,7 +42285,7 @@ class MemoryBuilder {
  this.ensureNotFinalized();
  const pageOffset = start % PAGE_SIZE;
  const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
- debug_check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
+ debug_check(data.length <= remainingSpaceOnPage, "The data has to fit into a single page.");
  const length = data.length;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -42355,10 +42299,7 @@ class MemoryBuilder {
  return this;
  }
  finalize(startHeapIndex, endHeapIndex) {
- debug_check `
- ${startHeapIndex <= endHeapIndex}
- startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
- `;
+ debug_check(startHeapIndex <= endHeapIndex, `startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})`);
  this.ensureNotFinalized();
  const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
  const pages = PageRange.fromMemoryRange(range);
@@ -42596,7 +42537,7 @@ function mulU64(a, b) {
  *
  * The result of multiplication is a 64-bits number and we are only interested in the part that lands in the upper 32-bits.
  * For example if we multiply `0xffffffff * 0xffffffff`, we get:
-
+
  * | 64-bits | 64-bits |
  * +--------------------+--------------------+
  * | upper | lower |
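The doc comment above describes why only the upper half of the 64-bit product is of interest. With `bigint` arithmetic the upper 32 bits can be computed directly; a sketch for the unsigned case (the function name is illustrative):

    // Upper 32 bits of a 32x32 -> 64-bit unsigned multiplication.
    function mulUpperUnsigned(a: number, b: number): number {
      return Number((BigInt(a) * BigInt(b)) >> 32n);
    }

    // 0xffffffff * 0xffffffff === 0xfffffffe_00000001n, so:
    // mulUpperUnsigned(0xffffffff, 0xffffffff) === 0xfffffffe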
@@ -42632,7 +42573,7 @@ function mulUpperSS(a, b) {
  return interpretAsSigned(resultLimitedTo64Bits);
  }
  function unsignedRightShiftBigInt(value, shift) {
- debug_check `${shift >= 0} Shift count must be non-negative`;
+ debug_check(shift >= 0, "Shift count must be non-negative");
  const fillBit = value < 0 ? "1" : "0";
  // Convert the BigInt to its binary representation
  const binaryRepresentation = value.toString(2).padStart(64, fillBit);
@@ -44048,10 +43989,7 @@ class TwoRegsTwoImmsDispatcher {
  class JumpTable {
  indices;
  constructor(itemByteLength, bytes) {
- debug_check `
- ${itemByteLength === 0 || bytes.length % itemByteLength === 0}
- Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!
- `;
+ debug_check(itemByteLength === 0 || bytes.length % itemByteLength === 0, `Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!`);
  const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;
  this.indices = new Uint32Array(length);
  for (let i = 0; i < length; i++) {
@@ -44495,10 +44433,7 @@ class ReturnValue {
  this.consumedGas = consumedGas;
  this.status = status;
  this.memorySlice = memorySlice;
- debug_check `
- ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
- 'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
- `;
+ debug_check((status === null && memorySlice !== null) || (status !== null && memorySlice === null), "`status` and `memorySlice` must not both be null or both be non-null — exactly one must be provided");
  }
  static fromStatus(consumedGas, status) {
  return new ReturnValue(consumedGas, status, null);
@@ -44547,10 +44482,7 @@ class HostCalls {
  if (status !== status_Status.HOST) {
  return this.getReturnValue(status, pvmInstance);
  }
- debug_check `
- ${pvmInstance.getExitParam() !== null}
- "We know that the exit param is not null, because the status is 'Status.HOST'
- `;
+ debug_check(pvmInstance.getExitParam() !== null, "We know that the exit param is not null, because the status is `Status.HOST`");
  const hostCallIndex = pvmInstance.getExitParam() ?? -1;
  const gas = pvmInstance.getGasCounter();
  const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -44610,7 +44542,7 @@ class host_calls_manager_HostCallsManager {
  constructor({ missing, handlers = [], }) {
  this.missing = missing;
  for (const handler of handlers) {
- debug_check `${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
+ debug_check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
  this.hostCalls.set(handler.index, handler);
  }
  }
@@ -44733,7 +44665,7 @@ function getServiceId(serviceId) {
  return null;
  }
  function writeServiceIdAsLeBytes(serviceId, destination) {
- debug_check `${destination.length >= SERVICE_ID_BYTES} Not enough space in the destination.`;
+ debug_check(destination.length >= SERVICE_ID_BYTES, "Not enough space in the destination.");
  destination.set(numbers_u32AsLeBytes(serviceId));
  }
  /** Clamp a U64 to the maximum value of a 32-bit unsigned integer. */
@@ -44822,27 +44754,13 @@ class SpiProgram extends WithDebug {
  this.registers = registers;
  }
  }
- /**
- * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
- *
- * E_n - little endian encoding, n - length
- * o - initial read only data
- * w - initial heap
- * z - heap pages filled with zeros
- * s - stack size
- * c - program code
- *
- * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
- */
  function decodeStandardProgram(program, args) {
  const decoder = decoder_Decoder.fromBlob(program);
  const oLength = decoder.u24();
  const wLength = decoder.u24();
- debug_check `${args.length <= DATA_LEGNTH} Incorrect arguments length`;
- debug_check `${oLength <= DATA_LEGNTH} Incorrect readonly segment length`;
- const readOnlyLength = oLength;
- debug_check `${wLength <= DATA_LEGNTH} Incorrect heap segment length`;
- const heapLength = wLength;
+ const argsLength = ensure(args.length, args.length <= DATA_LEGNTH, "Incorrect arguments length");
+ const readOnlyLength = ensure(oLength, oLength <= DATA_LEGNTH, "Incorrect readonly segment length");
+ const heapLength = ensure(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
  const noOfHeapZerosPages = decoder.u16();
  const stackSize = decoder.u24();
  const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
@@ -44858,14 +44776,14 @@ function decodeStandardProgram(program, args) {
  const stackStart = STACK_SEGMENT - memory_utils_alignToPageSize(stackSize);
  const stackEnd = STACK_SEGMENT;
  const argsStart = ARGS_SEGMENT;
- const argsEnd = argsStart + memory_utils_alignToPageSize(args.length);
- const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(args.length);
+ const argsEnd = argsStart + memory_utils_alignToPageSize(argsLength);
+ const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(argsLength);
  function nonEmpty(s) {
  return s !== false;
  }
  const readableMemory = [
  readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
- args.length > 0 && getMemorySegment(argsStart, argsEnd, args),
+ argsLength > 0 && getMemorySegment(argsStart, argsEnd, args),
  argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
  ].filter(nonEmpty);
  const writeableMemory = [
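Although the hunk above deletes the doc comment, the layout it described still drives this decoder: a standard PVM program blob is `E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c`, where `E_n` is an n-byte little-endian integer, `o` the initial read-only data, `w` the initial heap, `z` the count of zeroed heap pages, `s` the stack size and `c` the program code. A header-only sketch of that framing (function and field names are illustrative, not the package's API):

    // Parse the fixed-size 11-byte header of a standard program blob.
    function decodeSpiHeader(program: Uint8Array) {
      const u16 = (o: number) => program[o] | (program[o + 1] << 8);
      const u24 = (o: number) => program[o] | (program[o + 1] << 8) | (program[o + 2] << 16);
      return {
        readOnlyLength: u24(0), // E_3(|o|)
        heapLength: u24(3),     // E_3(|w|)
        heapZeroPages: u16(6),  // E_2(z)
        stackSize: u24(8),      // E_3(s)
        bodyOffset: 11,         // o, w, then E_4(|c|) ++ c follow
      };
    }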
@@ -57659,6 +57577,88 @@ const initNetwork = async (importerReady, workerConfig, genesisHeaderHash, netwo
  ;// CONCATENATED MODULE: external "node:fs/promises"
  const external_node_fs_promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:fs/promises");
  var external_node_fs_promises_default = /*#__PURE__*/__nccwpck_require__.n(external_node_fs_promises_namespaceObject);
+ ;// CONCATENATED MODULE: ./workers/importer/import-queue.ts
+
+
+
+
+
+ class ImportQueue {
+ spec;
+ importer;
+ toImport = SortedArray.fromSortedArray((a, b) => {
+ const diff = a.timeSlot - b.timeSlot;
+ if (diff < 0) {
+ return Ordering.Greater;
+ }
+ if (diff > 0) {
+ return Ordering.Less;
+ }
+ return Ordering.Equal;
+ });
+ queuedBlocks = HashSet.new();
+ lastEpoch = tryAsEpoch(2 ** 32 - 1);
+ constructor(spec, importer) {
+ this.spec = spec;
+ this.importer = importer;
+ }
+ isCurrentEpoch(timeSlot) {
+ const epoch = Math.floor(timeSlot / this.spec.epochLength);
+ return this.lastEpoch === epoch;
+ }
+ startPreverification() {
+ for (const entry of this.toImport) {
+ if (this.isCurrentEpoch(entry.timeSlot)) {
+ entry.seal = this.importer.preverifySeal(entry.timeSlot, entry.block);
+ }
+ }
+ }
+ static getBlockDetails(block) {
+ let encodedHeader;
+ let timeSlot;
+ try {
+ encodedHeader = block.header.encoded();
+ timeSlot = block.header.view().timeSlotIndex.materialize();
+ }
+ catch {
+ return result_Result.error("invalid");
+ }
+ const headerHash = hashBytes(encodedHeader).asOpaque();
+ return result_Result.ok(new WithHash(headerHash, { block, timeSlot }));
+ }
+ push(details) {
+ const headerHash = details.hash;
+ if (this.queuedBlocks.has(headerHash)) {
+ return result_Result.error("already queued");
+ }
+ const { timeSlot, block } = details.data;
+ const entry = {
+ headerHash,
+ timeSlot,
+ block,
+ seal: this.isCurrentEpoch(timeSlot) ? this.importer.preverifySeal(timeSlot, block) : Promise.resolve(null),
+ };
+ this.toImport.insert(entry);
+ this.queuedBlocks.insert(headerHash);
+ return result_Result.ok(result_OK);
+ }
+ shift() {
+ const entry = this.toImport.pop();
+ if (entry !== undefined) {
+ this.queuedBlocks.delete(entry.headerHash);
+ const blockEpoch = Math.floor(entry.timeSlot / this.spec.epochLength);
+ const hasEpochChanged = this.lastEpoch !== blockEpoch;
+ this.lastEpoch = tryAsEpoch(blockEpoch);
+ // currently removed block is changing the epoch, so fire up
+ // preverification for the following blocks.
+ if (hasEpochChanged) {
+ this.startPreverification();
+ }
+ }
+ return entry;
+ }
+ }
+
  ;// CONCATENATED MODULE: ./packages/jam/transition/block-verifier.ts
 
 
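The new `ImportQueue` keeps pending blocks ordered by time slot (the comparator is intentionally inverted so that `pop()` on the sorted array yields the lowest slot first), deduplicates blocks by header hash, and starts seal pre-verification eagerly, but only for blocks that belong to the epoch currently being imported. When `shift()` crosses an epoch boundary, pre-verification is re-triggered for the queued blocks that have just become current. A condensed sketch of that policy (types simplified; `preverify` stands in for the importer's `preverifySeal`):

    interface QueueEntry {
      headerHash: string;
      timeSlot: number;
      seal: Promise<unknown>;
    }

    class MiniImportQueue {
      private byHash = new Map<string, QueueEntry>();
      private lastEpoch = -1;

      constructor(
        private epochLength: number,
        private preverify: (e: QueueEntry) => Promise<unknown>,
      ) {}

      private epochOf(timeSlot: number): number {
        return Math.floor(timeSlot / this.epochLength);
      }

      /** Queue a block; pre-verify its seal only if it is in the current epoch. */
      push(e: QueueEntry): boolean {
        if (this.byHash.has(e.headerHash)) return false; // already queued
        if (this.epochOf(e.timeSlot) === this.lastEpoch) {
          e.seal = this.preverify(e);
        }
        this.byHash.set(e.headerHash, e);
        return true;
      }

      /** Take the lowest-slot block, re-triggering pre-verification on epoch change. */
      shift(): QueueEntry | undefined {
        let next: QueueEntry | undefined;
        for (const e of this.byHash.values()) {
          if (next === undefined || e.timeSlot < next.timeSlot) next = e;
        }
        if (next === undefined) return undefined;
        this.byHash.delete(next.headerHash);
        const epoch = this.epochOf(next.timeSlot);
        if (epoch !== this.lastEpoch) {
          this.lastEpoch = epoch;
          // epoch changed: queued blocks in the new epoch can now be pre-verified
          for (const e of this.byHash.values()) {
            if (this.epochOf(e.timeSlot) === epoch) e.seal = this.preverify(e);
          }
        }
        return next;
      }
    }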
@@ -57672,7 +57672,7 @@ var BlockVerifier;
  BlockVerifierError[BlockVerifierError["InvalidStateRoot"] = 4] = "InvalidStateRoot";
  BlockVerifierError[BlockVerifierError["AlreadyImported"] = 5] = "AlreadyImported";
  })(BlockVerifierError || (BlockVerifierError = {}));
- const block_verifier_ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
+ const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
  class BlockVerifier {
  hasher;
  blocks;
@@ -57692,7 +57692,7 @@ class BlockVerifier {
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c9d000c9d00?v=0.6.5
  const parentHash = headerView.parentHeaderHash.materialize();
  // importing genesis block
- if (!parentHash.isEqualTo(block_verifier_ZERO_HASH)) {
+ if (!parentHash.isEqualTo(ZERO_HASH)) {
  const parentBlock = this.blocks.getHeader(parentHash);
  if (parentBlock === null) {
  return result_Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
@@ -58205,22 +58205,301 @@ async function verifyTickets(bandersnatch, numberOfValidators, epochRoot, ticket
58205
58205
  }));
58206
58206
  }
58207
58207
 
58208
- ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm.ts
58208
+ ;// CONCATENATED MODULE: ./packages/core/concurrent/parent.ts
58209
+
58210
+
58211
+ // Amount of tasks in the queue that will trigger creation of new worker thread.
58212
+ // NOTE this might need to be configurable in the future.
58213
+ const QUEUE_SIZE_WORKER_THRESHOLD = 5;
58214
+ /** Execution pool manager. */
58215
+ class Executor {
58216
+ workers;
58217
+ maxWorkers;
58218
+ workerPath;
58219
+ /** Initialize a new concurrent executor given a path to the worker. */
58220
+ static async initialize(workerPath, options) {
58221
+ debug_check(options.maxWorkers > 0, "Max workers has to be positive.");
58222
+ debug_check(options.minWorkers <= options.maxWorkers, "Min workers has to be lower or equal to max workers.");
58223
+ const workers = [];
58224
+ for (let i = 0; i < options.minWorkers; i++) {
58225
+ workers.push(await initWorker(workerPath));
58226
+ }
58227
+ return new Executor(workers, options.maxWorkers, workerPath);
58228
+ }
58229
+ // keeps track of the indices of worker threads that are currently free and available to execute tasks
58230
+ freeWorkerIndices = [];
58231
+ taskQueue = [];
58232
+ isDestroyed = false;
58233
+ isWorkerInitializing = false;
58234
+ constructor(workers, maxWorkers, workerPath) {
58235
+ this.workers = workers;
58236
+ this.maxWorkers = maxWorkers;
58237
+ this.workerPath = workerPath;
58238
+ // initial free workers.
58239
+ for (let i = 0; i < workers.length; i++) {
58240
+ this.freeWorkerIndices.push(i);
58241
+ }
58242
+ }
58243
+ /** Attempt to initialize a new worker. */
58244
+ async initNewWorker(onSuccess = () => { }) {
58245
+ if (this.workers.length >= this.maxWorkers) {
58246
+ // biome-ignore lint/suspicious/noConsole: warning
58247
+ console.warn(`Task queue has ${this.taskQueue.length} pending items and we can't init any more workers.`);
58248
+ return;
58249
+ }
58250
+ if (this.isWorkerInitializing) {
58251
+ return;
58252
+ }
58253
+ this.isWorkerInitializing = true;
58254
+ this.workers.push(await initWorker(this.workerPath));
58255
+ this.freeWorkerIndices.push(this.workers.length - 1);
58256
+ this.isWorkerInitializing = false;
58257
+ onSuccess();
58258
+ }
58259
+ /** Terminate all workers and clear the executor. */
58260
+ async destroy() {
58261
+ for (const worker of this.workers) {
58262
+ worker.port.close();
58263
+ await worker.worker.terminate();
58264
+ }
58265
+ this.workers.length = 0;
58266
+ this.isDestroyed = true;
58267
+ }
58268
+ /** Execute a task with the given parameters. */
58269
+ async run(params) {
58270
+ return new Promise((resolve, reject) => {
58271
+ if (this.isDestroyed) {
58272
+ reject("pool destroyed");
58273
+ return;
58274
+ }
58275
+ this.taskQueue.push({
58276
+ params,
58277
+ resolve,
58278
+ reject,
58279
+ });
58280
+ this.processEntryFromTaskQueue();
58281
+ });
58282
+ }
58283
+ /** Process a single element from the task queue. */
58284
+ processEntryFromTaskQueue() {
58285
+ const freeWorker = this.freeWorkerIndices.pop();
58286
+ // no free workers available currently,
58287
+ // we will retry when one of the tasks completes.
58288
+ if (freeWorker === undefined) {
58289
+ if (this.taskQueue.length > QUEUE_SIZE_WORKER_THRESHOLD) {
58290
+ this.initNewWorker(() => {
58291
+ // process an entry in this newly initialized worker.
58292
+ this.processEntryFromTaskQueue();
58293
+ });
58294
+ }
58295
+ return;
58296
+ }
58297
+ const task = this.taskQueue.pop();
58298
+ // no tasks in the queue
58299
+ if (task === undefined) {
58300
+ this.freeWorkerIndices.push(freeWorker);
58301
+ return;
58302
+ }
58303
+ const worker = this.workers[freeWorker];
58304
+ worker.runTask(task, () => {
58305
+ // mark the worker as available again
58306
+ this.freeWorkerIndices.push(freeWorker);
58307
+ // and continue processing the queue
58308
+ this.processEntryFromTaskQueue();
58309
+ });
58310
+ }
58311
+ }
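For orientation, a usage sketch (the worker path and parameter class are hypothetical; the only contract the pool imposes on `params` is a `getTransferList()` method, which is consulted when posting the task to a worker):

    // Hypothetical task parameters, mirroring the Params class further below:
    // getTransferList() lives on the prototype, so postMessage can
    // structured-clone the instance's plain data properties.
    class DoubleParams {
      constructor(input) { this.input = input; }
      getTransferList() { return []; }
    }
    const executor = await Executor.initialize("./my-task-worker.mjs", {
      minWorkers: 1,
      maxWorkers: 4,
    });
    const result = await executor.run(new DoubleParams(21));
    await executor.destroy();

Note that both `run` and `processEntryFromTaskQueue` use `push`/`pop` on `taskQueue`, so under backlog the newest task is served first, and an extra worker is only spawned once more than QUEUE_SIZE_WORKER_THRESHOLD tasks are waiting.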
58312
+ async function initWorker(workerPath) {
58313
+ // create a worker and initialize a communication channel
58314
+ const { port1, port2 } = new MessageChannel();
58315
+ const workerThread = new external_node_worker_threads_namespaceObject.Worker(workerPath, {});
58316
+ workerThread.postMessage(port1, [port1]);
58317
+ // wait for the worker to start
58318
+ await new Promise((resolve, reject) => {
58319
+ workerThread.once("message", resolve);
58320
+ workerThread.once("error", reject);
58321
+ });
58322
+ // make sure the threads don't prevent the program from stopping.
58323
+ workerThread.unref();
58324
+ return new WorkerChannel(workerThread, port2);
58325
+ }
58326
+ class WorkerChannel {
58327
+ worker;
58328
+ port;
58329
+ constructor(worker, port) {
58330
+ this.worker = worker;
58331
+ this.port = port;
58332
+ }
58333
+ runTask(task, onFinish) {
58334
+ const message = {
58335
+ params: task.params,
58336
+ };
58337
+ // when we receive a response, make sure to process it
58338
+ this.port.once("message", (e) => {
58339
+ if (e.isOk) {
58340
+ task.resolve(e.ok);
58341
+ }
58342
+ else {
58343
+ task.reject(new Error(e.error));
58344
+ }
58345
+ onFinish();
58346
+ });
58347
+ // send the task to work on.
58348
+ this.port.postMessage(message, message.params.getTransferList());
58349
+ }
58350
+ }
58209
58351
 
58352
+ ;// CONCATENATED MODULE: ./packages/core/concurrent/worker.ts
58353
+
58354
+
58355
+ /** An in-worker abstraction. */
58356
+ class ConcurrentWorker {
58357
+ runInternal;
58358
+ state;
58359
+ static new(run, state) {
58360
+ return new ConcurrentWorker(run, state);
58361
+ }
58362
+ constructor(runInternal, state) {
58363
+ this.runInternal = runInternal;
58364
+ this.state = state;
58365
+ }
58366
+ listenToParentPort() {
58367
+ if (external_node_worker_threads_namespaceObject.parentPort === null) {
58368
+ throw new Error("This method is meant to be run inside a worker thread!");
58369
+ }
58370
+ external_node_worker_threads_namespaceObject.parentPort.once("close", () => {
58371
+ process.exit(0);
58372
+ });
58373
+ external_node_worker_threads_namespaceObject.parentPort.once("message", (port) => {
58374
+ this.listenTo(port);
58375
+ // send back a readiness signal.
58376
+ external_node_worker_threads_namespaceObject.parentPort?.postMessage("ready");
58377
+ });
58378
+ }
58379
+ listenTo(port) {
58380
+ port.once("close", () => {
58381
+ port.removeAllListeners();
58382
+ process.exit(0);
58383
+ });
58384
+ port.on("message", (ev) => {
58385
+ const { params } = ev;
58386
+ this.run(params)
58387
+ .then((result) => {
58388
+ const response = result_Result.ok(result);
58389
+ port.postMessage(response, result.getTransferList());
58390
+ })
58391
+ .catch((e) => {
58392
+ const response = result_Result.error(`${e}`);
58393
+ port.postMessage(response, []);
58394
+ });
58395
+ });
58396
+ }
58397
+ async run(params) {
58398
+ return await this.runInternal(params, this.state);
58399
+ }
58400
+ async destroy() { }
58401
+ }
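The worker-side counterpart, sketched for the hypothetical `my-task-worker.mjs` above (the handler receives the task params plus the shared `state`, and whatever it returns must expose `getTransferList()` so the result can be posted back):

    // Result wrapper mirroring params_Response below: the buffer is listed
    // as transferable, so it is moved back to the parent without a copy.
    class DoubleResult {
      constructor(data) { this.data = data; }
      getTransferList() { return [this.data.buffer]; }
    }
    const worker = ConcurrentWorker.new(async (params, _state) => {
      return new DoubleResult(new Uint8Array([params.input * 2]));
    }, null);
    worker.listenToParentPort();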
58402
+
58403
+ ;// CONCATENATED MODULE: ./packages/core/concurrent/index.ts
58404
+
58405
+
58406
+
58407
+ ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/params.ts
58408
+ var Method;
58409
+ (function (Method) {
58410
+ Method[Method["RingCommitment"] = 0] = "RingCommitment";
58411
+ Method[Method["BatchVerifyTickets"] = 1] = "BatchVerifyTickets";
58412
+ Method[Method["VerifySeal"] = 2] = "VerifySeal";
58413
+ })(Method || (Method = {}));
58414
+ class params_Response {
58415
+ data;
58416
+ constructor(data) {
58417
+ this.data = data;
58418
+ }
58419
+ getTransferList() {
58420
+ return [this.data.buffer];
58421
+ }
58422
+ }
58423
+ class Params {
58424
+ params;
58425
+ constructor(params) {
58426
+ this.params = params;
58427
+ }
58428
+ getTransferList() {
58429
+ return [];
58430
+ }
58431
+ }
58432
+
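The split between `Params` and `params_Response` encodes transfer semantics: a response hands its underlying `ArrayBuffer` over to the receiving thread (a zero-copy move via `postMessage`), while parameters report an empty transfer list and are structured-cloned instead. A tiny illustration (values made up):

    const res = new params_Response(new Uint8Array(32));
    res.getTransferList(); // [ArrayBuffer]: moved, not copied
    new Params({ method: Method.VerifySeal }).getTransferList(); // []: cloned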
58433
+ ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/worker.ts
58434
+
58435
+
58436
+
58437
+
58438
+ const worker = ConcurrentWorker.new(async (p) => {
58439
+ await initAll();
58440
+ const params = p.params;
58441
+ const method = params.method;
58442
+ if (method === Method.RingCommitment) {
58443
+ return Promise.resolve(new params_Response(bandersnatch_exports.ring_commitment(params.keys)));
58444
+ }
58445
+ if (method === Method.BatchVerifyTickets) {
58446
+ return Promise.resolve(new params_Response(bandersnatch_exports.batch_verify_tickets(params.ringSize, params.commitment, params.ticketsData, params.contextLength)));
58447
+ }
58448
+ if (method === Method.VerifySeal) {
58449
+ return Promise.resolve(new params_Response(bandersnatch_exports.verify_seal(params.authorKey, params.signature, params.payload, params.auxData)));
58450
+ }
58451
+ debug_assertNever(method);
58452
+ }, null);
58453
+
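The dispatch above leans on `debug_assertNever` for exhaustiveness: every `Method` variant must be handled before the fall-through is reached. The pattern reduced to a sketch:

    function dispatch(method) {
      if (method === Method.RingCommitment) return "commitment";
      if (method === Method.BatchVerifyTickets) return "tickets";
      if (method === Method.VerifySeal) return "seal";
      // throws at runtime if a new variant is added but not handled above
      debug_assertNever(method);
    }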
58454
+ ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/index.ts
58455
+
58456
+
58457
+
58458
+
58459
+ const bandersnatch_wasm_workerFile = __nccwpck_require__.ab + "bootstrap-bandersnatch.mjs";
58210
58460
  class BandernsatchWasm {
58211
- constructor() { }
58212
- static async new() {
58213
- await initAll();
58214
- return new BandernsatchWasm();
58461
+ executor;
58462
+ constructor(executor) {
58463
+ this.executor = executor;
58464
+ }
58465
+ destroy() {
58466
+ return this.executor.destroy();
58467
+ }
58468
+ static async new({ synchronous }) {
58469
+ const workers = external_node_os_default().cpus().length;
58470
+ return new BandernsatchWasm(!synchronous
58471
+ ? await Executor.initialize(bandersnatch_wasm_workerFile, {
58472
+ minWorkers: Math.max(1, Math.floor(workers / 2)),
58473
+ maxWorkers: workers,
58474
+ })
58475
+ : worker);
58215
58476
  }
58216
58477
  async verifySeal(authorKey, signature, payload, auxData) {
58217
- return bandersnatch_exports.verify_seal(authorKey, signature, payload, auxData);
58478
+ const x = await this.executor.run(new Params({
58479
+ method: Method.VerifySeal,
58480
+ authorKey,
58481
+ signature,
58482
+ payload,
58483
+ auxData,
58484
+ }));
58485
+ return x.data;
58218
58486
  }
58219
58487
  async getRingCommitment(keys) {
58220
- return bandersnatch_exports.ring_commitment(keys);
58488
+ const x = await this.executor.run(new Params({
58489
+ method: Method.RingCommitment,
58490
+ keys,
58491
+ }));
58492
+ return x.data;
58221
58493
  }
58222
58494
  async batchVerifyTicket(ringSize, commitment, ticketsData, contextLength) {
58223
- return bandersnatch_exports.batch_verify_tickets(ringSize, commitment, ticketsData, contextLength);
58495
+ const x = await this.executor.run(new Params({
58496
+ method: Method.BatchVerifyTickets,
58497
+ ringSize,
58498
+ commitment,
58499
+ ticketsData,
58500
+ contextLength,
58501
+ }));
58502
+ return x.data;
58224
58503
  }
58225
58504
  }
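Both construction modes expose the same API, so callers can trade latency for parallelism without other changes. A sketch (the seal arguments are caller-provided placeholders, not values defined here):

    // in-process: no threads, WASM calls run on the caller's thread.
    const inProcess = await BandernsatchWasm.new({ synchronous: true });
    // pooled: fans calls out to an Executor sized from the CPU count.
    const pooled = await BandernsatchWasm.new({ synchronous: false });
    // authorKey, signature, payload, auxData: placeholder byte arrays.
    const sealResult = await pooled.verifySeal(authorKey, signature, payload, auxData);
    await pooled.destroy();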
58226
58505
 
@@ -58263,7 +58542,7 @@ class Safrole {
58263
58542
  chainSpec;
58264
58543
  state;
58265
58544
  bandersnatch;
58266
- constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new()) {
58545
+ constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
58267
58546
  this.chainSpec = chainSpec;
58268
58547
  this.state = state;
58269
58548
  this.bandersnatch = bandersnatch;
@@ -58641,7 +58920,7 @@ var SafroleSealError;
58641
58920
  const BANDERSNATCH_ZERO_KEY = bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque();
58642
58921
  class SafroleSeal {
58643
58922
  bandersnatch;
58644
- constructor(bandersnatch = BandernsatchWasm.new()) {
58923
+ constructor(bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
58645
58924
  this.bandersnatch = bandersnatch;
58646
58925
  }
58647
58926
  /**
@@ -58953,8 +59232,8 @@ class PartiallyUpdatedState {
58953
59232
  this.stateUpdate.services.preimages.push(newUpdate);
58954
59233
  }
58955
59234
  updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
58956
- debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
58957
- debug_check `${bytes >= 0} storageUtilisationBytes has to be a positive number, got: ${bytes}`;
59235
+ debug_check(items >= 0, `storageUtilisationCount has to be a positive number, got: ${items}`);
59236
+ debug_check(bytes >= 0, `storageUtilisationBytes has to be a positive number, got: ${bytes}`);
58958
59237
  const overflowItems = !isU32(items);
58959
59238
  const overflowBytes = !isU64(bytes);
58960
59239
  // TODO [ToDr] this is not specified in GP, but it seems sensible.
@@ -59379,7 +59658,7 @@ class AccumulateExternalities {
59379
59658
  }
59380
59659
  // TODO [ToDr] Not sure if we should update the service info in that case,
59381
59660
  // but for now we let that case fall-through.
59382
- debug_check `${len === PreimageStatusKind.Unavailable} preimage is not unavailable`;
59661
+ debug_check(len === PreimageStatusKind.Unavailable, "preimage is not unavailable");
59383
59662
  }
59384
59663
  // make sure we have enough balance for this update
59385
59664
  // https://graypaper.fluffylabs.dev/#/9a08063/381201381601?v=0.6.6
@@ -59875,7 +60154,7 @@ class Assurances {
59875
60154
  return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
59876
60155
  }
59877
60156
  prevValidatorIndex = assurance.validatorIndex;
59878
- debug_check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
60157
+ debug_check(bitfield.bitLength === coresCount, `Invalid bitfield length of ${bitfield.bitLength}`);
59879
60158
  const setBits = bitfield.indicesOfSetBits();
59880
60159
  for (const idx of setBits) {
59881
60160
  perCoreAssurances[idx] += 1;
@@ -62199,7 +62478,7 @@ class DeferredTransfers {
62199
62478
  transferStatistics.set(serviceId, { count: numbers_tryAsU32(transfers.length), gasUsed: common_tryAsServiceGas(consumedGas) });
62200
62479
  const [updatedState, checkpointedState] = partialState.getStateUpdates();
62201
62480
  currentStateUpdate = updatedState;
62202
- debug_check `${checkpointedState === null} On transfer cannot invoke checkpoint.`;
62481
+ debug_check(checkpointedState === null, "On transfer cannot invoke checkpoint.");
62203
62482
  }
62204
62483
  return result_Result.ok({
62205
62484
  // NOTE: we return only services, since it's impossible to update
@@ -62537,7 +62816,7 @@ const ENTROPY_BYTES = 32;
62537
62816
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
62538
62817
  */
62539
62818
  function fisherYatesShuffle(arr, entropy) {
62540
- debug_check `${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
62819
+ debug_check(entropy.length === ENTROPY_BYTES, `Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`);
62541
62820
  const n = arr.length;
62542
62821
  const randomNumbers = hashToNumberSequence(entropy, arr.length);
62543
62822
  const result = new Array(n);
@@ -63384,7 +63663,7 @@ class Statistics {
63384
63663
  /** get statistics for the current epoch */
63385
63664
  const statistics = this.getStatistics(slot);
63386
63665
  const { current, cores, services } = statistics;
63387
- debug_check `${current[authorIndex] !== undefined} authorIndex is out of bounds`;
63666
+ debug_check(current[authorIndex] !== undefined, "authorIndex is out of bounds");
63388
63667
  /** One validator can produce maximal one block per timeslot */
63389
63668
  const newBlocksCount = current[authorIndex].blocks + 1;
63390
63669
  current[authorIndex].blocks = numbers_tryAsU32(newBlocksCount);
@@ -63584,11 +63863,11 @@ class OnChain {
63584
63863
  authorization;
63585
63864
  // chapter 13: https://graypaper.fluffylabs.dev/#/68eaa1f/18b60118b601?v=0.6.4
63586
63865
  statistics;
63587
- constructor(chainSpec, state, blocks, hasher) {
63866
+ constructor(chainSpec, state, blocks, hasher, { enableParallelSealVerification }) {
63588
63867
  this.chainSpec = chainSpec;
63589
63868
  this.state = state;
63590
63869
  this.hasher = hasher;
63591
- const bandersnatch = BandernsatchWasm.new();
63870
+ const bandersnatch = BandernsatchWasm.new({ synchronous: !enableParallelSealVerification });
63592
63871
  this.statistics = new Statistics(chainSpec, state);
63593
63872
  this.safrole = new Safrole(chainSpec, state, bandersnatch);
63594
63873
  this.safroleSeal = new SafroleSeal(bandersnatch);
@@ -63606,16 +63885,16 @@ class OnChain {
63606
63885
  const sealState = this.safrole.getSafroleSealState(timeSlot);
63607
63886
  return await this.safroleSeal.verifyHeaderSeal(block.header.view(), sealState);
63608
63887
  }
63609
- async transition(block, headerHash, omitSealVerification = false) {
63888
+ async transition(block, headerHash, preverifiedSeal = null, omitSealVerification = false) {
63610
63889
  const headerView = block.header.view();
63611
63890
  const header = block.header.materialize();
63612
63891
  const timeSlot = header.timeSlotIndex;
63613
63892
  // safrole seal
63614
- let newEntropyHash;
63893
+ let newEntropyHash = preverifiedSeal;
63615
63894
  if (omitSealVerification) {
63616
63895
  newEntropyHash = hashBytes(header.seal).asOpaque();
63617
63896
  }
63618
- else {
63897
+ if (newEntropyHash === null) {
63619
63898
  const sealResult = await this.verifySeal(timeSlot, block);
63620
63899
  if (sealResult.isError) {
63621
63900
  return stfError(StfErrorKind.SafroleSeal, sealResult);
@@ -63722,7 +64001,7 @@ class OnChain {
63722
64001
  assertEmpty(deferredTransfersRest);
63723
64002
  const accumulateRoot = await this.accumulateOutput.transition({ accumulationOutputLog });
63724
64003
  // recent history
63725
- const recentHistoryUpdate = this.recentHistory.transition({
64004
+ const recentHistoryUpdate = await this.recentHistory.transition({
63726
64005
  partial: recentHistoryPartialUpdate,
63727
64006
  headerHash,
63728
64007
  accumulateRoot,
@@ -63802,7 +64081,6 @@ function checkOffendersMatch(offendersMark, headerOffendersMark) {
63802
64081
 
63803
64082
 
63804
64083
 
63805
-
63806
64084
  var ImporterErrorKind;
63807
64085
  (function (ImporterErrorKind) {
63808
64086
  ImporterErrorKind[ImporterErrorKind["Verifier"] = 0] = "Verifier";
@@ -63830,28 +64108,29 @@ class Importer {
63830
64108
  throw new Error(`Unable to load best state from header hash: ${currentBestHeaderHash}.`);
63831
64109
  }
63832
64110
  this.verifier = new BlockVerifier(hasher, blocks);
63833
- this.stf = new OnChain(spec, state, blocks, hasher);
64111
+ this.stf = new OnChain(spec, state, blocks, hasher, { enableParallelSealVerification: true });
63834
64112
  this.state = state;
63835
64113
  this.currentHash = currentBestHeaderHash;
63836
64114
  logger.info(`😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`);
63837
64115
  }
63838
- async importBlock(block, omitSealVerification) {
63839
- const timer = measure("importBlock");
63840
- const timeSlot = extractTimeSlot(block);
63841
- const maybeBestHeader = await this.importBlockInternal(block, omitSealVerification);
63842
- if (maybeBestHeader.isOk) {
63843
- const bestHeader = maybeBestHeader.ok;
63844
- this.logger.info(`🧊 Best block: #${timeSlot} (${bestHeader.hash})`);
63845
- this.logger.log(timer());
63846
- return maybeBestHeader;
63847
- }
63848
- this.logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
63849
- this.logger.log(timer());
63850
- return maybeBestHeader;
63851
- }
63852
- async importBlockInternal(block, omitSealVerification = false) {
64116
+ /** Attempt to pre-verify the seal to speed up importing. */
64117
+ async preverifySeal(timeSlot, block) {
64118
+ try {
64119
+ const res = await this.stf.verifySeal(timeSlot, block);
64120
+ if (res.isOk) {
64121
+ return res.ok;
64122
+ }
64123
+ this.logger.warn(`Unable to pre-verify the seal: ${resultToString(res)}`);
64124
+ return null;
64125
+ }
64126
+ catch (e) {
64127
+ this.logger.warn(`Error while trying to pre-verify the seal: ${e}`);
64128
+ return null;
64129
+ }
64130
+ }
64131
+ async importBlock(block, preverifiedSeal, omitSealVerification = false) {
63853
64132
  const logger = this.logger;
63854
- logger.log("🧱 Attempting to import a new block");
64133
+ logger.log(`🧱 Attempting to import a new block ${preverifiedSeal !== null ? "(seal preverified)" : ""}`);
63855
64134
  const timerVerify = measure("import:verify");
63856
64135
  const hash = await this.verifier.verifyBlock(block);
63857
64136
  logger.log(timerVerify());
@@ -63876,7 +64155,7 @@ class Importer {
63876
64155
  const headerHash = hash.ok;
63877
64156
  logger.log(`🧱 Verified block: Got hash ${headerHash} for block at slot ${timeSlot}.`);
63878
64157
  const timerStf = measure("import:stf");
63879
- const res = await this.stf.transition(block, headerHash, omitSealVerification);
64158
+ const res = await this.stf.transition(block, headerHash, preverifiedSeal, omitSealVerification);
63880
64159
  logger.log(timerStf());
63881
64160
  if (res.isError) {
63882
64161
  return importerError(ImporterErrorKind.Stf, res);
@@ -63926,19 +64205,6 @@ class Importer {
63926
64205
  return stateEntries ?? null;
63927
64206
  }
63928
64207
  }
63929
- /**
63930
- * Attempt to safely extract timeslot of a block.
63931
- *
63932
- * NOTE: it may fail if encoding is invalid.
63933
- */
63934
- function extractTimeSlot(block) {
63935
- try {
63936
- return block.header.view().timeSlotIndex.materialize();
63937
- }
63938
- catch {
63939
- return tryAsTimeSlot(2 ** 32 - 1);
63940
- }
63941
- }
63942
64208
 
63943
64209
  ;// CONCATENATED MODULE: ./workers/importer/index.ts
63944
64210
 
@@ -63950,6 +64216,8 @@ function extractTimeSlot(block) {
63950
64216
 
63951
64217
 
63952
64218
 
64219
+
64220
+
63953
64221
  const importer_logger = Logger.new(import.meta.filename, "importer");
63954
64222
  if (!external_node_worker_threads_namespaceObject.isMainThread) {
63955
64223
  Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
@@ -63966,6 +64234,7 @@ async function createImporter(config) {
63966
64234
  const importer = new Importer(config.chainSpec, hasher, importer_logger, blocks, states);
63967
64235
  return {
63968
64236
  lmdb,
64237
+ blocks,
63969
64238
  importer,
63970
64239
  };
63971
64240
  }
@@ -63980,27 +64249,65 @@ async function importer_main(channel) {
63980
64249
  importer_logger.info(`📥 Importer starting ${channel.currentState()}`);
63981
64250
  // Await the configuration object
63982
64251
  const ready = await channel.waitForState("ready(importer)");
63983
- let closeDb = async () => { };
63984
64252
  const finished = await ready.doUntil("finished", async (worker, port) => {
63985
64253
  const config = worker.getConfig();
63986
- const { lmdb, importer } = await createImporter(config);
63987
- closeDb = async () => {
63988
- await lmdb.close();
63989
- };
64254
+ const { blocks, importer } = await createImporter(config);
63990
64255
  // TODO [ToDr] this is ugly, since we have a circular dependency.
63991
64256
  worker.setImporter(importer);
63992
64257
  importer_logger.info("📥 Importer waiting for blocks.");
64258
+ // TODO [ToDr] back pressure?
64259
+ let isProcessing = false;
64260
+ const importingQueue = new ImportQueue(config.chainSpec, importer);
63993
64261
  worker.onBlock.on(async (block) => {
63994
- const res = await importer.importBlock(block, config.omitSealVerification);
63995
- if (res.isOk) {
63996
- worker.announce(port, res.ok);
64262
+ const details = ImportQueue.getBlockDetails(block);
64263
+ // ignore invalid blocks.
64264
+ if (details.isError) {
64265
+ importer_logger.trace("🧊 Ignoring invalid block.");
64266
+ return;
64267
+ }
64268
+ // ignore already known blocks
64269
+ if (blocks.getHeader(details.ok.hash) !== null) {
64270
+ importer_logger.trace(`🧊 Already imported block: #${details.ok.data.timeSlot}.`);
64271
+ return;
64272
+ }
64273
+ const importResult = importingQueue.push(details.ok);
64274
+ // ignore blocks that are already queued
64275
+ if (importResult.isError) {
64276
+ importer_logger.trace(`🧊 Already queued block: #${details.ok.data.timeSlot}.`);
64277
+ return;
64278
+ }
64279
+ importer_logger.log(`🧊 Queued block: #${details.ok.data.timeSlot} (skip seal: ${config.omitSealVerification})`);
64280
+ if (isProcessing) {
64281
+ return;
64282
+ }
64283
+ isProcessing = true;
64284
+ try {
64285
+ for (;;) {
64286
+ const entry = importingQueue.shift();
64287
+ if (entry === undefined) {
64288
+ return;
64289
+ }
64290
+ const { block, seal, timeSlot } = entry;
64291
+ const timer = measure("importBlock");
64292
+ const maybeBestHeader = await importer.importBlock(block, await seal, config.omitSealVerification);
64293
+ if (maybeBestHeader.isOk) {
64294
+ const bestHeader = maybeBestHeader.ok;
64295
+ worker.announce(port, bestHeader);
64296
+ importer_logger.info(`🧊 Best block: #${bestHeader.data.timeSlotIndex.materialize()} (${bestHeader.hash})`);
64297
+ }
64298
+ else {
64299
+ importer_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
64300
+ }
64301
+ importer_logger.log(timer());
64302
+ }
64303
+ }
64304
+ finally {
64305
+ isProcessing = false;
63997
64306
  }
63998
64307
  });
63999
64308
  await wasmPromise;
64000
64309
  });
64001
64310
  importer_logger.info("📥 Importer finished. Closing channel.");
64002
- // close the database
64003
- await closeDb();
64004
64311
  // Close the comms to gracefully shut the app down.
64005
64312
  finished.currentState().close(channel);
64006
64313
  }
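The block handler above is a single-consumer drain: any number of `onBlock` events may enqueue work, but only the first caller to flip `isProcessing` runs the loop, and the `finally` releases the latch even when an import throws. The pattern in isolation (sketch):

    let isProcessing = false;
    async function drain(queue, handle) {
      if (isProcessing) return; // another invocation is already draining
      isProcessing = true;
      try {
        for (;;) {
          const entry = queue.shift();
          if (entry === undefined) return;
          await handle(entry);
        }
      } finally {
        isProcessing = false;
      }
    }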