@typeberry/jam 0.1.0 → 0.1.1-127cc86

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -28332,29 +28332,17 @@ function isBrowser() {
  * We avoid using `node:assert` to keep compatibility with a browser environment.
  * Note the checks should not have any side effects, since we might decide
  * to remove all of them in a post-processing step.
- */
- function debug_check(condition, message) {
- if (!condition) {
- throw new Error(`Assertion failure: ${message ?? ""}`);
- }
- }
- function cast(_a, condition) {
- return condition;
- }
- /**
- * Yet another function to perform runtime assertions.
- * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
  *
- * In the post-processing step all usages of this functions should be replaced with simple casting. An example:
- * const x = checkAndType<number, CheckedNumber>(y);
- * should be replaced with:
- * const x = y as CheckedNumber;
+ * NOTE the function is intended to be used as a tagged template string for performance
+ * reasons.
  */
- function ensure(a, condition, message) {
- if (cast(a, condition)) {
- return a;
+ function debug_check(strings, condition, ...data) {
+ if (!condition) {
+ // add an empty value so that `data.length === strings.length`
+ data.unshift("");
+ const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
+ throw new Error(`Assertion failure:${message.join("")}`);
  }
- throw new Error(`Assertion failure: ${message ?? ""}`);
  }
  /**
  * The function can be used to make sure that a particular type is `never`
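
A minimal usage sketch of the new tagged-template form: the condition is the first interpolated value, and the message string is only assembled when the check fails (the interpolated values themselves are still evaluated eagerly, so they should stay cheap). The call site below is hypothetical:

    const idx = 4;
    const len = 3;
    debug_check `${idx < len} index out of bounds: ${idx} >= ${len}`;
    // throws: Assertion failure: index out of bounds: 4 >= 3
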
@@ -28524,7 +28512,7 @@ function resultToString(res) {
  const result_Result = {
  /** Create new [`Result`] with `Ok` status. */
  ok: (ok) => {
- debug_check(ok !== undefined, "`Ok` type cannot be undefined.");
+ debug_check `${ok !== undefined} 'ok' type cannot be undefined.`;
  return {
  isOk: true,
  isError: false,
@@ -28533,7 +28521,7 @@ const result_Result = {
  },
  /** Create new [`Result`] with `Error` status. */
  error: (error, details = "") => {
- debug_check(error !== undefined, "`Error` type cannot be undefined.");
+ debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
  return {
  isOk: false,
  isError: true,
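
A quick sketch of the `Result` constructors above; the payload fields beyond `isOk`/`isError` are not visible in this hunk, so only the flags are exercised here:

    const good = result_Result.ok(42);
    const bad = result_Result.error("parse-failed", "unexpected token");
    good.isOk;   // true
    bad.isError; // true
    result_Result.ok(undefined); // trips the new tagged debug_check
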
@@ -28817,7 +28805,10 @@ class BitVec {
  constructor(data, bitLength) {
  this.data = data;
  this.bitLength = bitLength;
- debug_check(data.length * 8 >= bitLength, `Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.`);
+ debug_check `
+ ${data.length * 8 >= bitLength}
+ Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
+ `;
  this.byteLength = Math.ceil(bitLength / 8);
  }
  /** Return a raw in-memory representation of this [`BitVec`]. */
@@ -28826,7 +28817,10 @@ class BitVec {
  }
  /** Perform OR operation on all bits in place. */
  sumWith(other) {
- debug_check(other.bitLength === this.bitLength, `Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}`);
+ debug_check `
+ ${other.bitLength === this.bitLength}
+ Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
+ `;
  const otherRaw = other.raw;
  for (let i = 0; i < this.byteLength; i++) {
  this.data[i] |= otherRaw[i];
@@ -28836,7 +28830,7 @@ class BitVec {
  * Set the bit at index `idx` to value `val`.
  */
  setBit(idx, val) {
- debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
+ debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
@@ -28851,7 +28845,7 @@ class BitVec {
  * Return `true` if the bit at index `idx` is set.
  */
  isSet(idx) {
- debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
+ debug_check `${idx >= 0 && idx < this.bitLength} Index out of bounds. Need ${idx} has ${this.bitLength}.`;
  const byteIndex = Math.floor(idx / 8);
  const bitIndexInByte = idx % 8;
  const mask = 1 << bitIndexInByte;
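
Note the rewritten checks in `setBit`/`isSet` also reject negative indices, which the old `idx < this.bitLength` test let through. A minimal sketch, assuming `BitVec` is constructed as shown above:

    const bits = new BitVec(new Uint8Array(2), 16);
    bits.setBit(3, true);
    bits.isSet(3);  // true
    bits.isSet(-1); // now fails the debug check; previously -1 < 16 passed
                    // and the read silently indexed data[-1]
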
@@ -29018,7 +29012,7 @@ class bytes_BytesBlob {
  }
  /** Create a new [`BytesBlob`] from an array of bytes. */
  static blobFromNumbers(v) {
- debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "BytesBlob.blobFromNumbers used with non-byte number array.");
+ debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} BytesBlob.blobFromNumbers used with non-byte number array.`;
  const arr = new Uint8Array(v);
  return new bytes_BytesBlob(arr);
  }
@@ -29062,7 +29056,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  length;
  constructor(raw, len) {
  super(raw);
- debug_check(raw.byteLength === len, `Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`);
+ debug_check `${raw.byteLength === len} Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`;
  this.length = len;
  }
  /** Create new [`Bytes<X>`] given a backing buffer and its length. */
@@ -29071,7 +29065,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Create new [`Bytes<X>`] given an array of bytes and its length. */
  static fromNumbers(v, len) {
- debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "Bytes.fromNumbers used with non-byte number array.");
+ debug_check `${v.find((x) => (x & 0xff) !== x) === undefined} Bytes.fromNumbers used with non-byte number array.`;
  const x = new Uint8Array(v);
  return new bytes_Bytes(x, len);
  }
@@ -29082,7 +29076,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  // TODO [ToDr] `fill` should have the arguments swapped to align with the rest.
  /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
  static fill(len, input) {
- debug_check((input & 0xff) === input, "Input has to be a byte.");
+ debug_check `${(input & 0xff) === input} Input has to be a byte.`;
  const bytes = bytes_Bytes.zero(len);
  bytes.raw.fill(input, 0, len);
  return bytes;
@@ -29105,7 +29099,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  /** Compare the sequence to another one. */
  isEqualTo(other) {
- debug_check(this.length === other.length, "Comparing incorrectly typed bytes!");
+ debug_check `${this.length === other.length} Comparing incorrectly typed bytes!`;
  return u8ArraySameLengthEqual(this.raw, other.raw);
  }
  /** Converts current type into some opaque extension. */
@@ -29114,7 +29108,7 @@ class bytes_Bytes extends bytes_BytesBlob {
  }
  }
  function byteFromString(s) {
- debug_check(s.length === 2, "Two-character string expected");
+ debug_check `${s.length === 2} Two-character string expected`;
  const a = numberFromCharCode(s.charCodeAt(0));
  const b = numberFromCharCode(s.charCodeAt(1));
  return (a << 4) | b;
@@ -29168,42 +29162,53 @@ const bytesBlobComparator = (a, b) => a.compare(b);

  ;// CONCATENATED MODULE: ./packages/core/numbers/index.ts

- const asWithBytesRepresentation = (v) => v;
+ const asTypedNumber = (v) => v;
  const MAX_VALUE_U8 = 0xff;
  const MAX_VALUE_U16 = 0xffff;
  const MAX_VALUE_U32 = 0xffff_ffff;
  const MAX_VALUE_U64 = 0xffffffffffffffffn;
  /** Attempt to cast an input number into U8. */
- const tryAsU8 = (v) => ensure(v, isU8(v), `input must have one-byte representation, got ${v}`);
+ const tryAsU8 = (v) => {
+ debug_check `${isU8(v)} input must have one-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U8 number. */
  const isU8 = (v) => (v & MAX_VALUE_U8) === v;
  /** Attempt to cast an input number into U16. */
- const numbers_tryAsU16 = (v) => ensure(v, isU16(v), `input must have two-byte representation, got ${v}`);
+ const numbers_tryAsU16 = (v) => {
+ debug_check `${isU16(v)} input must have two-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U16 number. */
  const isU16 = (v) => (v & MAX_VALUE_U16) === v;
  /** Attempt to cast an input number into U32. */
- const numbers_tryAsU32 = (v) => ensure(v, isU32(v), `input must have four-byte representation, got ${v}`);
+ const numbers_tryAsU32 = (v) => {
+ debug_check `${isU32(v)} input must have four-byte representation, got ${v}`;
+ return asTypedNumber(v);
+ };
  /** Check if given number is a valid U32 number. */
  const isU32 = (v) => (v & MAX_VALUE_U32) >>> 0 === v;
  /** Attempt to cast an input number into U64. */
  const numbers_tryAsU64 = (x) => {
  const v = BigInt(x);
- return ensure(v, isU64(v), `input must have eight-byte representation, got ${x}`);
+ debug_check `${isU64(v)} input must have eight-byte representation, got ${x}`;
+ return asTypedNumber(v);
  };
  /** Check if given number is a valid U64 number. */
  const isU64 = (v) => (v & MAX_VALUE_U64) === v;
  /** Collate two U32 parts into one U64. */
  const u64FromParts = ({ lower, upper }) => {
  const val = (BigInt(upper) << 32n) + BigInt(lower);
- return asWithBytesRepresentation(val);
+ return asTypedNumber(val);
  };
  /** Split U64 into lower & upper parts. */
  const u64IntoParts = (v) => {
- const lower = v & (2n ** 32n - 1n);
- const upper = v >> 32n;
+ // Number(...) safe: both parts are <= 0xffffffff
+ const lower = Number(v & (2n ** 32n - 1n));
+ const upper = Number(v >> 32n);
  return {
- lower: asWithBytesRepresentation(Number(lower)),
- upper: asWithBytesRepresentation(Number(upper)),
+ lower: asTypedNumber(lower),
+ upper: asTypedNumber(upper),
  };
  };
  /**
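
A round-trip sketch for the two helpers above; the `Number(...)` conversion is safe because each half fits in 32 bits:

    const parts = u64IntoParts(0x1_0000_0002n); // { lower: 2, upper: 1 }
    u64FromParts(parts) === 0x1_0000_0002n;     // true
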
@@ -29243,8 +29248,8 @@ function numbers_u32AsLeBytes(value) {
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
  */
  function leBytesAsU32(uint8Array) {
- debug_check(uint8Array.length === 4, "Input must be a Uint8Array of length 4");
- return asWithBytesRepresentation(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
+ debug_check `${uint8Array.length === 4} Input must be a Uint8Array of length 4`;
+ return asTypedNumber(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
  }
  /** Get the smallest value between U64 a and values given as input parameters. */
  const minU64 = (a, ...values) => values.reduce((min, value) => (value > min ? min : value), a);
@@ -29535,7 +29540,7 @@ class decoder_Decoder {
  this.skip(newOffset - this.offset);
  }
  else {
- debug_check(newOffset >= 0, "The offset has to be positive");
+ debug_check `${newOffset >= 0} The offset has to be positive`;
  this.offset = newOffset;
  }
  }
@@ -29563,7 +29568,7 @@ class decoder_Decoder {
  return num;
  }
  ensureHasBytes(bytes) {
- debug_check(bytes >= 0, "Negative number of bytes given.");
+ debug_check `${bytes >= 0} Negative number of bytes given.`;
  if (this.offset + bytes > this.source.length) {
  throw new Error(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
  }
@@ -29571,7 +29576,7 @@ class decoder_Decoder {
  }
  const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
  function decodeVariableLengthExtraBytes(firstByte) {
- debug_check(firstByte >= 0 && firstByte < 256, `Incorrect byte value: ${firstByte}`);
+ debug_check `${firstByte >= 0 && firstByte < 256} Incorrect byte value: ${firstByte}`;
  for (let i = 0; i < MASKS.length; i++) {
  if (firstByte >= MASKS[i]) {
  return 8 - i;
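
For orientation, the `MASKS` table maps the high bits of the first byte to the number of extra bytes that follow in the variable-length encoding; the fall-through for values below 0x80 is outside this hunk:

    // firstByte >= 0xff -> 8 extra bytes  (i = 0)
    // firstByte >= 0xfe -> 7 extra bytes  (i = 1)
    // ...
    // firstByte >= 0xc0 -> 2 extra bytes  (i = 6)
    // firstByte >= 0x80 -> 1 extra byte   (i = 7)
    decodeVariableLengthExtraBytes(0xc5); // 2
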
@@ -29726,7 +29731,7 @@ class descriptor_Descriptor {


  function tryAsExactBytes(a) {
- debug_check(a.isExact, "The value is not exact size estimation!");
+ debug_check `${a.isExact} The value is not exact size estimation!`;
  return a.bytes;
  }
  function addSizeHints(a, b) {
@@ -29833,8 +29838,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two's complement already.
- debug_check(num < maxNum, "Only for numbers up to 2**64 - 1");
- debug_check(-num <= maxNum / 2n, "Only for numbers down to -2**63");
+ debug_check `${num < maxNum} Only for numbers up to 2**64 - 1`;
+ debug_check `${-num <= maxNum / 2n} Only for numbers down to -2**63`;
  this.ensureBigEnough(8);
  this.dataView.setBigInt64(this.offset, num, true);
  this.offset += 8;
@@ -29898,8 +29903,8 @@ class encoder_Encoder {
  // we still allow positive numbers from `[maxNum / 2, maxNum)`.
  // So it does not matter if the argument is a negative value,
  // OR if someone just gave us two's complement already.
- debug_check(num < maxNum, `Only for numbers up to 2**${BITS * bytesToEncode} - 1`);
- debug_check(-num <= maxNum / 2, `Only for numbers down to -2**${BITS * bytesToEncode - 1}`);
+ debug_check `${num < maxNum} Only for numbers up to 2**${BITS * bytesToEncode} - 1`;
+ debug_check `${-num <= maxNum / 2} Only for numbers down to -2**${BITS * bytesToEncode - 1}`;
  this.ensureBigEnough(bytesToEncode);
  }
  /**
@@ -29910,8 +29915,8 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
  */
  varU32(num) {
- debug_check(num >= 0, "Only for natural numbers.");
- debug_check(num < 2 ** 32, "Only for numbers up to 2**32");
+ debug_check `${num >= 0} Only for natural numbers.`;
+ debug_check `${num < 2 ** 32} Only for numbers up to 2**32`;
  this.varU64(BigInt(num));
  }
  /**
@@ -30062,7 +30067,7 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
  */
  sequenceVarLen(encode, elements) {
- debug_check(elements.length <= 2 ** 32, "Wow, that's a nice long sequence you've got here.");
+ debug_check `${elements.length <= 2 ** 32} Wow, that's a nice long sequence you've got here.`;
  this.varU32(numbers_tryAsU32(elements.length));
  this.sequenceFixLen(encode, elements);
  }
@@ -30083,7 +30088,7 @@ class encoder_Encoder {
  * anyway, so if we really should throw we will.
  */
  ensureBigEnough(length, options = { silent: false }) {
- debug_check(length >= 0, "Negative length given");
+ debug_check `${length >= 0} Negative length given`;
  const newLength = this.offset + length;
  if (newLength > MAX_LENGTH) {
  if (options.silent) {
@@ -30219,10 +30224,12 @@ class ObjectView {
  decodeUpTo(field) {
  const index = this.descriptorsKeys.indexOf(field);
  const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
- debug_check(this.lastDecodedFieldIdx < index, `Unjustified call to 'decodeUpTo' -
+ debug_check `
+ ${this.lastDecodedFieldIdx < index}
+ Unjustified call to 'decodeUpTo' -
  the index (${index}, ${String(field)})
  is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
- `);
+ `;
  let lastItem = this.cache.get(lastField);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -30238,8 +30245,10 @@ class ObjectView {
  this.cache.set(field, lastItem);
  this.lastDecodedFieldIdx = i;
  }
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
- return last;
+ if (lastItem === undefined) {
+ throw new Error("Last item must be set, since the loop turns at least once.");
+ }
+ return lastItem;
  }
  }
  /**
@@ -30272,8 +30281,10 @@ class SequenceView {
  *[Symbol.iterator]() {
  for (let i = 0; i < this.length; i++) {
  const val = this.get(i);
- const v = ensure(val, val !== undefined, "We are within 0..this.length so all items are defined.");
- yield v;
+ if (val === undefined) {
+ throw new Error("We are within 0..this.length so all items are defined.");
+ }
+ yield val;
  }
  }
  /** Create an array of all views mapped to some particular value. */
@@ -30316,7 +30327,10 @@ class SequenceView {
  return bytes_BytesBlob.blobFrom(this.decoder.source.subarray(this.initialDecoderOffset, this.decoder.bytesRead()));
  }
  decodeUpTo(index) {
- debug_check(this.lastDecodedIdx < index, `Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).`);
+ debug_check `
+ ${this.lastDecodedIdx < index}
+ Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).
+ `;
  let lastItem = this.cache.get(this.lastDecodedIdx);
  const skipper = new Skipper(this.decoder);
  // now skip all of the fields and further populate the cache.
@@ -30331,8 +30345,10 @@ class SequenceView {
  this.cache.set(i, lastItem);
  this.lastDecodedIdx = i;
  }
- const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
- return last;
+ if (lastItem === undefined) {
+ throw new Error("Last item must be set, since the loop turns at least once.");
+ }
+ return lastItem;
  }
  }

@@ -30365,7 +30381,10 @@ const TYPICAL_DICTIONARY_LENGTH = 32;
  */
  function readonlyArray(desc) {
  return desc.convert((x) => {
- debug_check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
+ debug_check `
+ ${Array.isArray(x)}
+ Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
+ `;
  // NOTE [ToDr] This assumption is incorrect in general, but it's documented
  // in the general note. We avoid calling `.slice()` on the array for performance reasons.
  return x;
@@ -31320,10 +31339,17 @@ async function initAll() {
  await init.ed25519();
  await init.reedSolomon();
  }
+ function initOnce(doInit) {
+ let ready = null;
+ return async () => {
+ if (ready === null) ready = doInit();
+ return await ready;
+ };
+ }
  const init = {
- bandersnatch: async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() }),
- ed25519: async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() }),
- reedSolomon: async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() })
+ bandersnatch: initOnce(async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() })),
+ ed25519: initOnce(async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() })),
+ reedSolomon: initOnce(async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() }))
  };

  //#endregion
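
`initOnce` caches the pending promise, so repeated and concurrent calls share a single WASM initialization (note that a rejected `doInit` is cached as well). A minimal sketch, with a hypothetical `loadWasm` standing in for the real loaders:

    let calls = 0;
    const loadWasm = async () => { calls += 1; return "wasm-module"; };
    const initWasm = initOnce(loadWasm);
    await Promise.all([initWasm(), initWasm()]); // both resolve to "wasm-module"
    await initWasm();
    // calls === 1: doInit ran exactly once
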
@@ -31345,7 +31371,7 @@ const BLS_KEY_BYTES = 144;
  /** Derive a Bandersnatch public key from a seed. */
  function bandersnatch_publicKey(seed) {
  const key = bandersnatch.derive_public_key(seed);
- check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
+ check `${key[0] === 0} Invalid Bandersnatch public key derived from seed`;
  return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
  }

@@ -31833,7 +31859,7 @@ async function ed25519_verify(input) {
  data.set(signature.raw, offset);
  offset += ED25519_SIGNATURE_BYTES;
  const messageLength = message.length;
- debug_check(messageLength < 256, `Message needs to be shorter than 256 bytes. Got: ${messageLength}`);
+ debug_check `${messageLength < 256} Message needs to be shorter than 256 bytes. Got: ${messageLength}`;
  data[offset] = messageLength;
  offset += 1;
  data.set(message.raw, offset);
@@ -31862,6 +31888,7 @@ async function verifyBatch(input) {

  ;// CONCATENATED MODULE: ./packages/core/hash/hash.ts

+
  /**
  * Size of the output of the hash functions.
  *
@@ -31871,6 +31898,7 @@ async function verifyBatch(input) {
  const hash_HASH_SIZE = 32;
  /** A hash without last byte (useful for trie representation). */
  const TRUNCATED_HASH_SIZE = 31;
+ const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE);
  /**
  * Container for some object with a hash that is related to this object.
  *
@@ -31915,7 +31943,7 @@ class PageAllocator {
  // TODO [ToDr] Benchmark the performance!
  constructor(hashesPerPage) {
  this.hashesPerPage = hashesPerPage;
- check(hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage, "Expected a non-zero integer.");
+ check `${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
  this.resetPage();
  }
  resetPage() {
@@ -32214,8 +32242,8 @@ class MultiMap {
  * if needed.
  */
  constructor(keysLength, keyMappers) {
- check(keysLength > 0, "Keys cannot be empty.");
- check(keyMappers === undefined || keyMappers.length === keysLength, "Incorrect number of key mappers given!");
+ check `${keysLength > 0} Keys cannot be empty.`;
+ check `${keyMappers === undefined || keyMappers.length === keysLength} Incorrect number of key mappers given!`;
  this.data = new Map();
  this.keyMappers = keyMappers === undefined ? Array(keysLength).fill(null) : keyMappers;
  }
@@ -32316,7 +32344,7 @@ class sized_array_FixedSizeArray extends Array {
  this.fixedLength = this.length;
  }
  static new(data, len) {
- debug_check(data.length === len, `Expected an array of size: ${len}, got: ${data.length}`);
+ debug_check `${data.length === len} Expected an array of size: ${len}, got: ${data.length}`;
  const arr = new sized_array_FixedSizeArray(len);
  for (let i = 0; i < len; i++) {
  arr[i] = data[i];
@@ -32450,7 +32478,7 @@ class SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
+ debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
  const comparator = first.comparator;
  const arr1 = first.array;
  const arr1Length = arr1.length;
@@ -32570,7 +32598,7 @@ class SortedSet extends SortedArray {
  }
  /** Create a new SortedSet from two sorted collections. */
  static fromTwoSortedCollections(first, second) {
- debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
+ debug_check `${first.comparator === second.comparator} Cannot merge arrays if they do not use the same comparator`;
  const comparator = first.comparator;
  if (first.length === 0) {
  return SortedSet.fromSortedArray(comparator, second.array);
@@ -33589,9 +33617,12 @@ const common_tryAsServiceGas = (v) => opaque_asOpaqueType(numbers_tryAsU64(v));
  /** Attempt to convert a number into `CoreIndex`. */
  const common_tryAsCoreIndex = (v) => opaque_asOpaqueType(numbers_tryAsU16(v));
  /** Attempt to convert a number into `Epoch`. */
- const tryAsEpoch = (v) => opaque_asOpaqueType(numbers_tryAsU32(v));
+ const tryAsEpoch = (v) => asOpaqueType(tryAsU32(v));
  function tryAsPerValidator(array, spec) {
- debug_check(array.length === spec.validatorsCount, `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`);
+ debug_check `
+ ${array.length === spec.validatorsCount}
+ Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
+ `;
  return sized_array_asKnownSize(array);
  }
  const codecPerValidator = (val) => codecWithContext((context) => {
@@ -33600,7 +33631,10 @@ const codecPerValidator = (val) => codecWithContext((context) => {
  });
  });
  function tryAsPerEpochBlock(array, spec) {
- debug_check(array.length === spec.epochLength, `Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}`);
+ debug_check `
+ ${array.length === spec.epochLength}
+ Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
+ `;
  return sized_array_asKnownSize(array);
  }
  const codecPerEpochBlock = (val) => codecWithContext((context) => {
@@ -33871,9 +33905,14 @@ class WorkItem extends WithDebug {



+
  /** Verify the value is within the `WorkItemsCount` bounds. */
  function work_package_tryAsWorkItemsCount(len) {
- return ensure(len, len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS, `WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}`);
+ debug_check `
+ ${len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS}
+ WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}
+ `;
+ return tryAsU8(len);
  }
  /** Minimal number of work items in the work package or results in work report. */
  const MIN_NUMBER_OF_WORK_ITEMS = 1;
@@ -35847,7 +35886,10 @@ class AvailabilityAssignment extends WithDebug {

  /** Check if given array has correct length before casting to the opaque type. */
  function tryAsPerCore(array, spec) {
- debug_check(array.length === spec.coresCount, `Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}`);
+ debug_check `
+ ${array.length === spec.coresCount}
+ Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
+ `;
  return opaque_asOpaqueType(array);
  }
  const codecPerCore = (val) => codecWithContext((context) => {
@@ -37098,7 +37140,7 @@ class InMemoryState extends WithDebug {
  }
  removeServices(servicesRemoved) {
  for (const serviceId of servicesRemoved ?? []) {
- debug_check(this.services.has(serviceId), `Attempting to remove non-existing service: ${serviceId}`);
+ debug_check `${this.services.has(serviceId)} Attempting to remove non-existing service: ${serviceId}`;
  this.services.delete(serviceId);
  }
  }
@@ -37115,7 +37157,10 @@ class InMemoryState extends WithDebug {
  }
  else if (kind === UpdateStorageKind.Remove) {
  const { key } = action;
- debug_check(service.data.storage.has(key.toString()), `Attempting to remove non-existing storage item at ${serviceId}: ${action.key}`);
+ debug_check `
+ ${service.data.storage.has(key.toString())}
+ Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+ `;
  service.data.storage.delete(key.toString());
  }
  else {
@@ -37798,12 +37843,12 @@ class TrieNode {
  }
  /** View this node as a branch node */
  asBranchNode() {
- debug_check(this.getNodeType() === NodeType.Branch);
+ debug_check `${this.getNodeType() === NodeType.Branch} not a branch!`;
  return new BranchNode(this);
  }
  /** View this node as a leaf node */
  asLeafNode() {
- debug_check(this.getNodeType() !== NodeType.Branch);
+ debug_check `${this.getNodeType() !== NodeType.Branch} not a leaf!`;
  return new LeafNode(this);
  }
  toString() {
@@ -38291,7 +38336,7 @@ function createSubtreeForBothLeaves(traversedPath, nodes, leafToReplace, leaf) {
  * Return a single bit from `key` located at `bitIndex`.
  */
  function getBit(key, bitIndex) {
- debug_check(bitIndex < TRUNCATED_KEY_BITS);
+ debug_check `${bitIndex < TRUNCATED_KEY_BITS} invalid bit index passed ${bitIndex}`;
  const byte = bitIndex >>> 3;
  const bit = bitIndex - (byte << 3);
  const mask = 0b10_00_00_00 >>> bit;
@@ -39451,7 +39496,7 @@ class TypedPort {
  * Send a response given the worker that has previously requested something.
  */
  respond(localState, request, data, transferList) {
- debug_check(request.kind === "request");
+ debug_check `${request.kind === "request"}`;
  this.postMessage({
  kind: "response",
  id: request.id,
@@ -39482,10 +39527,11 @@ class TypedPort {
  throw new Error(`Invalid message: ${JSON.stringify(msg)}.`);
  }
  switch (msg.kind) {
- case "response":
- debug_check(this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1);
+ case "response": {
+ debug_check `${this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1}`;
  this.responseListeners.emit(reqEvent(msg.id), null, msg.data, msg.name, msg.localState, msg);
  break;
+ }
  case "signal":
  this.listeners.emit("signal", msg.name, msg.data, msg.localState, msg);
  break;
@@ -39700,9 +39746,9 @@ class MessageChannelStateMachine {
  const promise = new Promise((resolve, reject) => {
  parentPort.once("message", (value) => {
  try {
- debug_check(value.kind === "request", "The initial message should be a request with channel.");
- debug_check(value.name === CHANNEL_MESSAGE);
- debug_check(value.data instanceof external_node_worker_threads_namespaceObject.MessagePort);
+ debug_check `${value.kind === "request"} The initial message should be a request with channel.`;
+ debug_check `${value.name === CHANNEL_MESSAGE}`;
+ debug_check `${value.data instanceof external_node_worker_threads_namespaceObject.MessagePort}`;
  const port = new TypedPort(value.data);
  port.respond(machine.currentState().stateName, value, Ok);
  resolve(port);
@@ -39782,7 +39828,7 @@ class StateMachine {
  /** Get state object by name. */
  getState(name) {
  const state = this.allStates.get(name);
- debug_check(state !== undefined, `Unable to retrieve state object for ${name}.`);
+ debug_check `${state !== undefined} Unable to retrieve state object for ${name}.`;
  return state;
  }
  /** Get the currently active state object. */
@@ -40177,7 +40223,6 @@ class ImporterReady extends State {
  response: rootHash === null ? bytes_Bytes.zero(hash_HASH_SIZE).raw : rootHash.raw,
  };
  }
- // NOTE [ToDr] This should rather be using the import queue, instead of going directly.
  async importBlock(block) {
  if (this.importer === null) {
  state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
@@ -40189,17 +40234,13 @@ class ImporterReady extends State {
  if (block instanceof Uint8Array) {
  const config = this.getConfig();
  const blockView = decoder_Decoder.decodeObject(Block.Codec.View, block, config.chainSpec);
- const headerView = blockView.header.view();
- const timeSlot = headerView.timeSlotIndex.materialize();
  let response;
  try {
- const res = await this.importer.importBlock(blockView, null, config.omitSealVerification);
+ const res = await this.importer.importBlock(blockView, config.omitSealVerification);
  if (res.isOk) {
- state_machine_logger.info(`🧊 Best block: #${timeSlot} (${res.ok.hash})`);
- response = result_Result.ok(this.importer.getBestStateRootHash() ?? bytes_Bytes.zero(hash_HASH_SIZE).asOpaque());
+ response = result_Result.ok(this.importer.getBestStateRootHash() ?? ZERO_HASH.asOpaque());
  }
  else {
- state_machine_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(res)}`);
  response = result_Result.error(resultToString(res));
  }
  }
@@ -40520,19 +40561,22 @@ class Preimages {

  const NO_OF_REGISTERS = 13;
  const REGISTER_SIZE_SHIFT = 3;
- const tryAsRegisterIndex = (index) => ensure(index, index >= 0 && index <= NO_OF_REGISTERS, `Incorrect register index: ${index}!`);
+ const tryAsRegisterIndex = (index) => {
+ debug_check `${index >= 0 && index < NO_OF_REGISTERS} Incorrect register index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
  class Registers {
  bytes;
  asSigned;
  asUnsigned;
  constructor(bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
  this.bytes = bytes;
- debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
+ debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
  }
  static fromBytes(bytes) {
- debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
+ debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  return new Registers(bytes);
  }
  getBytesAsLittleEndian(index, len) {
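
Beyond the mechanical `ensure` to `debug_check` rewrite, this hunk fixes an off-by-one: the old bound `index <= NO_OF_REGISTERS` accepted 13, one past the last of the 13 registers (valid indices are 0..12):

    tryAsRegisterIndex(12); // ok
    tryAsRegisterIndex(13); // now fails the debug check; the old bound let it through
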
@@ -40678,7 +40722,7 @@ class Mask {
  return this.lookupTableForward[index] === 0;
  }
  getNoOfBytesToNextInstruction(index) {
- debug_check(index >= 0, `index (${index}) cannot be a negative number`);
+ debug_check `${index >= 0} index (${index}) cannot be a negative number`;
  return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
  }
  buildLookupTableForward(mask) {
@@ -41678,7 +41722,7 @@ const PAGE_SIZE_SHIFT = 12;
  const PAGE_SIZE = 1 << PAGE_SIZE_SHIFT;
  const MIN_ALLOCATION_SHIFT = (() => {
  const MIN_ALLOCATION_SHIFT = 7;
- debug_check(MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT, "incorrect minimal allocation shift");
+ debug_check `${MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT} incorrect minimal allocation shift`;
  return MIN_ALLOCATION_SHIFT;
  })();
  const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
@@ -41691,16 +41735,28 @@ const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE;
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts


- const tryAsMemoryIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX, `Incorrect memory index: ${index}!`);
- const tryAsSbrkIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX + 1, `Incorrect sbrk index: ${index}!`);
+ const tryAsMemoryIndex = (index) => {
+ debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX} Incorrect memory index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
+ const tryAsSbrkIndex = (index) => {
+ debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX + 1} Incorrect sbrk index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };

  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts


  /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
- const tryAsPageIndex = (index) => ensure(index, index >= 0 && index < PAGE_SIZE, `Incorect page index: ${index}!`);
+ const tryAsPageIndex = (index) => {
+ debug_check `${index >= 0 && index < PAGE_SIZE} Incorrect page index: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
  /** Ensure that given `index` represents an index of one of the pages. */
- const tryAsPageNumber = (index) => ensure(index, index >= 0 && index <= LAST_PAGE_NUMBER, `Incorrect page number: ${index}!`);
+ const tryAsPageNumber = (index) => {
+ debug_check `${index >= 0 && index <= LAST_PAGE_NUMBER} Incorrect page number: ${index}!`;
+ return opaque_asOpaqueType(index);
+ };
  /**
  * Get the next page number and wrap the result if it is bigger than LAST_PAGE_NUMBER
  *
@@ -42232,10 +42288,10 @@ class MemoryBuilder {
  */
  setReadablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- debug_check(start < end, "end has to be bigger than start");
- debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(data.length <= end - start, "the initial data is longer than address range");
+ debug_check `${start < end} end has to be bigger than start`;
+ debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${data.length <= end - start} the initial data is longer than address range`;
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -42260,10 +42316,10 @@ class MemoryBuilder {
  */
  setWriteablePages(start, end, data = new Uint8Array()) {
  this.ensureNotFinalized();
- debug_check(start < end, "end has to be bigger than start");
- debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
- debug_check(data.length <= end - start, "the initial data is longer than address range");
+ debug_check `${start < end} end has to be bigger than start`;
+ debug_check `${start % PAGE_SIZE === 0} start needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${end % PAGE_SIZE === 0} end needs to be a multiple of page size (${PAGE_SIZE})`;
+ debug_check `${data.length <= end - start} the initial data is longer than address range`;
  const length = end - start;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
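
A small sketch of arguments that satisfy the checks above (`PAGE_SIZE` is 4096 here, from `1 << PAGE_SIZE_SHIFT` earlier; the builder instance and raw-number indices are hypothetical, real call sites wrap them in `tryAsMemoryIndex`):

    const builder = new MemoryBuilder();
    builder.setReadablePages(0, 2 * 4096, new Uint8Array(100)); // aligned bounds, data fits
    builder.setReadablePages(0, 4100, new Uint8Array(0));       // fails: end % 4096 !== 0
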
@@ -42285,7 +42341,7 @@ class MemoryBuilder {
  this.ensureNotFinalized();
  const pageOffset = start % PAGE_SIZE;
  const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
- debug_check(data.length <= remainingSpaceOnPage, "The data has to fit into a single page.");
+ debug_check `${data.length <= remainingSpaceOnPage} The data has to fit into a single page.`;
  const length = data.length;
  const range = MemoryRange.fromStartAndLength(start, length);
  this.ensureNoReservedMemoryUsage(range);
@@ -42299,7 +42355,10 @@ class MemoryBuilder {
  return this;
  }
  finalize(startHeapIndex, endHeapIndex) {
- debug_check(startHeapIndex <= endHeapIndex, `startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})`);
+ debug_check `
+ ${startHeapIndex <= endHeapIndex}
+ startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
+ `;
  this.ensureNotFinalized();
  const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
  const pages = PageRange.fromMemoryRange(range);
@@ -42537,7 +42596,7 @@ function mulU64(a, b) {
  *
  * The result of multiplication is a 64-bits number and we are only interested in the part that lands in the upper 32-bits.
  * For example if we multiply `0xffffffff * 0xffffffff`, we get:
-
+
  * | 64-bits | 64-bits |
  * +--------------------+--------------------+
  * | upper | lower |
@@ -42573,7 +42632,7 @@ function mulUpperSS(a, b) {
  return interpretAsSigned(resultLimitedTo64Bits);
  }
  function unsignedRightShiftBigInt(value, shift) {
- debug_check(shift >= 0, "Shift count must be non-negative");
+ debug_check `${shift >= 0} Shift count must be non-negative`;
  const fillBit = value < 0 ? "1" : "0";
  // Convert the BigInt to its binary representation
  const binaryRepresentation = value.toString(2).padStart(64, fillBit);
@@ -43989,7 +44048,10 @@ class TwoRegsTwoImmsDispatcher {
  class JumpTable {
  indices;
  constructor(itemByteLength, bytes) {
- debug_check(itemByteLength === 0 || bytes.length % itemByteLength === 0, `Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!`);
+ debug_check `
+ ${itemByteLength === 0 || bytes.length % itemByteLength === 0}
+ Length of jump table (${bytes.length}) should be a multiple of item length (${itemByteLength})!
+ `;
  const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;
  this.indices = new Uint32Array(length);
  for (let i = 0; i < length; i++) {
@@ -44433,7 +44495,10 @@ class ReturnValue {
  this.consumedGas = consumedGas;
  this.status = status;
  this.memorySlice = memorySlice;
- debug_check((status === null && memorySlice !== null) || (status !== null && memorySlice === null), "`status` and `memorySlice` must not both be null or both be non-null — exactly one must be provided");
+ debug_check `
+ ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
+ 'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
+ `;
  }
  static fromStatus(consumedGas, status) {
  return new ReturnValue(consumedGas, status, null);
@@ -44482,7 +44547,10 @@ class HostCalls {
  if (status !== status_Status.HOST) {
  return this.getReturnValue(status, pvmInstance);
  }
- debug_check(pvmInstance.getExitParam() !== null, "We know that the exit param is not null, because the status is `Status.HOST`");
+ debug_check `
+ ${pvmInstance.getExitParam() !== null}
+ We know that the exit param is not null, because the status is 'Status.HOST'
+ `;
  const hostCallIndex = pvmInstance.getExitParam() ?? -1;
  const gas = pvmInstance.getGasCounter();
  const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -44542,7 +44610,7 @@ class host_calls_manager_HostCallsManager {
  constructor({ missing, handlers = [], }) {
  this.missing = missing;
  for (const handler of handlers) {
- debug_check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
+ debug_check `${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
  this.hostCalls.set(handler.index, handler);
  }
  }
@@ -44665,7 +44733,7 @@ function getServiceId(serviceId) {
  return null;
  }
  function writeServiceIdAsLeBytes(serviceId, destination) {
- debug_check(destination.length >= SERVICE_ID_BYTES, "Not enough space in the destination.");
+ debug_check `${destination.length >= SERVICE_ID_BYTES} Not enough space in the destination.`;
  destination.set(numbers_u32AsLeBytes(serviceId));
  }
  /** Clamp a U64 to the maximum value of a 32-bit unsigned integer. */
@@ -44754,13 +44822,27 @@ class SpiProgram extends WithDebug {
  this.registers = registers;
  }
  }
+ /**
+ * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
+ *
+ * E_n - little endian encoding, n - length
+ * o - initial read only data
+ * w - initial heap
+ * z - heap pages filled with zeros
+ * s - stack size
+ * c - program code
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
+ */
  function decodeStandardProgram(program, args) {
  const decoder = decoder_Decoder.fromBlob(program);
  const oLength = decoder.u24();
  const wLength = decoder.u24();
- const argsLength = ensure(args.length, args.length <= DATA_LEGNTH, "Incorrect arguments length");
- const readOnlyLength = ensure(oLength, oLength <= DATA_LEGNTH, "Incorrect readonly segment length");
- const heapLength = ensure(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
+ debug_check `${args.length <= DATA_LEGNTH} Incorrect arguments length`;
+ debug_check `${oLength <= DATA_LEGNTH} Incorrect readonly segment length`;
+ const readOnlyLength = oLength;
+ debug_check `${wLength <= DATA_LEGNTH} Incorrect heap segment length`;
+ const heapLength = wLength;
  const noOfHeapZerosPages = decoder.u16();
  const stackSize = decoder.u24();
  const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
@@ -44776,14 +44858,14 @@ function decodeStandardProgram(program, args) {
  const stackStart = STACK_SEGMENT - memory_utils_alignToPageSize(stackSize);
  const stackEnd = STACK_SEGMENT;
  const argsStart = ARGS_SEGMENT;
- const argsEnd = argsStart + memory_utils_alignToPageSize(argsLength);
- const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(argsLength);
+ const argsEnd = argsStart + memory_utils_alignToPageSize(args.length);
+ const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(args.length);
  function nonEmpty(s) {
  return s !== false;
  }
  const readableMemory = [
  readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
- argsLength > 0 && getMemorySegment(argsStart, argsEnd, args),
+ args.length > 0 && getMemorySegment(argsStart, argsEnd, args),
  argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
  ].filter(nonEmpty);
  const writeableMemory = [
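
To make the new header-layout comment concrete, here is a hedged sketch of the first bytes of such a program blob (the byte values are illustrative only):

    const blob = Uint8Array.from([
      5, 0, 0,       // E_3(|o|): read-only data is 5 bytes
      3, 0, 0,       // E_3(|w|): initial heap is 3 bytes
      1, 0,          // E_2(z): one zero-filled heap page
      0x00, 0x10, 0, // E_3(s): 4096-byte stack (0x1000, little endian)
      // ...then the 5 read-only bytes, the 3 heap bytes, E_4(|c|) and the code c
    ]);
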
@@ -57452,7 +57534,7 @@ async function startNetwork(config) {
  }

  ;// CONCATENATED MODULE: ./packages/jam/node/package.json
- const package_namespaceObject = {"rE":"0.1.0"};
+ const package_namespaceObject = {"rE":"0.1.1"};
  ;// CONCATENATED MODULE: ./packages/jam/node/main.ts


@@ -57577,88 +57659,6 @@ const initNetwork = async (importerReady, workerConfig, genesisHeaderHash, netwo
  ;// CONCATENATED MODULE: external "node:fs/promises"
  const external_node_fs_promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:fs/promises");
  var external_node_fs_promises_default = /*#__PURE__*/__nccwpck_require__.n(external_node_fs_promises_namespaceObject);
- ;// CONCATENATED MODULE: ./workers/importer/import-queue.ts
-
-
-
-
-
- class ImportQueue {
- spec;
- importer;
- toImport = SortedArray.fromSortedArray((a, b) => {
- const diff = a.timeSlot - b.timeSlot;
- if (diff < 0) {
- return Ordering.Greater;
- }
- if (diff > 0) {
- return Ordering.Less;
- }
- return Ordering.Equal;
- });
- queuedBlocks = HashSet.new();
- lastEpoch = tryAsEpoch(2 ** 32 - 1);
- constructor(spec, importer) {
- this.spec = spec;
- this.importer = importer;
- }
- isCurrentEpoch(timeSlot) {
- const epoch = Math.floor(timeSlot / this.spec.epochLength);
- return this.lastEpoch === epoch;
- }
- startPreverification() {
- for (const entry of this.toImport) {
- if (this.isCurrentEpoch(entry.timeSlot)) {
- entry.seal = this.importer.preverifySeal(entry.timeSlot, entry.block);
- }
- }
- }
- static getBlockDetails(block) {
- let encodedHeader;
- let timeSlot;
- try {
- encodedHeader = block.header.encoded();
- timeSlot = block.header.view().timeSlotIndex.materialize();
- }
- catch {
- return result_Result.error("invalid");
- }
- const headerHash = hashBytes(encodedHeader).asOpaque();
- return result_Result.ok(new WithHash(headerHash, { block, timeSlot }));
- }
- push(details) {
- const headerHash = details.hash;
- if (this.queuedBlocks.has(headerHash)) {
- return result_Result.error("already queued");
- }
- const { timeSlot, block } = details.data;
- const entry = {
- headerHash,
- timeSlot,
- block,
- seal: this.isCurrentEpoch(timeSlot) ? this.importer.preverifySeal(timeSlot, block) : Promise.resolve(null),
- };
- this.toImport.insert(entry);
- this.queuedBlocks.insert(headerHash);
- return result_Result.ok(result_OK);
- }
- shift() {
- const entry = this.toImport.pop();
- if (entry !== undefined) {
- this.queuedBlocks.delete(entry.headerHash);
- const blockEpoch = Math.floor(entry.timeSlot / this.spec.epochLength);
- const hasEpochChanged = this.lastEpoch !== blockEpoch;
- this.lastEpoch = tryAsEpoch(blockEpoch);
- // currently removed block is changing the epoch, so fire up
- // preverifcation for the following blocks.
- if (hasEpochChanged) {
- this.startPreverification();
- }
- }
- return entry;
- }
- }
-
  ;// CONCATENATED MODULE: ./packages/jam/transition/block-verifier.ts


@@ -57672,7 +57672,7 @@ var BlockVerifierError;
  BlockVerifierError[BlockVerifierError["InvalidStateRoot"] = 4] = "InvalidStateRoot";
  BlockVerifierError[BlockVerifierError["AlreadyImported"] = 5] = "AlreadyImported";
  })(BlockVerifierError || (BlockVerifierError = {}));
- const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
+ const block_verifier_ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
  class BlockVerifier {
  hasher;
  blocks;
@@ -57692,7 +57692,7 @@ class BlockVerifier {
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c9d000c9d00?v=0.6.5
  const parentHash = headerView.parentHeaderHash.materialize();
  // importing genesis block
- if (!parentHash.isEqualTo(ZERO_HASH)) {
+ if (!parentHash.isEqualTo(block_verifier_ZERO_HASH)) {
  const parentBlock = this.blocks.getHeader(parentHash);
  if (parentBlock === null) {
  return result_Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
@@ -58205,301 +58205,22 @@ async function verifyTickets(bandersnatch, numberOfValidators, epochRoot, ticket
58205
58205
  }));
58206
58206
  }
58207
58207
 
58208
- ;// CONCATENATED MODULE: ./packages/core/concurrent/parent.ts
58209
-
58210
-
58211
- // Amount of tasks in the queue that will trigger creation of new worker thread.
58212
- // NOTE this might need to be configurable in the future.
58213
- const QUEUE_SIZE_WORKER_THRESHOLD = 5;
58214
- /** Execution pool manager. */
58215
- class Executor {
58216
- workers;
58217
- maxWorkers;
58218
- workerPath;
58219
- /** Initialize a new concurrent executor given a path to the worker. */
58220
- static async initialize(workerPath, options) {
58221
- debug_check(options.maxWorkers > 0, "Max workers has to be positive.");
58222
- debug_check(options.minWorkers <= options.maxWorkers, "Min workers has to be lower or equal to max workers.");
58223
- const workers = [];
58224
- for (let i = 0; i < options.minWorkers; i++) {
58225
- workers.push(await initWorker(workerPath));
58226
- }
58227
- return new Executor(workers, options.maxWorkers, workerPath);
58228
- }
58229
- // keeps track of the indices of worker threads that are currently free and available to execute tasks
58230
- freeWorkerIndices = [];
58231
- taskQueue = [];
58232
- isDestroyed = false;
58233
- isWorkerInitializing = false;
58234
- constructor(workers, maxWorkers, workerPath) {
58235
- this.workers = workers;
58236
- this.maxWorkers = maxWorkers;
58237
- this.workerPath = workerPath;
58238
- // initial free workers.
58239
- for (let i = 0; i < workers.length; i++) {
58240
- this.freeWorkerIndices.push(i);
58241
- }
58242
- }
58243
- /** Attempt to initialize a new worker. */
58244
- async initNewWorker(onSuccess = () => { }) {
58245
- if (this.workers.length >= this.maxWorkers) {
58246
- // biome-ignore lint/suspicious/noConsole: warning
58247
- console.warn(`Task queue has ${this.taskQueue.length} pending items and we can't init any more workers.`);
58248
- return;
58249
- }
58250
- if (this.isWorkerInitializing) {
58251
- return;
58252
- }
58253
- this.isWorkerInitializing = true;
58254
- this.workers.push(await initWorker(this.workerPath));
58255
- this.freeWorkerIndices.push(this.workers.length - 1);
58256
- this.isWorkerInitializing = false;
58257
- onSuccess();
58258
- }
58259
- /** Terminate all workers and clear the executor. */
58260
- async destroy() {
58261
- for (const worker of this.workers) {
58262
- worker.port.close();
58263
- await worker.worker.terminate();
58264
- }
58265
- this.workers.length = 0;
58266
- this.isDestroyed = true;
58267
- }
58268
- /** Execute a task with the given parameters. */
58269
- async run(params) {
58270
- return new Promise((resolve, reject) => {
58271
- if (this.isDestroyed) {
58272
- reject("pool destroyed");
58273
- return;
58274
- }
58275
- this.taskQueue.push({
58276
- params,
58277
- resolve,
58278
- reject,
58279
- });
58280
- this.processEntryFromTaskQueue();
58281
- });
58282
- }
58283
- /** Process a single element from the task queue. */
58284
- processEntryFromTaskQueue() {
58285
- const freeWorker = this.freeWorkerIndices.pop();
58286
- // no free workers available currently,
58287
- // we will retry when one of the tasks completes.
58288
- if (freeWorker === undefined) {
58289
- if (this.taskQueue.length > QUEUE_SIZE_WORKER_THRESHOLD) {
58290
- this.initNewWorker(() => {
58291
- // process an entry in this newly initialized worker.
58292
- this.processEntryFromTaskQueue();
58293
- });
58294
- }
58295
- return;
58296
- }
58297
- const task = this.taskQueue.pop();
58298
- // no tasks in the queue
58299
- if (task === undefined) {
58300
- this.freeWorkerIndices.push(freeWorker);
58301
- return;
58302
- }
58303
- const worker = this.workers[freeWorker];
58304
- worker.runTask(task, () => {
58305
- // mark the worker as available again
58306
- this.freeWorkerIndices.push(freeWorker);
58307
- // and continue processing the queue
58308
- this.processEntryFromTaskQueue();
58309
- });
58310
- }
58311
- }
58312
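For context on the pool deleted in this hunk: it started with minWorkers threads and grew only while tasks were piling up. A minimal sketch of the growth policy, using only names visible in the removed code:

    // inside processEntryFromTaskQueue(), when no free worker was found:
    if (this.taskQueue.length > QUEUE_SIZE_WORKER_THRESHOLD) { // threshold is 5
        // initNewWorker() spawns at most one worker at a time (isWorkerInitializing)
        // and gives up once this.workers.length >= this.maxWorkers.
        this.initNewWorker(() => this.processEntryFromTaskQueue());
    }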
- async function initWorker(workerPath) {
58313
- // create a worker and initialize communication channel
58314
- const { port1, port2 } = new MessageChannel();
58315
- const workerThread = new external_node_worker_threads_namespaceObject.Worker(workerPath, {});
58316
- workerThread.postMessage(port1, [port1]);
58317
- // wait for the worker to start
58318
- await new Promise((resolve, reject) => {
58319
- workerThread.once("message", resolve);
58320
- workerThread.once("error", reject);
58321
- });
58322
- // make sure the threads don't prevent the program from stopping.
58323
- workerThread.unref();
58324
- return new WorkerChannel(workerThread, port2);
58325
- }
58326
- class WorkerChannel {
58327
- worker;
58328
- port;
58329
- constructor(worker, port) {
58330
- this.worker = worker;
58331
- this.port = port;
58332
- }
58333
- runTask(task, onFinish) {
58334
- const message = {
58335
- params: task.params,
58336
- };
58337
- // when we receive a response, make sure to process it
58338
- this.port.once("message", (e) => {
58339
- if (e.isOk) {
58340
- task.resolve(e.ok);
58341
- }
58342
- else {
58343
- task.reject(new Error(e.error));
58344
- }
58345
- onFinish();
58346
- });
58347
- // send the task to work on.
58348
- this.port.postMessage(message, message.params.getTransferList());
58349
- }
58350
- }
58208
+ ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm.ts
58351
58209
 
58352
- ;// CONCATENATED MODULE: ./packages/core/concurrent/worker.ts
58353
-
58354
-
58355
- /** An in-worker abstraction. */
58356
- class ConcurrentWorker {
58357
- runInternal;
58358
- state;
58359
- static new(run, state) {
58360
- return new ConcurrentWorker(run, state);
58361
- }
58362
- constructor(runInternal, state) {
58363
- this.runInternal = runInternal;
58364
- this.state = state;
58365
- }
58366
- listenToParentPort() {
58367
- if (external_node_worker_threads_namespaceObject.parentPort === null) {
58368
- throw new Error("This method is meant to be run inside a worker thread!");
58369
- }
58370
- external_node_worker_threads_namespaceObject.parentPort.once("close", () => {
58371
- process.exit(0);
58372
- });
58373
- external_node_worker_threads_namespaceObject.parentPort.once("message", (port) => {
58374
- this.listenTo(port);
58375
- // send back readiness signal.
58376
- external_node_worker_threads_namespaceObject.parentPort?.postMessage("ready");
58377
- });
58378
- }
58379
- listenTo(port) {
58380
- port.once("close", () => {
58381
- port.removeAllListeners();
58382
- process.exit(0);
58383
- });
58384
- port.on("message", (ev) => {
58385
- const { params } = ev;
58386
- this.run(params)
58387
- .then((result) => {
58388
- const response = result_Result.ok(result);
58389
- port.postMessage(response, result.getTransferList());
58390
- })
58391
- .catch((e) => {
58392
- const response = result_Result.error(`${e}`);
58393
- port.postMessage(response, []);
58394
- });
58395
- });
58396
- }
58397
- async run(params) {
58398
- return await this.runInternal(params, this.state);
58399
- }
58400
- async destroy() { }
58401
- }
58402
-
58403
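The removed worker half above completed a small handshake with its parent: the parent posts a MessagePort, the worker wires it up and answers "ready", and from then on each task round-trips over that port as a Result. A compressed sketch of the exchange, using names from the removed code:

    parentPort.once("message", (port) => {
        this.listenTo(port);                 // start serving tasks on this port
        parentPort.postMessage("ready");     // readiness signal awaited by initWorker()
    });
    // per task: port.postMessage(result_Result.ok(result), result.getTransferList());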
- ;// CONCATENATED MODULE: ./packages/core/concurrent/index.ts
58404
-
58405
-
58406
-
58407
- ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/params.ts
58408
- var Method;
58409
- (function (Method) {
58410
- Method[Method["RingCommitment"] = 0] = "RingCommitment";
58411
- Method[Method["BatchVerifyTickets"] = 1] = "BatchVerifyTickets";
58412
- Method[Method["VerifySeal"] = 2] = "VerifySeal";
58413
- })(Method || (Method = {}));
58414
- class params_Response {
58415
- data;
58416
- constructor(data) {
58417
- this.data = data;
58418
- }
58419
- getTransferList() {
58420
- return [this.data.buffer];
58421
- }
58422
- }
58423
- class Params {
58424
- params;
58425
- constructor(params) {
58426
- this.params = params;
58427
- }
58428
- getTransferList() {
58429
- return [];
58430
- }
58431
- }
58432
-
58433
- ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/worker.ts
58434
-
58435
-
58436
-
58437
-
58438
- const worker = ConcurrentWorker.new(async (p) => {
58439
- await initAll();
58440
- const params = p.params;
58441
- const method = params.method;
58442
- if (method === Method.RingCommitment) {
58443
- return Promise.resolve(new params_Response(bandersnatch_exports.ring_commitment(params.keys)));
58444
- }
58445
- if (method === Method.BatchVerifyTickets) {
58446
- return Promise.resolve(new params_Response(bandersnatch_exports.batch_verify_tickets(params.ringSize, params.commitment, params.ticketsData, params.contextLength)));
58447
- }
58448
- if (method === Method.VerifySeal) {
58449
- return Promise.resolve(new params_Response(bandersnatch_exports.verify_seal(params.authorKey, params.signature, params.payload, params.auxData)));
58450
- }
58451
- debug_assertNever(method);
58452
- }, null);
58453
-
58454
- ;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/index.ts
58455
-
58456
-
58457
-
58458
-
58459
- const bandersnatch_wasm_workerFile = __nccwpck_require__.ab + "bootstrap-bandersnatch.mjs";
58460
58210
  class BandernsatchWasm {
58461
- executor;
58462
- constructor(executor) {
58463
- this.executor = executor;
58464
- }
58465
- destroy() {
58466
- return this.executor.destroy();
58467
- }
58468
- static async new({ synchronous }) {
58469
- const workers = external_node_os_default().cpus().length;
58470
- return new BandernsatchWasm(!synchronous
58471
- ? await Executor.initialize(bandersnatch_wasm_workerFile, {
58472
- minWorkers: Math.max(1, Math.floor(workers / 2)),
58473
- maxWorkers: workers,
58474
- })
58475
- : worker);
58211
+ constructor() { }
58212
+ static async new() {
58213
+ await initAll();
58214
+ return new BandernsatchWasm();
58476
58215
  }
58477
58216
  async verifySeal(authorKey, signature, payload, auxData) {
58478
- const x = await this.executor.run(new Params({
58479
- method: Method.VerifySeal,
58480
- authorKey,
58481
- signature,
58482
- payload,
58483
- auxData,
58484
- }));
58485
- return x.data;
58217
+ return bandersnatch_exports.verify_seal(authorKey, signature, payload, auxData);
58486
58218
  }
58487
58219
  async getRingCommitment(keys) {
58488
- const x = await this.executor.run(new Params({
58489
- method: Method.RingCommitment,
58490
- keys,
58491
- }));
58492
- return x.data;
58220
+ return bandersnatch_exports.ring_commitment(keys);
58493
58221
  }
58494
58222
  async batchVerifyTicket(ringSize, commitment, ticketsData, contextLength) {
58495
- const x = await this.executor.run(new Params({
58496
- method: Method.BatchVerifyTickets,
58497
- ringSize,
58498
- commitment,
58499
- ticketsData,
58500
- contextLength,
58501
- }));
58502
- return x.data;
58223
+ return bandersnatch_exports.batch_verify_tickets(ringSize, commitment, ticketsData, contextLength);
58503
58224
  }
58504
58225
  }
58505
58226
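With the executor gone, every Bandersnatch operation in 0.1.1 executes synchronously on the calling thread. A minimal usage sketch of the simplified class above (the argument values are placeholders, not taken from this diff):

    // initAll() initializes the WASM module inside new():
    const bandersnatch = await BandernsatchWasm.new();
    // the methods stay async for API compatibility, but now resolve immediately
    // with the raw result of the corresponding bandersnatch_exports call:
    const entropy = await bandersnatch.verifySeal(authorKey, signature, payload, auxData);
    const commitment = await bandersnatch.getRingCommitment(keys);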
 
@@ -58542,7 +58263,7 @@ class Safrole {
58542
58263
  chainSpec;
58543
58264
  state;
58544
58265
  bandersnatch;
58545
- constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
58266
+ constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new()) {
58546
58267
  this.chainSpec = chainSpec;
58547
58268
  this.state = state;
58548
58269
  this.bandersnatch = bandersnatch;
@@ -58920,7 +58641,7 @@ var SafroleSealError;
58920
58641
  const BANDERSNATCH_ZERO_KEY = bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque();
58921
58642
  class SafroleSeal {
58922
58643
  bandersnatch;
58923
- constructor(bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
58644
+ constructor(bandersnatch = BandernsatchWasm.new()) {
58924
58645
  this.bandersnatch = bandersnatch;
58925
58646
  }
58926
58647
  /**
@@ -59232,8 +58953,8 @@ class PartiallyUpdatedState {
59232
58953
  this.stateUpdate.services.preimages.push(newUpdate);
59233
58954
  }
59234
58955
  updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
59235
- debug_check(items >= 0, `storageUtilisationCount has to be a positive number, got: ${items}`);
59236
- debug_check(bytes >= 0, `storageUtilisationBytes has to be a positive number, got: ${bytes}`);
58956
+ debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
58957
+ debug_check `${bytes >= 0} storageUtilisationBytes has to be a positive number, got: ${bytes}`;
59237
58958
  const overflowItems = !isU32(items);
59238
58959
  const overflowBytes = !isU64(bytes);
59239
58960
  // TODO [ToDr] this is not specified in GP, but it seems sensible.
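Throughout this release, boolean-plus-message assertions are rewritten as tagged template calls: the first interpolated value carries the condition, and the message is only assembled when the check fails. A hypothetical failing call, shaped like the call sites in this diff:

    const items = -1;
    debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
    // throws: Error("Assertion failure: storageUtilisationCount has to be a positive number, got: -1")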
@@ -59658,7 +59379,7 @@ class AccumulateExternalities {
59658
59379
  }
59659
59380
  // TODO [ToDr] Not sure if we should update the service info in that case,
59660
59381
  // but for now we let that case fall through.
59661
- debug_check(len === PreimageStatusKind.Unavailable);
59382
+ debug_check `${len === PreimageStatusKind.Unavailable} preimage is not unavailable`;
59662
59383
  }
59663
59384
  // make sure we have enough balance for this update
59664
59385
  // https://graypaper.fluffylabs.dev/#/9a08063/381201381601?v=0.6.6
@@ -60154,7 +59875,7 @@ class Assurances {
60154
59875
  return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
60155
59876
  }
60156
59877
  prevValidatorIndex = assurance.validatorIndex;
60157
- debug_check(bitfield.bitLength === coresCount, `Invalid bitfield length of ${bitfield.bitLength}`);
59878
+ debug_check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
60158
59879
  const setBits = bitfield.indicesOfSetBits();
60159
59880
  for (const idx of setBits) {
60160
59881
  perCoreAssurances[idx] += 1;
@@ -62478,7 +62199,7 @@ class DeferredTransfers {
62478
62199
  transferStatistics.set(serviceId, { count: numbers_tryAsU32(transfers.length), gasUsed: common_tryAsServiceGas(consumedGas) });
62479
62200
  const [updatedState, checkpointedState] = partialState.getStateUpdates();
62480
62201
  currentStateUpdate = updatedState;
62481
- debug_check(checkpointedState === null, "On transfer cannot invoke checkpoint.");
62202
+ debug_check `${checkpointedState === null} On transfer cannot invoke checkpoint.`;
62482
62203
  }
62483
62204
  return result_Result.ok({
62484
62205
  // NOTE: we return only services, since it's impossible to update
@@ -62816,7 +62537,7 @@ const ENTROPY_BYTES = 32;
62816
62537
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
62817
62538
  */
62818
62539
  function fisherYatesShuffle(arr, entropy) {
62819
- debug_check(entropy.length === ENTROPY_BYTES, `Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`);
62540
+ debug_check `${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
62820
62541
  const n = arr.length;
62821
62542
  const randomNumbers = hashToNumberSequence(entropy, arr.length);
62822
62543
  const result = new Array(n);
@@ -63663,7 +63384,7 @@ class Statistics {
63663
63384
  /** get statistics for the current epoch */
63664
63385
  const statistics = this.getStatistics(slot);
63665
63386
  const { current, cores, services } = statistics;
63666
- debug_check(current[authorIndex] !== undefined, "authorIndex is out of bounds");
63387
+ debug_check `${current[authorIndex] !== undefined} authorIndex is out of bounds`;
63667
63388
  /** One validator can produce maximal one block per timeslot */
63668
63389
  const newBlocksCount = current[authorIndex].blocks + 1;
63669
63390
  current[authorIndex].blocks = numbers_tryAsU32(newBlocksCount);
@@ -63863,11 +63584,11 @@ class OnChain {
63863
63584
  authorization;
63864
63585
  // chapter 13: https://graypaper.fluffylabs.dev/#/68eaa1f/18b60118b601?v=0.6.4
63865
63586
  statistics;
63866
- constructor(chainSpec, state, blocks, hasher, { enableParallelSealVerification }) {
63587
+ constructor(chainSpec, state, blocks, hasher) {
63867
63588
  this.chainSpec = chainSpec;
63868
63589
  this.state = state;
63869
63590
  this.hasher = hasher;
63870
- const bandersnatch = BandernsatchWasm.new({ synchronous: !enableParallelSealVerification });
63591
+ const bandersnatch = BandernsatchWasm.new();
63871
63592
  this.statistics = new Statistics(chainSpec, state);
63872
63593
  this.safrole = new Safrole(chainSpec, state, bandersnatch);
63873
63594
  this.safroleSeal = new SafroleSeal(bandersnatch);
@@ -63885,16 +63606,16 @@ class OnChain {
63885
63606
  const sealState = this.safrole.getSafroleSealState(timeSlot);
63886
63607
  return await this.safroleSeal.verifyHeaderSeal(block.header.view(), sealState);
63887
63608
  }
63888
- async transition(block, headerHash, preverifiedSeal = null, omitSealVerification = false) {
63609
+ async transition(block, headerHash, omitSealVerification = false) {
63889
63610
  const headerView = block.header.view();
63890
63611
  const header = block.header.materialize();
63891
63612
  const timeSlot = header.timeSlotIndex;
63892
63613
  // safrole seal
63893
- let newEntropyHash = preverifiedSeal;
63614
+ let newEntropyHash;
63894
63615
  if (omitSealVerification) {
63895
63616
  newEntropyHash = hashBytes(header.seal).asOpaque();
63896
63617
  }
63897
- if (newEntropyHash === null) {
63618
+ else {
63898
63619
  const sealResult = await this.verifySeal(timeSlot, block);
63899
63620
  if (sealResult.isError) {
63900
63621
  return stfError(StfErrorKind.SafroleSeal, sealResult);
@@ -64001,7 +63722,7 @@ class OnChain {
64001
63722
  assertEmpty(deferredTransfersRest);
64002
63723
  const accumulateRoot = await this.accumulateOutput.transition({ accumulationOutputLog });
64003
63724
  // recent history
64004
- const recentHistoryUpdate = await this.recentHistory.transition({
63725
+ const recentHistoryUpdate = this.recentHistory.transition({
64005
63726
  partial: recentHistoryPartialUpdate,
64006
63727
  headerHash,
64007
63728
  accumulateRoot,
@@ -64081,6 +63802,7 @@ function checkOffendersMatch(offendersMark, headerOffendersMark) {
64081
63802
 
64082
63803
 
64083
63804
 
63805
+
64084
63806
  var ImporterErrorKind;
64085
63807
  (function (ImporterErrorKind) {
64086
63808
  ImporterErrorKind[ImporterErrorKind["Verifier"] = 0] = "Verifier";
@@ -64108,29 +63830,28 @@ class Importer {
64108
63830
  throw new Error(`Unable to load best state from header hash: ${currentBestHeaderHash}.`);
64109
63831
  }
64110
63832
  this.verifier = new BlockVerifier(hasher, blocks);
64111
- this.stf = new OnChain(spec, state, blocks, hasher, { enableParallelSealVerification: true });
63833
+ this.stf = new OnChain(spec, state, blocks, hasher);
64112
63834
  this.state = state;
64113
63835
  this.currentHash = currentBestHeaderHash;
64114
63836
  logger.info(`😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`);
64115
63837
  }
64116
- /** Attempt to pre-verify the seal to speed up importing. */
64117
- async preverifySeal(timeSlot, block) {
64118
- try {
64119
- const res = await this.stf.verifySeal(timeSlot, block);
64120
- if (res.isOk) {
64121
- return res.ok;
64122
- }
64123
- this.logger.warn(`Unable to pre-verify the seal: ${resultToString(res)}`);
64124
- return null;
64125
- }
64126
- catch (e) {
64127
- this.logger.warn(`Error while trying to pre-verify the seal: ${e}`);
64128
- return null;
64129
- }
64130
- }
64131
- async importBlock(block, preverifiedSeal, omitSealVerification = false) {
63838
+ async importBlock(block, omitSealVerification) {
63839
+ const timer = measure("importBlock");
63840
+ const timeSlot = extractTimeSlot(block);
63841
+ const maybeBestHeader = await this.importBlockInternal(block, omitSealVerification);
63842
+ if (maybeBestHeader.isOk) {
63843
+ const bestHeader = maybeBestHeader.ok;
63844
+ this.logger.info(`🧊 Best block: #${timeSlot} (${bestHeader.hash})`);
63845
+ this.logger.log(timer());
63846
+ return maybeBestHeader;
63847
+ }
63848
+ this.logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
63849
+ this.logger.log(timer());
63850
+ return maybeBestHeader;
63851
+ }
63852
+ async importBlockInternal(block, omitSealVerification = false) {
64132
63853
  const logger = this.logger;
64133
- logger.log(`🧱 Attempting to import a new block ${preverifiedSeal !== null ? "(seal preverified)" : ""}`);
63854
+ logger.log("🧱 Attempting to import a new block");
64134
63855
  const timerVerify = measure("import:verify");
64135
63856
  const hash = await this.verifier.verifyBlock(block);
64136
63857
  logger.log(timerVerify());
@@ -64155,7 +63876,7 @@ class Importer {
64155
63876
  const headerHash = hash.ok;
64156
63877
  logger.log(`🧱 Verified block: Got hash ${headerHash} for block at slot ${timeSlot}.`);
64157
63878
  const timerStf = measure("import:stf");
64158
- const res = await this.stf.transition(block, headerHash, preverifiedSeal, omitSealVerification);
63879
+ const res = await this.stf.transition(block, headerHash, omitSealVerification);
64159
63880
  logger.log(timerStf());
64160
63881
  if (res.isError) {
64161
63882
  return importerError(ImporterErrorKind.Stf, res);
@@ -64205,6 +63926,19 @@ class Importer {
64205
63926
  return stateEntries ?? null;
64206
63927
  }
64207
63928
  }
63929
+ /**
63930
+ * Attempt to safely extract the timeslot of a block.
63931
+ *
63932
+ * NOTE: decoding may fail if the block's encoding is invalid; in that case the maximum timeslot is returned.
63933
+ */
63934
+ function extractTimeSlot(block) {
63935
+ try {
63936
+ return block.header.view().timeSlotIndex.materialize();
63937
+ }
63938
+ catch {
63939
+ return tryAsTimeSlot(2 ** 32 - 1);
63940
+ }
63941
+ }
64208
63942
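The catch branch above falls back to the largest u32 value (2 ** 32 - 1 === 4294967295), so a block whose header cannot be decoded still yields a printable slot for the rejection log instead of throwing before anything is logged. A hypothetical log line under that fallback:

    // importBlock(corruptBlock, false) would then report:
    // ❌ Rejected block #4294967295: ...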
 
64209
63943
  ;// CONCATENATED MODULE: ./workers/importer/index.ts
64210
63944
 
@@ -64216,8 +63950,6 @@ class Importer {
64216
63950
 
64217
63951
 
64218
63952
 
64219
-
64220
-
64221
63953
  const importer_logger = Logger.new(import.meta.filename, "importer");
64222
63954
  if (!external_node_worker_threads_namespaceObject.isMainThread) {
64223
63955
  Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
@@ -64234,7 +63966,6 @@ async function createImporter(config) {
64234
63966
  const importer = new Importer(config.chainSpec, hasher, importer_logger, blocks, states);
64235
63967
  return {
64236
63968
  lmdb,
64237
- blocks,
64238
63969
  importer,
64239
63970
  };
64240
63971
  }
@@ -64249,65 +63980,27 @@ async function importer_main(channel) {
64249
63980
  importer_logger.info(`📥 Importer starting ${channel.currentState()}`);
64250
63981
  // Await the configuration object
64251
63982
  const ready = await channel.waitForState("ready(importer)");
63983
+ let closeDb = async () => { };
64252
63984
  const finished = await ready.doUntil("finished", async (worker, port) => {
64253
63985
  const config = worker.getConfig();
64254
- const { blocks, importer } = await createImporter(config);
63986
+ const { lmdb, importer } = await createImporter(config);
63987
+ closeDb = async () => {
63988
+ await lmdb.close();
63989
+ };
64255
63990
  // TODO [ToDr] this is messy, since we have a circular dependency.
64256
63991
  worker.setImporter(importer);
64257
63992
  importer_logger.info("📥 Importer waiting for blocks.");
64258
- // TODO [ToDr] back pressure?
64259
- let isProcessing = false;
64260
- const importingQueue = new ImportQueue(config.chainSpec, importer);
64261
63993
  worker.onBlock.on(async (block) => {
64262
- const details = ImportQueue.getBlockDetails(block);
64263
- // ignore invalid blocks.
64264
- if (details.isError) {
64265
- importer_logger.trace("🧊 Ignoring invalid block.");
64266
- return;
64267
- }
64268
- // ignore already known blocks
64269
- if (blocks.getHeader(details.ok.hash) !== null) {
64270
- importer_logger.trace(`🧊 Already imported block: #${details.ok.data.timeSlot}.`);
64271
- return;
64272
- }
64273
- const importResult = importingQueue.push(details.ok);
64274
- // ignore blocks that are already queued
64275
- if (importResult.isError) {
64276
- importer_logger.trace(`🧊 Already queued block: #${details.ok.data.timeSlot}.`);
64277
- return;
64278
- }
64279
- importer_logger.log(`🧊 Queued block: #${details.ok.data.timeSlot} (skip seal: ${config.omitSealVerification})`);
64280
- if (isProcessing) {
64281
- return;
64282
- }
64283
- isProcessing = true;
64284
- try {
64285
- for (;;) {
64286
- const entry = importingQueue.shift();
64287
- if (entry === undefined) {
64288
- return;
64289
- }
64290
- const { block, seal, timeSlot } = entry;
64291
- const timer = measure("importBlock");
64292
- const maybeBestHeader = await importer.importBlock(block, await seal, config.omitSealVerification);
64293
- if (maybeBestHeader.isOk) {
64294
- const bestHeader = maybeBestHeader.ok;
64295
- worker.announce(port, bestHeader);
64296
- importer_logger.info(`🧊 Best block: #${bestHeader.data.timeSlotIndex.materialize()} (${bestHeader.hash})`);
64297
- }
64298
- else {
64299
- importer_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
64300
- }
64301
- importer_logger.log(timer());
64302
- }
64303
- }
64304
- finally {
64305
- isProcessing = false;
63994
+ const res = await importer.importBlock(block, config.omitSealVerification);
63995
+ if (res.isOk) {
63996
+ worker.announce(port, res.ok);
64306
63997
  }
64307
63998
  });
64308
63999
  await wasmPromise;
64309
64000
  });
64310
64001
  importer_logger.info("📥 Importer finished. Closing channel.");
64002
+ // close the database
64003
+ await closeDb();
64311
64004
  // Close the comms to gracefully close the app.
64312
64005
  finished.currentState().close(channel);
64313
64006
  }
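The closeDb indirection introduced above defaults to a no-op, so the final cleanup stays safe even if the doUntil callback never got as far as opening the database. A minimal sketch of the pattern, using only names from the diff:

    let closeDb = async () => { };                 // safe default before init
    // once createImporter(config) has returned its lmdb handle:
    closeDb = async () => { await lmdb.close(); };
    // ... and on the way out it is always awaited:
    await closeDb();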
@@ -64503,7 +64196,7 @@ function readJsonBlock(file, chainSpec) {
64503
64196
  var minimist = __nccwpck_require__(8595);
64504
64197
  var minimist_default = /*#__PURE__*/__nccwpck_require__.n(minimist);
64505
64198
  ;// CONCATENATED MODULE: ./bin/jam/package.json
64506
- const jam_package_namespaceObject = {"rE":"0.1.0"};
64199
+ const jam_package_namespaceObject = {"rE":"0.1.1"};
64507
64200
  ;// CONCATENATED MODULE: ./bin/jam/args.ts
64508
64201
 
64509
64202