@typeberry/jam 0.5.1-b2fd1d5 → 0.5.1-eb84786

This diff shows the contents of two publicly released package versions as they appear in their respective registries. It is provided for informational purposes only.
@@ -20837,6 +20837,7 @@ class Provide {
     }
     const result = this.partialState.providePreimage(serviceId, preimage);
     logger_logger.trace `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${result_resultToString(result)}`;
+    logger_logger.insane `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage}) <- ${result_resultToString(result)}`;
     if (result.isOk) {
       regs.set(provide_IN_OUT_REG, results_HostCallResult.OK);
       return;
@@ -21320,6 +21321,7 @@ class Fetch {
       return host_call_handler_PvmExecution.Panic;
     }
     logger_logger.trace `[${this.currentServiceId}] FETCH(${kind}) <- ${value?.toStringTruncated()}`;
+    logger_logger.insane `[${this.currentServiceId}] FETCH(${kind}) <- ${value}`;
     // write result
     regs.set(fetch_IN_OUT_REG, value === null ? results_HostCallResult.NONE : valueLength);
   }
@@ -21619,6 +21621,7 @@ class Lookup {
     // v
     const preImage = this.account.lookup(serviceId, preImageHash);
     logger_logger.trace `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated() ?? "<missing>"}...`;
+    logger_logger.insane `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage ?? "<missing>"}`;
     const preImageLength = preImage === null ? numbers_tryAsU64(0) : numbers_tryAsU64(preImage.raw.length);
     const preimageBlobOffset = regs.get(10);
     const lengthToWrite = regs.get(11);
@@ -21725,6 +21728,7 @@ class Read {
     }
     if (chunk.length > 0) {
       logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`;
+      logger_logger.insane `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk)}`;
     }
     else {
       // just a query for length of stored data
@@ -21790,6 +21794,7 @@ class Write {
     // a
     const result = this.account.write(storageKey, maybeValue);
     logger_logger.trace `[${this.currentServiceId}] WRITE(${storageKey}, ${maybeValue?.toStringTruncated() ?? "remove"}) <- ${result_resultToString(result)}`;
+    logger_logger.insane `[${this.currentServiceId}] WRITE(${storageKey}, ${maybeValue ?? "remove"}) <- ${result_resultToString(result)}`;
     if (result.isError) {
       regs.set(write_IN_OUT_REG, results_HostCallResult.FULL);
       return;
@@ -21851,6 +21856,7 @@ class Export {
     // attempt to export a segment and fail if it's above the maximum.
     const segmentExported = this.refine.exportSegment(segment);
     logger.trace `[${this.currentServiceId}] EXPORT(${segment.toStringTruncated()}) <- ${resultToString(segmentExported)}`;
+    logger.insane `[${this.currentServiceId}] EXPORT(${segment}) <- ${resultToString(segmentExported)}`;
     if (segmentExported.isOk) {
       regs.set(export_IN_OUT_REG, tryAsU64(segmentExported.ok));
     }
@@ -22094,6 +22100,7 @@ class Machine {
     // NOTE: Highly unlikely, but machineId could potentially collide with HOST_CALL_RESULT.
     const machinInitResult = await this.refine.machineInit(code, entrypoint);
     logger.trace `[${this.currentServiceId}] MACHINE(${code.toStringTruncated()}, ${entrypoint}) <- ${resultToString(machinInitResult)}`;
+    logger.insane `[${this.currentServiceId}] MACHINE(${code}, ${entrypoint}) <- ${resultToString(machinInitResult)}`;
     if (machinInitResult.isError) {
       regs.set(machine_IN_OUT_REG, HostCallResult.HUH);
     }
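
All seven host-call hunks above make the same change: the existing `trace` line logs a truncated rendering of the payload via `toStringTruncated()`, while the new `insane` line logs the full value at an even more verbose level. A minimal sketch of such a two-tier tagged-template logger follows; the level names match the diff, but the class itself is a hypothetical stand-in, not the actual @typeberry Logger implementation.

    // Hypothetical sketch only; the real @typeberry Logger API is not shown in this diff.
    enum Level {
      Warn = 0,
      Log = 1,
      Trace = 2,
      Insane = 3, // most verbose: full, untruncated payloads
    }

    class SketchLogger {
      constructor(
        private readonly name: string,
        private readonly level: Level,
      ) {}

      // Tag function matching the `logger.trace `...`` call style seen in the diff.
      private emit(level: Level, parts: TemplateStringsArray, args: unknown[]) {
        if (level > this.level) {
          return; // disabled level: no I/O, and no stringification of the values either
        }
        const text = parts.reduce(
          (out, part, i) => out + part + (i < args.length ? String(args[i]) : ""),
          "",
        );
        console.log(`[${this.name}] ${text}`);
      }

      trace = (parts: TemplateStringsArray, ...args: unknown[]) => this.emit(Level.Trace, parts, args);
      insane = (parts: TemplateStringsArray, ...args: unknown[]) => this.emit(Level.Insane, parts, args);
    }

    // Usage mirroring the PROVIDE hunk: truncated at trace, whole blob at insane.
    const preimage = {
      toStringTruncated: () => "0xdeadbeef…",
      toString: () => "0xdeadbeef00112233445566778899aabbccddeeff",
    };
    const logger = new SketchLogger("host-calls", Level.Trace);
    logger.trace `PROVIDE(${preimage.toStringTruncated()})`; // printed
    logger.insane `PROVIDE(${preimage})`; // skipped at Trace level

One nice property of tagged templates here: the tag receives the raw interpolated values, so the expensive `toString()` of a large blob only runs if the level is actually enabled.
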
@@ -25242,6 +25249,8 @@ class BandernsatchWasm {



+
+const safrole_logger = Logger.new(import.meta.filename, "safrole");
 const safrole_VALIDATOR_META_BYTES = 128;
 const ticketComparator = (a, b) => bytesBlobComparator(a.id, b.id);
 var SafroleErrorCode;
@@ -25329,6 +25338,14 @@ class Safrole {
   async prepareValidatorKeysForNextEpoch(postOffenders) {
     const stateEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
     const nextEpochStart = (stateEpoch + 1) * this.chainSpec.epochLength;
+    /**
+     * In real life, this would occur around ~2840,
+     * but this scenario appears in tests, so we need to handle it.
+     */
+    if (nextEpochStart >= 2 ** 32) {
+      safrole_logger.warn `Timeslot overflow imminent, cannot prepare validator keys for next epoch.`;
+      return Result.ok(null);
+    }
     return await this.getValidatorKeys(common_tryAsTimeSlot(nextEpochStart), postOffenders);
   }
   async getValidatorKeys(timeslot, postOffenders) {
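
The "~2840" in the new comment is easy to sanity-check: timeslots are u32 values, so the first unrepresentable slot is 2^32. Assuming the graypaper's 6-second slot period and a JAM common era starting in 2025 (both taken from the JAM spec, not from this diff):

    // Back-of-the-envelope check of the "~2840" estimate (assumed constants).
    const SLOT_SECONDS = 6; // JAM slot period per the graypaper
    const ERA_START_YEAR = 2025; // JAM common era begins in 2025
    const overflowSlot = 2 ** 32; // first timeslot that no longer fits in a u32
    const years = (overflowSlot * SLOT_SECONDS) / (365.25 * 24 * 60 * 60);
    console.log(ERA_START_YEAR + Math.floor(years)); // => 2841, i.e. "around ~2840"
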
@@ -29070,6 +29087,7 @@ class Importer {
   logger;
   blocks;
   states;
+  options;
   verifier;
   stf;
   // TODO [ToDr] we cannot assume state reference does not change.
@@ -29077,11 +29095,12 @@ class Importer {
   // Hash of the block that we have the posterior state for in `state`.
   currentHash;
   metrics;
-  constructor(spec, pvm, hasher, logger, blocks, states) {
+  constructor(spec, pvm, hasher, logger, blocks, states, options = {}) {
     this.hasher = hasher;
     this.logger = logger;
     this.blocks = blocks;
     this.states = states;
+    this.options = options;
     this.metrics = createMetrics();
     const currentBestHeaderHash = this.blocks.getBestHeaderHash();
     const state = states.getState(currentBestHeaderHash);
@@ -29136,7 +29155,9 @@ class Importer {
     logger.log `🧱 Attempting to import a new block`;
     const timerVerify = measure("import:verify");
     const verifyStart = now();
-    const hash = await this.verifier.verifyBlock(block);
+    const hash = await this.verifier.verifyBlock(block, {
+      skipParentAndStateRoot: this.options.initGenesisFromAncestry ?? false,
+    });
     const verifyDuration = now() - verifyStart;
     logger.log `${timerVerify()}`;
     if (hash.isError) {
@@ -29239,14 +29260,14 @@ function extractTimeSlot(block) {
 const main_logger = Logger.new(import.meta.filename, "importer");
 const keccakHasher = KeccakHasher.create();
 const blake2b = Blake2b.createHasher();
-async function createImporter(config) {
+async function createImporter(config, options = {}) {
   const chainSpec = config.chainSpec;
   const db = config.openDatabase({ readonly: false });
   const pvm = config.workerParams.pvm;
   const blocks = db.getBlocksDb();
   const states = db.getStatesDb();
   const hasher = new TransitionHasher(await keccakHasher, await blake2b);
-  const importer = new Importer(chainSpec, pvm, hasher, main_logger, blocks, states);
+  const importer = new Importer(chainSpec, pvm, hasher, main_logger, blocks, states, options);
   return {
     importer,
     db,
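
Taken together, the Importer changes thread one new flag from `createImporter` down to block verification: when `options.initGenesisFromAncestry` is set, `verifyBlock` skips the parent-hash and state-root checks, which could not pass while the genesis state is being reconstructed from ancestry. A hedged usage sketch, with the surrounding types reduced to only what this diff shows:

    // Sketch only: `config` and the return shape are stand-ins for typeberry internals.
    type ImporterOptions = { initGenesisFromAncestry?: boolean };

    declare const config: {
      chainSpec: unknown;
      openDatabase(opts: { readonly: boolean }): unknown;
      workerParams: { pvm: unknown };
    };
    declare function createImporter(
      config: unknown,
      options?: ImporterOptions,
    ): Promise<{ importer: unknown; db: unknown }>;

    // Bootstrapping genesis from ancestry: parent-hash and state-root checks are skipped.
    const { importer, db } = await createImporter(config, { initGenesisFromAncestry: true });
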