@typeberry/jam 0.5.1-b2fd1d5 → 0.5.1-eb84786

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
package/index.js CHANGED
@@ -152591,12 +152591,7 @@ function getDatabasePath(blake2b, nodeName, genesisHeader, databaseBasePath) {
  genesisHeaderHash,
  };
  }
- /**
- * Initialize the database unless it's already initialized.
- *
- * The function checks the genesis header
- */
- async function initializeDatabase(spec, blake2b, genesisHeaderHash, rootDb, config, ancestry) {
+ async function initializeDatabase(spec, blake2b, genesisHeaderHash, rootDb, config, ancestry, options = {}) {
  const blocks = rootDb.getBlocksDb();
  const states = rootDb.getStatesDb();
  const header = blocks.getBestHeaderHash();
@@ -152617,14 +152612,16 @@ async function initializeDatabase(spec, blake2b, genesisHeaderHash, rootDb, conf
  common_logger.log `🧬 Writing genesis block #${genesisHeader.timeSlotIndex}: ${genesisHeaderHash}`;
  const { genesisStateSerialized, genesisStateRootHash } = loadGenesisState(spec, blake2b, config.genesisState);
  // write to db
- await blocks.insertBlock(new WithHash(genesisHeaderHash, blockView));
+ // When initGenesisFromAncestry is set, use ancestry[0][0] as the initial block hash (for fuzz-target mode)
+ const initialBlockHash = (options.initGenesisFromAncestry ?? false) && ancestry.length > 0 ? ancestry[0][0] : genesisHeaderHash;
+ await blocks.insertBlock(new WithHash(initialBlockHash, blockView));
  // insert fake blocks for ancestry data
  for (const [hash, slot] of ancestry) {
  await blocks.insertBlock(new WithHash(hash, reencodeAsView(Block.Codec, emptyBlock(slot), spec)));
  }
- await states.insertInitialState(genesisHeaderHash, genesisStateSerialized);
- await blocks.setPostStateRoot(genesisHeaderHash, genesisStateRootHash);
- await blocks.setBestHeaderHash(genesisHeaderHash);
+ await states.insertInitialState(initialBlockHash, genesisStateSerialized);
+ await blocks.setPostStateRoot(initialBlockHash, genesisStateRootHash);
+ await blocks.setBestHeaderHash(initialBlockHash);
  }
  function loadGenesisState(spec, blake2b, data) {
  const stateEntries = state_entries_StateEntries.fromEntriesUnsafe(data.entries());
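
The two hunks above extend initializeDatabase with an options bag: when initGenesisFromAncestry is enabled and ancestry data is present, the hash of the first ancestry entry (ancestry[0][0]) replaces the genesis header hash as the database's initial block, post-state-root key, and best-header pointer. A minimal standalone sketch of that selection (the helper name is hypothetical, extracted here only for illustration):

// Hypothetical helper mirroring the selection logic in initializeDatabase above.
// `ancestry` is a list of [hash, slot] pairs; the flag defaults to false.
function pickInitialBlockHash(genesisHeaderHash, ancestry, options = {}) {
  const useAncestry = (options.initGenesisFromAncestry ?? false) && ancestry.length > 0;
  return useAncestry ? ancestry[0][0] : genesisHeaderHash;
}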
@@ -159974,6 +159971,7 @@ class Provide {
  }
  const result = this.partialState.providePreimage(serviceId, preimage);
  logger_logger.trace `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${result_resultToString(result)}`;
+ logger_logger.insane `[${this.currentServiceId}] PROVIDE(${serviceId}, ${preimage}) <- ${result_resultToString(result)}`;
  if (result.isOk) {
  regs.set(provide_IN_OUT_REG, results_HostCallResult.OK);
  return;
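
This is the first of seven identical additions: each PVM host call keeps its truncated trace line and gains an insane-level line logging the full, untruncated value. The same pattern repeats for FETCH, LOOKUP, READ, WRITE, EXPORT and MACHINE in the hunks below. A minimal sketch of why a tagged-template log level makes the extra line cheap (an assumption about typeberry's logger, but the standard motivation for this style):

// The tag receives raw values, so the expensive full stringification of a
// blob is only performed when the level is actually enabled.
const INSANE_ENABLED = false; // illustrative toggle
const insane = (strings, ...values) => {
  if (!INSANE_ENABLED) return; // no formatting cost when the level is off
  console.log(String.raw(strings, ...values.map(String)));
};
insane `PROVIDE(${"0x00"}) <- OK`; // prints nothing unless INSANE_ENABLED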
@@ -160457,6 +160455,7 @@ class Fetch {
  return host_call_handler_PvmExecution.Panic;
  }
  logger_logger.trace `[${this.currentServiceId}] FETCH(${kind}) <- ${value?.toStringTruncated()}`;
+ logger_logger.insane `[${this.currentServiceId}] FETCH(${kind}) <- ${value}`;
  // write result
  regs.set(fetch_IN_OUT_REG, value === null ? results_HostCallResult.NONE : valueLength);
  }
@@ -160756,6 +160755,7 @@ class Lookup {
  // v
  const preImage = this.account.lookup(serviceId, preImageHash);
  logger_logger.trace `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated() ?? "<missing>"}...`;
+ logger_logger.insane `[${this.currentServiceId}] LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage ?? "<missing>"}`;
  const preImageLength = preImage === null ? numbers_tryAsU64(0) : numbers_tryAsU64(preImage.raw.length);
  const preimageBlobOffset = regs.get(10);
  const lengthToWrite = regs.get(11);
@@ -160862,6 +160862,7 @@ class Read {
  }
  if (chunk.length > 0) {
  logger_logger.trace `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`;
+ logger_logger.insane `[${this.currentServiceId}] READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk)}`;
  }
  else {
  // just a query for length of stored data
@@ -160927,6 +160928,7 @@ class Write {
  // a
  const result = this.account.write(storageKey, maybeValue);
  logger_logger.trace `[${this.currentServiceId}] WRITE(${storageKey}, ${maybeValue?.toStringTruncated() ?? "remove"}) <- ${result_resultToString(result)}`;
+ logger_logger.insane `[${this.currentServiceId}] WRITE(${storageKey}, ${maybeValue ?? "remove"}) <- ${result_resultToString(result)}`;
  if (result.isError) {
  regs.set(write_IN_OUT_REG, results_HostCallResult.FULL);
  return;
@@ -160988,6 +160990,7 @@ class Export {
  // attempt to export a segment and fail if it's above the maximum.
  const segmentExported = this.refine.exportSegment(segment);
  logger.trace `[${this.currentServiceId}] EXPORT(${segment.toStringTruncated()}) <- ${resultToString(segmentExported)}`;
+ logger.insane `[${this.currentServiceId}] EXPORT(${segment}) <- ${resultToString(segmentExported)}`;
  if (segmentExported.isOk) {
  regs.set(export_IN_OUT_REG, tryAsU64(segmentExported.ok));
  }
@@ -161231,6 +161234,7 @@ class Machine {
  // NOTE: Highly unlikely, but machineId could potentially collide with HOST_CALL_RESULT.
  const machinInitResult = await this.refine.machineInit(code, entrypoint);
  logger.trace `[${this.currentServiceId}] MACHINE(${code.toStringTruncated()}, ${entrypoint}) <- ${resultToString(machinInitResult)}`;
+ logger.insane `[${this.currentServiceId}] MACHINE(${code}, ${entrypoint}) <- ${resultToString(machinInitResult)}`;
  if (machinInitResult.isError) {
  regs.set(machine_IN_OUT_REG, HostCallResult.HUH);
  }
@@ -164379,6 +164383,8 @@ class BandernsatchWasm {
 
 
 
+
+ const safrole_logger = logger_Logger.new(import.meta.filename, "safrole");
  const safrole_VALIDATOR_META_BYTES = 128;
  const ticketComparator = (a, b) => bytesBlobComparator(a.id, b.id);
  var SafroleErrorCode;
@@ -164466,6 +164472,14 @@ class Safrole {
  async prepareValidatorKeysForNextEpoch(postOffenders) {
  const stateEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
  const nextEpochStart = (stateEpoch + 1) * this.chainSpec.epochLength;
+ /**
+ * In real life, this would occur around ~2840,
+ * but this scenario appears in tests, so we need to handle it.
+ */
+ if (nextEpochStart >= 2 ** 32) {
+ safrole_logger.warn `Timeslot overflow imminent, cannot prepare validator keys for next epoch.`;
+ return result_Result.ok(null);
+ }
  return await this.getValidatorKeys(tryAsTimeSlot(nextEpochStart), postOffenders);
  }
  async getValidatorKeys(timeslot, postOffenders) {
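
The guard bails out once the next epoch's starting timeslot no longer fits in an unsigned 32-bit integer, the width of JAM timeslots. The "~2840" in the new comment is consistent with back-of-the-envelope arithmetic, assuming JAM's 6-second slot period and a genesis in the mid-2020s:

// Rough check of the "~2840" estimate (6-second slots assumed):
const SLOT_SECONDS = 6;
const secondsUntilOverflow = 2 ** 32 * SLOT_SECONDS;                // ~2.58e10 s
const yearsUntilOverflow = secondsUntilOverflow / (365.25 * 86400); // ~817 years
console.log(2025 + Math.round(yearsUntilOverflow));                 // ~2842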
@@ -166611,6 +166625,7 @@ class Importer {
  logger;
  blocks;
  states;
+ options;
  verifier;
  stf;
  // TODO [ToDr] we cannot assume state reference does not change.
@@ -166618,11 +166633,12 @@ class Importer {
  // Hash of the block that we have the posterior state for in `state`.
  currentHash;
  metrics;
- constructor(spec, pvm, hasher, logger, blocks, states) {
+ constructor(spec, pvm, hasher, logger, blocks, states, options = {}) {
  this.hasher = hasher;
  this.logger = logger;
  this.blocks = blocks;
  this.states = states;
+ this.options = options;
  this.metrics = metrics_createMetrics();
  const currentBestHeaderHash = this.blocks.getBestHeaderHash();
  const state = states.getState(currentBestHeaderHash);
@@ -166677,7 +166693,9 @@ class Importer {
  logger.log `🧱 Attempting to import a new block`;
  const timerVerify = measure("import:verify");
  const verifyStart = now();
- const hash = await this.verifier.verifyBlock(block);
+ const hash = await this.verifier.verifyBlock(block, {
+ skipParentAndStateRoot: this.options.initGenesisFromAncestry ?? false,
+ });
  const verifyDuration = now() - verifyStart;
  logger.log `${timerVerify()}`;
  if (hash.isError) {
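
The reason for the new verifier option: when the database was seeded from bare ancestry entries instead of a real genesis block, the first block the fuzz target imports references a parent hash and prior state root that the local chain cannot vouch for, so exactly those two checks are skipped. A hedged sketch of how a verifier might honor the flag (the check helpers are illustrative, not typeberry's actual verifier internals):

// Illustrative only -- not the actual verifyBlock implementation.
async function verifyBlock(block, options = {}) {
  if (!(options.skipParentAndStateRoot ?? false)) {
    checkParentIsKnown(block);   // hypothetical: parent hash exists locally
    checkPriorStateRoot(block);  // hypothetical: root matches parent's post-state
  }
  // ...seal, extrinsics and remaining verification continue as usual.
}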
@@ -166780,14 +166798,14 @@ function extractTimeSlot(block) {
  const main_logger = logger_Logger.new(import.meta.filename, "importer");
  const keccakHasher = KeccakHasher.create();
  const blake2b = blake2b_Blake2b.createHasher();
- async function createImporter(config) {
+ async function createImporter(config, options = {}) {
  const chainSpec = config.chainSpec;
  const db = config.openDatabase({ readonly: false });
  const pvm = config.workerParams.pvm;
  const blocks = db.getBlocksDb();
  const states = db.getStatesDb();
  const hasher = new TransitionHasher(await keccakHasher, await blake2b);
- const importer = new Importer(chainSpec, pvm, hasher, main_logger, blocks, states);
+ const importer = new Importer(chainSpec, pvm, hasher, main_logger, blocks, states, options);
  return {
  importer,
  db,
@@ -170123,7 +170141,7 @@ const initNetwork = async (importer, rootDb, baseConfig, genesisHeaderHash, netw
 
 
  const zeroHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
- async function mainImporter(config, withRelPath) {
+ async function mainImporter(config, withRelPath, options = {}) {
  await initAll();
  common_logger.info `🫐 Typeberry ${node_package_namespaceObject.rE}. GP: ${CURRENT_VERSION} (${CURRENT_SUITE})`;
  common_logger.info `🎸 Starting importer: ${config.nodeName}.`;
@@ -170153,9 +170171,13 @@ async function mainImporter(config, withRelPath) {
  // Initialize the database with genesis state and block if there isn't one.
  common_logger.info `🛢️ Opening database at ${dbPath}`;
  const rootDb = workerConfig.openDatabase({ readonly: false });
- await initializeDatabase(chainSpec, blake2b, genesisHeaderHash, rootDb, config.node.chainSpec, config.ancestry);
+ await initializeDatabase(chainSpec, blake2b, genesisHeaderHash, rootDb, config.node.chainSpec, config.ancestry, {
+ initGenesisFromAncestry: options.initGenesisFromAncestry,
+ });
  await rootDb.close();
- const { db, importer } = await createImporter(workerConfig);
+ const { db, importer } = await createImporter(workerConfig, {
+ initGenesisFromAncestry: options.initGenesisFromAncestry,
+ });
  await importer.prepareForNextEpoch();
  const api = {
  chainSpec,
@@ -170254,7 +170276,7 @@ async function mainFuzz(fuzzConfig, withRelPath) {
  },
  ancestry,
  network: null,
- }, withRelPath);
+ }, withRelPath, { initGenesisFromAncestry: fuzzConfig.initGenesisFromAncestry });
  runningNode = newNode;
  return await newNode.getBestStateRootHash();
  },
@@ -170486,6 +170508,7 @@ const jam_package_namespaceObject = {"rE":"0.5.1"};
 
 
 
+
  const HELP = `
  @typeberry/jam ${jam_package_namespaceObject.rE} by Fluffy Labs.
 
@@ -170574,6 +170597,11 @@ function parseArgs(input, withRelPath) {
  case Command.FuzzTarget: {
  const data = parseSharedOptions(args);
  const { version } = parseValueOption(args, "version", "number", parseFuzzVersion, 1);
+ const initGenesisFromAncestry = args["init-genesis-from-ancestry"] === true;
+ delete args["init-genesis-from-ancestry"];
+ if (initGenesisFromAncestry) {
+ common_logger.warn `Init genesis from ancestry is enabled. Parent hash and state root verification is skipped.`;
+ }
  const socket = args._.shift() ?? null;
  assertNoMoreArgs(args);
  return {
@@ -170582,6 +170610,7 @@ function parseArgs(input, withRelPath) {
  ...data,
  version,
  socket,
+ initGenesisFromAncestry,
  },
  };
  }
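
The new flag is a plain boolean on the fuzz-target command, consumed from the parsed args before assertNoMoreArgs runs so it is not reported as an unknown option. A hypothetical invocation (only the flag name is taken from the parser above; the binary name, command spelling and socket path are assumptions for illustration):

npx @typeberry/jam fuzz-target --init-genesis-from-ancestry ./fuzzer.sock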
@@ -170781,7 +170810,8 @@ async function startNode(args, withRelPath) {
  if (args.command === Command.FuzzTarget) {
  const version = args.args.version;
  const socket = args.args.socket;
- return mainFuzz({ jamNodeConfig, version, socket }, withRelPath);
+ const initGenesisFromAncestry = args.args.initGenesisFromAncestry;
+ return mainFuzz({ jamNodeConfig, version, socket, initGenesisFromAncestry }, withRelPath);
  }
  // Just import a bunch of blocks
  if (args.command === Command.Import) {
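
Taken together, the hunks thread a single boolean end to end. A summary of the flow, derived from the diff above:

// --init-genesis-from-ancestry (parseArgs, Command.FuzzTarget)
//   -> startNode -> mainFuzz({ ..., initGenesisFromAncestry })
//        -> mainImporter(config, withRelPath, { initGenesisFromAncestry })
//             -> initializeDatabase(...)  // ancestry[0][0] becomes the initial block hash
//             -> createImporter(...) -> new Importer(..., options)
//                  -> verifier.verifyBlock(block, { skipParentAndStateRoot })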