@typeberry/convert 0.1.3-135961b → 0.1.3-462ca77

Files changed (3)
  1. package/index.js +107 -35
  2. package/index.js.map +1 -1
  3. package/package.json +1 -1
package/index.js CHANGED
@@ -5264,6 +5264,20 @@ const result_Result = {
     },
 };
 
+;// CONCATENATED MODULE: ./packages/core/utils/safe-alloc-uint8array.ts
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+const MAX_LENGTH = 2145386496;
+function safe_alloc_uint8array_safeAllocUint8Array(length) {
+    if (length > MAX_LENGTH) {
+        // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+        console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+    }
+    return new Uint8Array(Math.min(MAX_LENGTH, length));
+}
+
 ;// CONCATENATED MODULE: external "node:assert"
 const external_node_assert_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:assert");
 ;// CONCATENATED MODULE: ./packages/core/utils/test.ts
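Note: the new helper clamps oversized allocations to the ~2GB ceiling instead of letting `new Uint8Array` throw a `RangeError`, so callers may receive a shorter buffer than requested (with a console warning). A minimal standalone sketch of that behavior, reconstructed from the hunk above (not the package's exported API):

    // ~2GB, Chrome's maximum ArrayBuffer length (see the sources linked above).
    const MAX_LENGTH = 2145386496;

    function safeAllocUint8Array(length: number): Uint8Array {
      if (length > MAX_LENGTH) {
        console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
      }
      // Clamp instead of throwing: callers get at most MAX_LENGTH bytes.
      return new Uint8Array(Math.min(MAX_LENGTH, length));
    }

    safeAllocUint8Array(64).length;      // 64 — small allocations are unchanged
    safeAllocUint8Array(2 ** 32).length; // 2145386496 — clamped, with a warning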
@@ -5507,6 +5521,7 @@ function isResult(x) {
 
 
 
+
 // EXTERNAL MODULE: ./node_modules/minimist/index.js
 var minimist = __nccwpck_require__(595);
 var minimist_default = /*#__PURE__*/__nccwpck_require__.n(minimist);
@@ -5533,7 +5548,7 @@ class bitvec_BitVec {
      * Create new [`BitVec`] with all values set to `false`.
      */
     static empty(bitLength) {
-        const data = new Uint8Array(Math.ceil(bitLength / 8));
+        const data = safe_alloc_uint8array_safeAllocUint8Array(Math.ceil(bitLength / 8));
         return new bitvec_BitVec(data, bitLength);
     }
     byteLength;
@@ -5733,7 +5748,7 @@ class bytes_BytesBlob {
     static blobFromParts(v, ...rest) {
         const vArr = v instanceof Uint8Array ? [v] : v;
         const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-        const buffer = new Uint8Array(totalLength);
+        const buffer = safe_alloc_uint8array_safeAllocUint8Array(totalLength);
         let offset = 0;
         for (const r of vArr) {
             buffer.set(r, offset);
@@ -5806,7 +5821,7 @@ class bytes_Bytes extends bytes_BytesBlob {
     }
     /** Create an empty [`Bytes<X>`] of given length. */
     static zero(len) {
-        return new bytes_Bytes(new Uint8Array(len), len);
+        return new bytes_Bytes(safe_alloc_uint8array_safeAllocUint8Array(len), len);
     }
     // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
     /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
@@ -6476,7 +6491,7 @@ function addSizeHints(a, b) {
     };
 }
 const DEFAULT_START_LENGTH = 512; // 512B
-const MAX_LENGTH = 10 * 1024 * 1024; // 10MB
+const encoder_MAX_LENGTH = 10 * 1024 * 1024; // 10MB
 /**
  * JAM encoder.
  */
@@ -6492,7 +6507,7 @@ class encoder_Encoder {
             return new encoder_Encoder(options.destination);
         }
         const startLength = options?.expectedLength ?? DEFAULT_START_LENGTH;
-        const buffer = new ArrayBuffer(Math.min(MAX_LENGTH, startLength), { maxByteLength: MAX_LENGTH });
+        const buffer = new ArrayBuffer(Math.min(encoder_MAX_LENGTH, startLength), { maxByteLength: encoder_MAX_LENGTH });
         const destination = new Uint8Array(buffer);
         return new encoder_Encoder(destination, buffer);
     }
@@ -6825,11 +6840,11 @@ class encoder_Encoder {
     ensureBigEnough(length, options = { silent: false }) {
         debug_check `${length >= 0} Negative length given`;
         const newLength = this.offset + length;
-        if (newLength > MAX_LENGTH) {
+        if (newLength > encoder_MAX_LENGTH) {
             if (options.silent) {
                 return;
             }
-            throw new Error(`The encoded size would reach the maximum of ${MAX_LENGTH}.`);
+            throw new Error(`The encoded size would reach the maximum of ${encoder_MAX_LENGTH}.`);
         }
         if (newLength > this.destination.length) {
             // we can try to resize the underlying buffer
@@ -6837,7 +6852,7 @@ class encoder_Encoder {
             // make sure we at least double the size of the buffer every time.
             const minExtend = Math.max(newLength, this.buffer.byteLength << 1);
             // but we must never exceed the max length.
-            this.buffer.resize(Math.min(MAX_LENGTH, minExtend));
+            this.buffer.resize(Math.min(encoder_MAX_LENGTH, minExtend));
         }
         // and then check again
         if (newLength > this.destination.length) {
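Note: the `MAX_LENGTH` → `encoder_MAX_LENGTH` rename in the encoder hunks is a bundler-level disambiguation, since the concatenated safe-alloc module above now also defines a top-level `MAX_LENGTH`; the growth logic itself is unchanged. It relies on in-place resizable `ArrayBuffer`s (ES2024), where a length-tracking `Uint8Array` view follows the buffer as it grows. An illustrative sketch with made-up numbers:

    const MAX = 10 * 1024 * 1024; // mirrors encoder_MAX_LENGTH (10MB)
    const buffer = new ArrayBuffer(512, { maxByteLength: MAX });
    const destination = new Uint8Array(buffer); // no explicit length: tracks resizes

    // Grow to fit `newLength`, at least doubling each time, never past MAX:
    const newLength = 1300;
    buffer.resize(Math.min(MAX, Math.max(newLength, buffer.byteLength << 1)));
    console.log(destination.length); // 1300 (doubling from 512 only reaches 1024)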
@@ -8169,7 +8184,7 @@ async function verify(input) {
         return Promise.resolve([]);
     }
     const dataLength = input.reduce((acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1, 0);
-    const data = new Uint8Array(dataLength);
+    const data = safeAllocUint8Array(dataLength);
     let offset = 0;
     for (const { key, message, signature } of input) {
         data.set(key.raw, offset);
@@ -8256,7 +8271,7 @@ class allocator_SimpleAllocator {
 /** An allocator that works by allocating larger (continuous) pages of memory. */
 class PageAllocator {
     hashesPerPage;
-    page = new Uint8Array(0);
+    page = safeAllocUint8Array(0);
     currentHash = 0;
     // TODO [ToDr] Benchmark the performance!
     constructor(hashesPerPage) {
@@ -8267,7 +8282,7 @@ class PageAllocator {
     resetPage() {
         const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
         this.currentHash = 0;
-        this.page = new Uint8Array(pageSizeBytes);
+        this.page = safeAllocUint8Array(pageSizeBytes);
     }
     emptyHash() {
         const startIdx = this.currentHash * HASH_SIZE;
@@ -14661,7 +14676,7 @@ class SerializedService {
     getStorage(rawKey) {
         if (compatibility_Compatibility.isLessThan(compatibility_GpVersion.V0_6_7)) {
             const SERVICE_ID_BYTES = 4;
-            const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
+            const serviceIdAndKey = safe_alloc_uint8array_safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
             serviceIdAndKey.set(numbers_u32AsLeBytes(this.serviceId));
             serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
             const key = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(hashBytes(serviceIdAndKey).raw));
@@ -14752,7 +14767,7 @@ class nodes_TrieNode {
     raw;
     constructor(
     /** Exactly 512 bits / 64 bytes */
-    raw = new Uint8Array(nodes_TRIE_NODE_BYTES)) {
+    raw = safe_alloc_uint8array_safeAllocUint8Array(nodes_TRIE_NODE_BYTES)) {
         this.raw = raw;
     }
     /** Returns the type of the node */
@@ -16187,7 +16202,7 @@ class registers_Registers {
     bytes;
     asSigned;
     asUnsigned;
-    constructor(bytes = new Uint8Array(registers_NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
+    constructor(bytes = safeAllocUint8Array(registers_NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
         this.bytes = bytes;
         check `${bytes.length === registers_NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
         this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
@@ -16350,7 +16365,7 @@ class mask_Mask {
         return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
     }
     buildLookupTableForward(mask) {
-        const table = new Uint8Array(mask.bitLength);
+        const table = safeAllocUint8Array(mask.bitLength);
         let lastInstructionOffset = 0;
         for (let i = mask.bitLength - 1; i >= 0; i--) {
             if (mask.isSet(i)) {
@@ -19861,7 +19876,7 @@ class host_calls_HostCalls {
         const regs = pvmInstance.getRegisters();
         const maybeAddress = regs.getLowerU32(7);
         const maybeLength = regs.getLowerU32(8);
-        const result = new Uint8Array(maybeLength);
+        const result = safeAllocUint8Array(maybeLength);
         const startAddress = tryAsMemoryIndex(maybeAddress);
         const loadResult = memory.loadInto(result, startAddress);
         if (loadResult.isError) {
@@ -20785,6 +20800,15 @@ var ResultValues;
     ResultValues[ResultValues["Ok"] = 0] = "Ok";
     ResultValues[ResultValues["Error"] = 1] = "Error";
 })(ResultValues || (ResultValues = {}));
+/**
+ * Getting a ring commitment is pretty expensive (hundreds of ms),
+ * yet the validators do not always change.
+ * For current benchmarks, we get a huge hit every epoch, hence
+ * to overcome that we cache the results of getting ring commitment.
+ * Note we can also tentatively populate this cache, before we even
+ * reach the epoch change block.
+ */
+const ringCommitmentCache = [];
 // TODO [ToDr] We export the entire object to allow mocking in tests.
 // Ideally we would just export functions and figure out how to mock
 // properly in ESM.
@@ -20800,9 +20824,27 @@ async function verifySeal(bandersnatch, authorKey, signature, payload, encodedUn
     }
     return result_Result.ok(bytes_Bytes.fromBlob(sealResult.subarray(1), hash_HASH_SIZE).asOpaque());
 }
-async function getRingCommitment(bandersnatch, validators) {
-    const keys = bytes_BytesBlob.blobFromParts(validators.map((x) => x.raw)).raw;
-    const commitmentResult = await bandersnatch.getRingCommitment(keys);
+function getRingCommitment(bandersnatch, validators) {
+    const keys = bytes_BytesBlob.blobFromParts(validators.map((x) => x.raw));
+    // We currently compare the large bytes blob, but the number of entries in the cache
+    // must be low. If the cache ever grows larger, we should rather consider hashing the keys.
+    const MAX_CACHE_ENTRIES = 3;
+    const cacheEntry = ringCommitmentCache.find((v) => v.keys.isEqualTo(keys));
+    if (cacheEntry !== undefined) {
+        return cacheEntry.value;
+    }
+    const value = getRingCommitmentNoCache(bandersnatch, keys);
+    ringCommitmentCache.push({
+        keys,
+        value,
+    });
+    if (ringCommitmentCache.length > MAX_CACHE_ENTRIES) {
+        ringCommitmentCache.shift();
+    }
+    return value;
+}
+async function getRingCommitmentNoCache(bandersnatch, keys) {
+    const commitmentResult = await bandersnatch.getRingCommitment(keys.raw);
     if (commitmentResult[RESULT_INDEX] === ResultValues.Error) {
         return result_Result.error(null);
     }
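Note: the refactored `getRingCommitment` is no longer `async`; it returns the cached promise directly, so concurrent callers asking for the same validator set share one in-flight computation. Eviction is FIFO with at most `MAX_CACHE_ENTRIES = 3` entries, and keys are whole `BytesBlob`s compared with `isEqualTo`, which is cheap only because the cache stays tiny. A generic sketch of the pattern, with hypothetical names (not the package's API):

    type Entry<K, V> = { key: K; value: Promise<V> };

    function cachedFifo<K, V>(
      store: Entry<K, V>[],
      maxEntries: number,
      key: K,
      eq: (a: K, b: K) => boolean,
      compute: (key: K) => Promise<V>,
    ): Promise<V> {
      const hit = store.find((e) => eq(e.key, key));
      if (hit !== undefined) {
        return hit.value;
      }
      const value = compute(key); // deliberately not awaited: the pending promise is cached
      store.push({ key, value });
      if (store.length > maxEntries) {
        store.shift(); // FIFO eviction of the oldest entry
      }
      return value;
    }

One caveat of caching the promise itself: a failed computation (here, a `Result.error`) stays cached until it is evicted.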
@@ -20928,6 +20970,18 @@ class safrole_Safrole {
         }
         return FixedSizeArray.new([newRandomnessAcc, ...rest], 4);
     }
+    /**
+     * Pre-populate cache for validator keys, and especially the ring commitment.
+     *
+     * NOTE the function is still doing quite some work, so it should only be used
+     * once per epoch. The optimisation relies on the fact that the `bandersnatch.getRingCommitment`
+     * call will be cached.
+     */
+    async prepareValidatorKeysForNextEpoch(postOffenders) {
+        const stateEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
+        const nextEpochStart = (stateEpoch + 1) * this.chainSpec.epochLength;
+        return await this.getValidatorKeys(tryAsTimeSlot(nextEpochStart), postOffenders);
+    }
     async getValidatorKeys(timeslot, postOffenders) {
         /**
          * Epoch is not changed so the previous state is returned
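Note: `prepareValidatorKeysForNextEpoch` derives the first timeslot of the next epoch from the current state and calls `getValidatorKeys` at that future slot, which walks the epoch-change path early and warms the ring-commitment cache above. A worked example of the arithmetic, with illustrative numbers:

    const epochLength = 600;                                // slots per epoch, from the chain spec
    const timeslot = 1234;                                  // current state timeslot
    const stateEpoch = Math.floor(timeslot / epochLength);  // 2
    const nextEpochStart = (stateEpoch + 1) * epochLength;  // 1800 — first slot of epoch 3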
@@ -23081,7 +23135,7 @@ class Assign {
         const authorizationQueueStart = regs.get(8);
         // a
         const authManager = getServiceId(regs.get(9));
-        const res = new Uint8Array(hash_HASH_SIZE * gp_constants_AUTHORIZATION_QUEUE_SIZE);
+        const res = safe_alloc_uint8array_safeAllocUint8Array(hash_HASH_SIZE * gp_constants_AUTHORIZATION_QUEUE_SIZE);
         const memoryReadResult = memory.loadInto(res, authorizationQueueStart);
         // error while reading the memory.
         if (memoryReadResult.isError) {
@@ -23169,7 +23223,7 @@ class Bless {
          * https://graypaper.fluffylabs.dev/#/7e6ff6a/368100368100?v=0.6.7
          */
         const autoAccumulateEntries = [];
-        const result = new Uint8Array(tryAsExactBytes(serviceIdAndGasCodec.sizeHint));
+        const result = safe_alloc_uint8array_safeAllocUint8Array(tryAsExactBytes(serviceIdAndGasCodec.sizeHint));
         const decoder = decoder_Decoder.fromBlob(result);
         let memIndex = sourceStart;
         for (let i = 0n; i < numberOfItems; i += 1n) {
@@ -23186,7 +23240,7 @@ class Bless {
             memIndex = numbers_tryAsU64(memIndex + numbers_tryAsU64(decoder.bytesRead()));
         }
         // https://graypaper.fluffylabs.dev/#/7e6ff6a/367200367200?v=0.6.7
-        const res = new Uint8Array(tryAsExactBytes(descriptors_codec.u32.sizeHint) * this.chainSpec.coresCount);
+        const res = safe_alloc_uint8array_safeAllocUint8Array(tryAsExactBytes(descriptors_codec.u32.sizeHint) * this.chainSpec.coresCount);
         const authorizersDecoder = decoder_Decoder.fromBlob(res);
         const memoryReadResult = memory.loadInto(res, authorization);
         if (memoryReadResult.isError) {
@@ -23282,6 +23336,7 @@ class Checkpoint {
 
 
 
+
 const designate_IN_OUT_REG = 7;
 const VALIDATOR_DATA_BYTES = tryAsExactBytes(validator_data_ValidatorData.Codec.sizeHint);
 /**
@@ -23304,7 +23359,7 @@ class Designate {
     async execute(_gas, regs, memory) {
         // `o`
         const validatorsStart = regs.get(designate_IN_OUT_REG);
-        const res = new Uint8Array(VALIDATOR_DATA_BYTES * this.chainSpec.validatorsCount);
+        const res = safe_alloc_uint8array_safeAllocUint8Array(VALIDATOR_DATA_BYTES * this.chainSpec.validatorsCount);
         const memoryReadResult = memory.loadInto(res, validatorsStart);
         // error while reading the memory.
         if (memoryReadResult.isError) {
@@ -23535,7 +23590,7 @@ class Provide {
         const preimageLength = regs.get(9);
         const length = utils_clampU64ToU32(preimageLength);
         // `i`
-        const preimage = bytes_BytesBlob.blobFrom(new Uint8Array(length));
+        const preimage = bytes_BytesBlob.blobFrom(safe_alloc_uint8array_safeAllocUint8Array(length));
         const memoryReadResult = memory.loadInto(preimage.raw, preimageStart);
         if (memoryReadResult.isError) {
             logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
@@ -24097,6 +24152,7 @@ const codecServiceAccountInfoWithThresholdBalance = descriptors_codec.object({
 
 
 
+
 const decoder = new TextDecoder("utf8");
 /**
  * Log message to the console
@@ -24118,8 +24174,8 @@ class log_LogHostCall {
         const targetLength = regs.get(9);
         const msgStart = regs.get(10);
         const msgLength = regs.get(11);
-        const target = new Uint8Array(clampU64ToU32(targetLength));
-        const message = new Uint8Array(clampU64ToU32(msgLength));
+        const target = safeAllocUint8Array(clampU64ToU32(targetLength));
+        const message = safeAllocUint8Array(clampU64ToU32(msgLength));
         if (targetStart !== 0n) {
             memory.loadInto(target, targetStart);
         }
@@ -24138,6 +24194,7 @@ class log_LogHostCall {
 
 
 
+
 const lookup_IN_OUT_REG = 7;
 /**
  * Lookup a preimage.
@@ -24180,7 +24237,7 @@ class lookup_Lookup {
         // NOTE [MaSo] this is ok to cast to number, because we are bounded by the
         // valueLength in both cases and valueLength is WC (4,000,000,000) + metadata
         // which is less than 2^32
-        const chunk = preImage === null ? new Uint8Array(0) : preImage.raw.subarray(Number(offset), Number(offset + length));
+        const chunk = preImage === null ? safeAllocUint8Array(0) : preImage.raw.subarray(Number(offset), Number(offset + length));
         const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
         if (memoryWriteResult.isError) {
             return PvmExecution.Panic;
@@ -24201,6 +24258,7 @@ class lookup_Lookup {
 
 
 
+
 const read_IN_OUT_REG = 7;
 /**
  * Read account storage.
@@ -24228,7 +24286,7 @@ class read_Read {
         const destinationAddress = regs.get(10);
         const storageKeyLengthClamped = clampU64ToU32(storageKeyLength);
         // k
-        const rawKey = BytesBlob.blobFrom(new Uint8Array(storageKeyLengthClamped));
+        const rawKey = BytesBlob.blobFrom(safeAllocUint8Array(storageKeyLengthClamped));
         const memoryReadResult = memory.loadInto(rawKey.raw, storageKeyStartAddress);
         if (memoryReadResult.isError) {
             logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
@@ -24246,7 +24304,7 @@ class read_Read {
         // NOTE [MaSo] this is ok to cast to number, because we are bounded by the
         // valueLength in both cases and valueLength is WC (4,000,000,000) + metadata
         // which is less than 2^32
-        const chunk = value === null ? new Uint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
+        const chunk = value === null ? safeAllocUint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
         const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
         if (memoryWriteResult.isError) {
             logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
@@ -24297,7 +24355,7 @@ class write_Write {
         // v_z
         const valueLength = regs.get(10);
         const storageKeyLengthClamped = clampU64ToU32(storageKeyLength);
-        const rawStorageKey = new Uint8Array(storageKeyLengthClamped);
+        const rawStorageKey = safeAllocUint8Array(storageKeyLengthClamped);
         const keyLoadingResult = memory.loadInto(rawStorageKey, storageKeyStartAddress);
         if (keyLoadingResult.isError) {
             logger.trace `WRITE() <- PANIC`;
@@ -24306,7 +24364,7 @@ class write_Write {
         // k
         const storageKey = asOpaqueType(BytesBlob.blobFrom(rawStorageKey));
         const valueLengthClamped = clampU64ToU32(valueLength);
-        const value = new Uint8Array(valueLengthClamped);
+        const value = safeAllocUint8Array(valueLengthClamped);
         const valueLoadingResult = memory.loadInto(value, valueStart);
         // Note [MaSo] this is ok to return bcs if valueLength is 0, then this panic won't happen
         if (valueLoadingResult.isError) {
@@ -25195,7 +25253,7 @@ function shuffling_fisherYatesShuffle(arr, entropy) {
 }
 function hashToNumberSequence(entropy, length) {
     const result = new Array(length);
-    const randomBytes = new Uint8Array(ENTROPY_BYTES + 4);
+    const randomBytes = safeAllocUint8Array(ENTROPY_BYTES + 4);
     randomBytes.set(entropy.raw);
     for (let i = 0; i < length; i++) {
         randomBytes.set(u32AsLeBytes(tryAsU32(Math.floor(i / 8))), ENTROPY_BYTES);
@@ -26225,6 +26283,7 @@ class chain_stf_OnChain {
     authorization;
     // chapter 13: https://graypaper.fluffylabs.dev/#/68eaa1f/18b60118b601?v=0.6.4
     statistics;
+    isReadyForNextEpoch = Promise.resolve(false);
     constructor(chainSpec, state, blocks, hasher) {
         this.chainSpec = chainSpec;
         this.state = state;
@@ -26243,6 +26302,14 @@ class chain_stf_OnChain {
         this.preimages = new Preimages(state);
         this.authorization = new Authorization(chainSpec, state);
     }
+    /** Pre-populate things worth caching for the next epoch. */
+    async prepareForNextEpoch() {
+        if (await this.isReadyForNextEpoch) {
+            return;
+        }
+        const ready = this.safrole.prepareValidatorKeysForNextEpoch(this.state.disputesRecords.punishSet);
+        this.isReadyForNextEpoch = ready.then((_) => true);
+    }
     async verifySeal(timeSlot, block) {
         const sealState = this.safrole.getSafroleSealState(timeSlot);
         return await this.safroleSeal.verifyHeaderSeal(block.header.view(), sealState);
@@ -26251,6 +26318,10 @@ class chain_stf_OnChain {
         const headerView = block.header.view();
         const header = block.header.materialize();
         const timeSlot = header.timeSlotIndex;
+        // reset the epoch cache state
+        if (headerView.epochMarker.view() !== null) {
+            this.isReadyForNextEpoch = Promise.resolve(false);
+        }
         // safrole seal
         let newEntropyHash;
         if (omitSealVerification) {
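Note: `isReadyForNextEpoch` holds a `Promise<boolean>` rather than a plain flag. Once `prepareForNextEpoch` kicks off preparation, later callers await the stored promise and return after the in-flight work finishes instead of starting it again; a block carrying an epoch marker resets the flag for the following epoch. A simplified sketch of the pattern (`doPrepare` is a hypothetical stand-in for `prepareValidatorKeysForNextEpoch`):

    class EpochPrep {
      // resolves to true once preparation for the next epoch has completed
      private isReady: Promise<boolean> = Promise.resolve(false);

      async prepare(doPrepare: () => Promise<unknown>): Promise<void> {
        if (await this.isReady) {
          return; // already prepared, or an in-flight preparation just finished
        }
        // store the pending promise so subsequent calls wait on it
        this.isReady = doPrepare().then(() => true);
      }

      // on an epoch-marker block: the cached work is stale for the epoch after this one
      reset(): void {
        this.isReady = Promise.resolve(false);
      }
    }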
@@ -26715,6 +26786,7 @@ async function runWorkPackageTest(test, file) {
 
 
 
+
 class MemoryChunkItem {
     static fromJson = {
         address: "number",
@@ -26772,10 +26844,10 @@ async function runPvmTest(testContent) {
         const endPageIndex = tryAsMemoryIndex(startPageIndex + page.length);
         const isWriteable = page["is-writable"];
         if (isWriteable) {
-            memoryBuilder.setWriteablePages(startPageIndex, endPageIndex, new Uint8Array(page.length));
+            memoryBuilder.setWriteablePages(startPageIndex, endPageIndex, safeAllocUint8Array(page.length));
         }
         else {
-            memoryBuilder.setReadablePages(startPageIndex, endPageIndex, new Uint8Array(page.length));
+            memoryBuilder.setReadablePages(startPageIndex, endPageIndex, safeAllocUint8Array(page.length));
         }
     }
     for (const memoryChunk of initialMemory) {
@@ -26827,7 +26899,7 @@ async function runPvmTest(testContent) {
     }, {});
     for (const [pageNumberAsString, memoryChunks] of Object.entries(expectedMemoryByPageNumber)) {
         const pageNumber = tryAsPageNumber(Number(pageNumberAsString));
-        const expectedPage = new Uint8Array(PAGE_SIZE);
+        const expectedPage = safeAllocUint8Array(PAGE_SIZE);
         for (const memoryChunk of memoryChunks) {
             const pageIndex = memoryChunk.address % PAGE_SIZE;
             expectedPage.set(memoryChunk.contents, pageIndex);