@typeberry/jam 0.1.3-135961b → 0.1.3-462ca77
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bootstrap-generator.mjs +31 -16
- package/bootstrap-generator.mjs.map +1 -1
- package/bootstrap-importer.mjs +115 -32
- package/bootstrap-importer.mjs.map +1 -1
- package/bootstrap-network.mjs +28 -13
- package/bootstrap-network.mjs.map +1 -1
- package/index.js +116 -32
- package/index.js.map +1 -1
- package/package.json +1 -1
package/index.js
CHANGED
@@ -25323,6 +25323,20 @@ const result_Result = {
     },
 };
 
+;// CONCATENATED MODULE: ./packages/core/utils/safe-alloc-uint8array.ts
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+const MAX_LENGTH = 2145386496;
+function safe_alloc_uint8array_safeAllocUint8Array(length) {
+    if (length > MAX_LENGTH) {
+        // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+        console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+    }
+    return new Uint8Array(Math.min(MAX_LENGTH, length));
+}
+
 ;// CONCATENATED MODULE: external "node:assert"
 const external_node_assert_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:assert");
 ;// CONCATENATED MODULE: ./packages/core/utils/test.ts
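
The helper above is the centerpiece of this release: every allocation whose size can be influenced by external input is routed through a single clamp instead of a raw `new Uint8Array(length)`. A minimal TypeScript sketch of what the un-bundled `safe-alloc-uint8array.ts` module plausibly looks like (the export name and exact shape are assumptions inferred from the bundled output above):

// Hypothetical reconstruction of packages/core/utils/safe-alloc-uint8array.ts.
// 2145386496 bytes is just under 2 GiB, the maximum ArrayBuffer length on Chrome.
const MAX_LENGTH = 2145386496;

export function safeAllocUint8Array(length: number): Uint8Array {
  if (length > MAX_LENGTH) {
    // Clamp instead of throwing: the caller gets a shorter buffer rather than
    // a RangeError when an oversized (possibly adversarial) length comes in.
    console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
  }
  return new Uint8Array(Math.min(MAX_LENGTH, length));
}
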
@@ -25566,6 +25580,7 @@ function isResult(x) {
 
 
 
+
 ;// CONCATENATED MODULE: ./packages/core/bytes/bitvec.ts
 
 /**
@@ -25587,7 +25602,7 @@ class BitVec {
      * Create new [`BitVec`] with all values set to `false`.
      */
     static empty(bitLength) {
-        const data =
+        const data = safe_alloc_uint8array_safeAllocUint8Array(Math.ceil(bitLength / 8));
         return new BitVec(data, bitLength);
     }
     byteLength;
@@ -25787,7 +25802,7 @@ class bytes_BytesBlob {
     static blobFromParts(v, ...rest) {
         const vArr = v instanceof Uint8Array ? [v] : v;
         const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-        const buffer =
+        const buffer = safe_alloc_uint8array_safeAllocUint8Array(totalLength);
         let offset = 0;
         for (const r of vArr) {
             buffer.set(r, offset);
@@ -25860,7 +25875,7 @@ class bytes_Bytes extends bytes_BytesBlob {
     }
     /** Create an empty [`Bytes<X>`] of given length. */
     static zero(len) {
-        return new bytes_Bytes(
+        return new bytes_Bytes(safe_alloc_uint8array_safeAllocUint8Array(len), len);
     }
     // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
     /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
@@ -26530,7 +26545,7 @@ function addSizeHints(a, b) {
     };
 }
 const DEFAULT_START_LENGTH = 512; // 512B
-const
+const encoder_MAX_LENGTH = 10 * 1024 * 1024; // 10MB
 /**
  * JAM encoder.
  */
@@ -26546,7 +26561,7 @@ class encoder_Encoder {
             return new encoder_Encoder(options.destination);
         }
         const startLength = options?.expectedLength ?? DEFAULT_START_LENGTH;
-        const buffer = new ArrayBuffer(Math.min(
+        const buffer = new ArrayBuffer(Math.min(encoder_MAX_LENGTH, startLength), { maxByteLength: encoder_MAX_LENGTH });
         const destination = new Uint8Array(buffer);
         return new encoder_Encoder(destination, buffer);
     }
@@ -26879,11 +26894,11 @@ class encoder_Encoder {
     ensureBigEnough(length, options = { silent: false }) {
         debug_check `${length >= 0} Negative length given`;
         const newLength = this.offset + length;
-        if (newLength >
+        if (newLength > encoder_MAX_LENGTH) {
             if (options.silent) {
                 return;
             }
-            throw new Error(`The encoded size would reach the maximum of ${
+            throw new Error(`The encoded size would reach the maximum of ${encoder_MAX_LENGTH}.`);
         }
         if (newLength > this.destination.length) {
             // we can try to resize the underlying buffer
@@ -26891,7 +26906,7 @@ class encoder_Encoder {
             // make sure we at least double the size of the buffer every time.
             const minExtend = Math.max(newLength, this.buffer.byteLength << 1);
             // but we must never exceed the max length.
-            this.buffer.resize(Math.min(
+            this.buffer.resize(Math.min(encoder_MAX_LENGTH, minExtend));
         }
         // and then check again
         if (newLength > this.destination.length) {
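
The three encoder hunks above work together: the buffer is now created as a resizable ArrayBuffer capped at `encoder_MAX_LENGTH`, and `ensureBigEnough` grows it geometrically up to that cap. A self-contained sketch of the same growth strategy (class and method names here are illustrative, not the package's API); note that a `Uint8Array` constructed over a resizable buffer without an explicit length is length-tracking, which is why the `destination` view in the diff keeps working after `resize`:

// Sketch of geometric growth over a resizable ArrayBuffer (Node 20+ / ES2024).
const MAX_LENGTH = 10 * 1024 * 1024; // 10MB, mirroring encoder_MAX_LENGTH

class GrowableSink {
  private buffer = new ArrayBuffer(512, { maxByteLength: MAX_LENGTH });
  // no explicit length => the view tracks the buffer as it is resized
  private destination = new Uint8Array(this.buffer);
  private offset = 0;

  write(bytes: Uint8Array): void {
    const newLength = this.offset + bytes.length;
    if (newLength > MAX_LENGTH) {
      throw new Error(`The encoded size would reach the maximum of ${MAX_LENGTH}.`);
    }
    if (newLength > this.destination.length) {
      // at least double the allocation, but never exceed the cap
      const minExtend = Math.max(newLength, this.buffer.byteLength << 1);
      this.buffer.resize(Math.min(MAX_LENGTH, minExtend));
    }
    this.destination.set(bytes, this.offset);
    this.offset = newLength;
  }
}
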
@@ -28651,7 +28666,7 @@ async function ed25519_verify(input) {
         return Promise.resolve([]);
     }
     const dataLength = input.reduce((acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1, 0);
-    const data =
+    const data = safe_alloc_uint8array_safeAllocUint8Array(dataLength);
     let offset = 0;
     for (const { key, message, signature } of input) {
         data.set(key.raw, offset);
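
The batch verification path flattens all inputs into one contiguous allocation before handing them to the verifier, which is why `dataLength` is summed up front. A generic sketch of the pack-at-offset pattern (the field order and the purpose of the extra byte per entry are not visible in this diff, so the layout below is illustrative):

// Illustrative packing of (key, signature, message) triples into one buffer.
interface VerifyInput {
  key: Uint8Array;
  signature: Uint8Array;
  message: Uint8Array;
}

function packInputs(inputs: VerifyInput[]): Uint8Array {
  // one extra byte per entry, matching the `+ 1` in the reduce above
  const total = inputs.reduce(
    (acc, { key, signature, message }) => acc + key.length + signature.length + message.length + 1,
    0,
  );
  const data = new Uint8Array(total);
  let offset = 0;
  for (const { key, signature, message } of inputs) {
    data.set(key, offset);
    offset += key.length;
    data.set(signature, offset);
    offset += signature.length;
    data.set(message, offset);
    offset += message.length;
    offset += 1; // reserved byte; left zeroed in this sketch
  }
  return data;
}
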
@@ -28738,7 +28753,7 @@ class allocator_SimpleAllocator {
 /** An allocator that works by allocating larger (continuous) pages of memory. */
 class PageAllocator {
     hashesPerPage;
-    page =
+    page = safeAllocUint8Array(0);
     currentHash = 0;
     // TODO [ToDr] Benchmark the performance!
     constructor(hashesPerPage) {
@@ -28749,7 +28764,7 @@ class PageAllocator {
     resetPage() {
         const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
         this.currentHash = 0;
-        this.page =
+        this.page = safeAllocUint8Array(pageSizeBytes);
     }
     emptyHash() {
         const startIdx = this.currentHash * HASH_SIZE;
@@ -34580,7 +34595,7 @@ class SerializedService {
     getStorage(rawKey) {
         if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
             const SERVICE_ID_BYTES = 4;
-            const serviceIdAndKey =
+            const serviceIdAndKey = safe_alloc_uint8array_safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
             serviceIdAndKey.set(numbers_u32AsLeBytes(this.serviceId));
             serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
             const key = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(hashBytes(serviceIdAndKey).raw));
@@ -34671,7 +34686,7 @@ class TrieNode {
     raw;
     constructor(
     /** Exactly 512 bits / 64 bytes */
-    raw =
+    raw = safe_alloc_uint8array_safeAllocUint8Array(TRIE_NODE_BYTES)) {
         this.raw = raw;
     }
     /** Returns the type of the node */
@@ -37393,7 +37408,7 @@ class Registers {
     bytes;
     asSigned;
     asUnsigned;
-    constructor(bytes =
+    constructor(bytes = safe_alloc_uint8array_safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
         this.bytes = bytes;
         debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
         this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
@@ -37556,7 +37571,7 @@ class Mask {
         return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
     }
     buildLookupTableForward(mask) {
-        const table =
+        const table = safe_alloc_uint8array_safeAllocUint8Array(mask.bitLength);
         let lastInstructionOffset = 0;
         for (let i = mask.bitLength - 1; i >= 0; i--) {
             if (mask.isSet(i)) {
@@ -41374,7 +41389,7 @@ class HostCalls {
         const regs = pvmInstance.getRegisters();
         const maybeAddress = regs.getLowerU32(7);
         const maybeLength = regs.getLowerU32(8);
-        const result =
+        const result = safe_alloc_uint8array_safeAllocUint8Array(maybeLength);
         const startAddress = tryAsMemoryIndex(maybeAddress);
         const loadResult = memory.loadInto(result, startAddress);
         if (loadResult.isError) {
@@ -46410,6 +46425,15 @@ var ResultValues;
     ResultValues[ResultValues["Ok"] = 0] = "Ok";
     ResultValues[ResultValues["Error"] = 1] = "Error";
 })(ResultValues || (ResultValues = {}));
+/**
+ * Getting a ring commitment is pretty expensive (hundreds of ms),
+ * yet the validators do not always change.
+ * For current benchmarks, we get a huge hit every epoch, hence
+ * to overcome that we cache the results of getting ring commitment.
+ * Note we can also tentatively populate this cache, before we even
+ * reach the epoch change block.
+ */
+const ringCommitmentCache = [];
 // TODO [ToDr] We export the entire object to allow mocking in tests.
 // Ideally we would just export functions and figure out how to mock
 // properly in ESM.
@@ -46425,9 +46449,27 @@ async function verifySeal(bandersnatch, authorKey, signature, payload, encodedUn
     }
     return result_Result.ok(bytes_Bytes.fromBlob(sealResult.subarray(1), hash_HASH_SIZE).asOpaque());
 }
-
-    const keys = bytes_BytesBlob.blobFromParts(validators.map((x) => x.raw))
-
+function getRingCommitment(bandersnatch, validators) {
+    const keys = bytes_BytesBlob.blobFromParts(validators.map((x) => x.raw));
+    // We currently compare the large bytes blob, but the number of entries in the cache
+    // must be low. If the cache ever grows larger, we should rather consider hashing the keys.
+    const MAX_CACHE_ENTRIES = 3;
+    const cacheEntry = ringCommitmentCache.find((v) => v.keys.isEqualTo(keys));
+    if (cacheEntry !== undefined) {
+        return cacheEntry.value;
+    }
+    const value = getRingCommitmentNoCache(bandersnatch, keys);
+    ringCommitmentCache.push({
+        keys,
+        value,
+    });
+    if (ringCommitmentCache.length > MAX_CACHE_ENTRIES) {
+        ringCommitmentCache.shift();
+    }
+    return value;
+}
+async function getRingCommitmentNoCache(bandersnatch, keys) {
+    const commitmentResult = await bandersnatch.getRingCommitment(keys.raw);
     if (commitmentResult[RESULT_INDEX] === ResultValues.Error) {
         return result_Result.error(null);
     }
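
Note that `getRingCommitment` stores the promise returned by `getRingCommitmentNoCache` rather than its resolved value, so a lookup that arrives while the first computation is still running shares the in-flight work. The cache itself is a tiny FIFO; a generic sketch of the same shape:

// Generic bounded FIFO memo in the shape used by getRingCommitment above.
const MAX_CACHE_ENTRIES = 3;
const cache: { key: string; value: Promise<Uint8Array> }[] = [];

function memoized(key: string, compute: () => Promise<Uint8Array>): Promise<Uint8Array> {
  const hit = cache.find((entry) => entry.key === key);
  if (hit !== undefined) {
    return hit.value;
  }
  // cache the promise itself, so concurrent callers await one computation
  const value = compute();
  cache.push({ key, value });
  if (cache.length > MAX_CACHE_ENTRIES) {
    cache.shift(); // evict the oldest entry first
  }
  return value;
}

The linear `find` is fine at three entries; as the in-diff comment says, a larger cache would want hashed keys.
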
@@ -46553,6 +46595,18 @@ class Safrole {
         }
         return sized_array_FixedSizeArray.new([newRandomnessAcc, ...rest], 4);
     }
+    /**
+     * Pre-populate cache for validator keys, and especially the ring commitment.
+     *
+     * NOTE the function is still doing quite some work, so it should only be used
+     * once per epoch. The optimisation relies on the fact that the `bandersnatch.getRingCommitment`
+     * call will be cached.
+     */
+    async prepareValidatorKeysForNextEpoch(postOffenders) {
+        const stateEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
+        const nextEpochStart = (stateEpoch + 1) * this.chainSpec.epochLength;
+        return await this.getValidatorKeys(tryAsTimeSlot(nextEpochStart), postOffenders);
+    }
     async getValidatorKeys(timeslot, postOffenders) {
         /**
          * Epoch is not changed so the previous state is returned
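
`prepareValidatorKeysForNextEpoch` simply asks `getValidatorKeys` for the first slot of the following epoch, so the expensive ring-commitment work lands in the cache before the epoch-change block arrives. A worked example of the arithmetic (the `epochLength` of 600 is an assumed value for illustration):

// With epochLength = 600 and the state at timeslot 1234:
const epochLength = 600;
const timeslot = 1234;
const stateEpoch = Math.floor(timeslot / epochLength); // 2
const nextEpochStart = (stateEpoch + 1) * epochLength; // 1800, the first slot of epoch 3
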
@@ -48710,7 +48764,7 @@ class Assign {
         const authorizationQueueStart = regs.get(8);
         // a
         const authManager = getServiceId(regs.get(9));
-        const res =
+        const res = safe_alloc_uint8array_safeAllocUint8Array(hash_HASH_SIZE * AUTHORIZATION_QUEUE_SIZE);
         const memoryReadResult = memory.loadInto(res, authorizationQueueStart);
         // error while reading the memory.
         if (memoryReadResult.isError) {
@@ -48798,7 +48852,7 @@ class Bless {
          * https://graypaper.fluffylabs.dev/#/7e6ff6a/368100368100?v=0.6.7
          */
         const autoAccumulateEntries = [];
-        const result =
+        const result = safe_alloc_uint8array_safeAllocUint8Array(tryAsExactBytes(serviceIdAndGasCodec.sizeHint));
         const decoder = decoder_Decoder.fromBlob(result);
         let memIndex = sourceStart;
         for (let i = 0n; i < numberOfItems; i += 1n) {
@@ -48815,7 +48869,7 @@ class Bless {
             memIndex = numbers_tryAsU64(memIndex + numbers_tryAsU64(decoder.bytesRead()));
         }
         // https://graypaper.fluffylabs.dev/#/7e6ff6a/367200367200?v=0.6.7
-        const res =
+        const res = safe_alloc_uint8array_safeAllocUint8Array(tryAsExactBytes(descriptors_codec.u32.sizeHint) * this.chainSpec.coresCount);
         const authorizersDecoder = decoder_Decoder.fromBlob(res);
         const memoryReadResult = memory.loadInto(res, authorization);
         if (memoryReadResult.isError) {
@@ -48911,6 +48965,7 @@ class Checkpoint {
 
 
 
+
 const designate_IN_OUT_REG = 7;
 const VALIDATOR_DATA_BYTES = tryAsExactBytes(ValidatorData.Codec.sizeHint);
 /**
@@ -48933,7 +48988,7 @@ class Designate {
     async execute(_gas, regs, memory) {
         // `o`
         const validatorsStart = regs.get(designate_IN_OUT_REG);
-        const res =
+        const res = safe_alloc_uint8array_safeAllocUint8Array(VALIDATOR_DATA_BYTES * this.chainSpec.validatorsCount);
         const memoryReadResult = memory.loadInto(res, validatorsStart);
         // error while reading the memory.
         if (memoryReadResult.isError) {
@@ -49164,7 +49219,7 @@ class Provide {
         const preimageLength = regs.get(9);
         const length = clampU64ToU32(preimageLength);
         // `i`
-        const preimage = bytes_BytesBlob.blobFrom(
+        const preimage = bytes_BytesBlob.blobFrom(safe_alloc_uint8array_safeAllocUint8Array(length));
         const memoryReadResult = memory.loadInto(preimage.raw, preimageStart);
         if (memoryReadResult.isError) {
             logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
@@ -49726,6 +49781,7 @@ const codecServiceAccountInfoWithThresholdBalance = descriptors_codec.object({
 
 
 
+
 const decoder = new TextDecoder("utf8");
 /**
  * Log message to the console
@@ -49747,8 +49803,8 @@ class LogHostCall {
         const targetLength = regs.get(9);
         const msgStart = regs.get(10);
         const msgLength = regs.get(11);
-        const target =
-        const message =
+        const target = safe_alloc_uint8array_safeAllocUint8Array(clampU64ToU32(targetLength));
+        const message = safe_alloc_uint8array_safeAllocUint8Array(clampU64ToU32(msgLength));
         if (targetStart !== 0n) {
             memory.loadInto(target, targetStart);
         }
@@ -49767,6 +49823,7 @@ class LogHostCall {
 
 
 
+
 const lookup_IN_OUT_REG = 7;
 /**
  * Lookup a preimage.
@@ -49809,7 +49866,7 @@ class Lookup {
         // NOTE [MaSo] this is ok to cast to number, because we are bounded by the
         // valueLength in both cases and valueLength is WC (4,000,000,000) + metadata
         // which is less than 2^32
-        const chunk = preImage === null ?
+        const chunk = preImage === null ? safe_alloc_uint8array_safeAllocUint8Array(0) : preImage.raw.subarray(Number(offset), Number(offset + length));
         const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
         if (memoryWriteResult.isError) {
             return PvmExecution.Panic;
@@ -49830,6 +49887,7 @@ class Lookup {
 
 
 
+
 const read_IN_OUT_REG = 7;
 /**
  * Read account storage.
@@ -49857,7 +49915,7 @@ class Read {
         const destinationAddress = regs.get(10);
         const storageKeyLengthClamped = clampU64ToU32(storageKeyLength);
         // k
-        const rawKey = bytes_BytesBlob.blobFrom(
+        const rawKey = bytes_BytesBlob.blobFrom(safe_alloc_uint8array_safeAllocUint8Array(storageKeyLengthClamped));
         const memoryReadResult = memory.loadInto(rawKey.raw, storageKeyStartAddress);
         if (memoryReadResult.isError) {
             logger_logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
@@ -49875,7 +49933,7 @@ class Read {
         // NOTE [MaSo] this is ok to cast to number, because we are bounded by the
         // valueLength in both cases and valueLength is WC (4,000,000,000) + metadata
         // which is less than 2^32
-        const chunk = value === null ?
+        const chunk = value === null ? safe_alloc_uint8array_safeAllocUint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
         const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
         if (memoryWriteResult.isError) {
             logger_logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
@@ -49926,7 +49984,7 @@ class Write {
         // v_z
         const valueLength = regs.get(10);
         const storageKeyLengthClamped = clampU64ToU32(storageKeyLength);
-        const rawStorageKey =
+        const rawStorageKey = safe_alloc_uint8array_safeAllocUint8Array(storageKeyLengthClamped);
         const keyLoadingResult = memory.loadInto(rawStorageKey, storageKeyStartAddress);
         if (keyLoadingResult.isError) {
             logger_logger.trace `WRITE() <- PANIC`;
@@ -49935,7 +49993,7 @@ class Write {
         // k
         const storageKey = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(rawStorageKey));
         const valueLengthClamped = clampU64ToU32(valueLength);
-        const value =
+        const value = safe_alloc_uint8array_safeAllocUint8Array(valueLengthClamped);
         const valueLoadingResult = memory.loadInto(value, valueStart);
         // Note [MaSo] this is ok to return bcs if valueLength is 0, then this panic won't happen
         if (valueLoadingResult.isError) {
@@ -50824,7 +50882,7 @@ function fisherYatesShuffle(arr, entropy) {
 }
 function hashToNumberSequence(entropy, length) {
     const result = new Array(length);
-    const randomBytes =
+    const randomBytes = safe_alloc_uint8array_safeAllocUint8Array(ENTROPY_BYTES + 4);
     randomBytes.set(entropy.raw);
     for (let i = 0; i < length; i++) {
         randomBytes.set(numbers_u32AsLeBytes(numbers_tryAsU32(Math.floor(i / 8))), ENTROPY_BYTES);
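
`hashToNumberSequence` reuses a single `ENTROPY_BYTES + 4` buffer: the entropy stays fixed while a 4-byte little-endian counter at the tail is bumped once per eight outputs, which suggests each hash of the buffer is sliced into eight 4-byte numbers (the slicing itself is outside this hunk). The layout in miniature, assuming 32-byte entropy:

// Sketch of the entropy-plus-counter layout (ENTROPY_BYTES = 32 is assumed).
const ENTROPY_BYTES = 32;

function counterBuffer(entropy: Uint8Array, i: number): Uint8Array {
  const randomBytes = new Uint8Array(ENTROPY_BYTES + 4);
  randomBytes.set(entropy); // bytes [0, 32): fixed entropy
  // bytes [32, 36): little-endian counter, constant for each run of 8 outputs
  new DataView(randomBytes.buffer).setUint32(ENTROPY_BYTES, Math.floor(i / 8), true);
  return randomBytes;
}
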
@@ -51854,6 +51912,7 @@ class OnChain {
     authorization;
     // chapter 13: https://graypaper.fluffylabs.dev/#/68eaa1f/18b60118b601?v=0.6.4
     statistics;
+    isReadyForNextEpoch = Promise.resolve(false);
     constructor(chainSpec, state, blocks, hasher) {
         this.chainSpec = chainSpec;
         this.state = state;
@@ -51872,6 +51931,14 @@ class OnChain {
         this.preimages = new Preimages(state);
         this.authorization = new Authorization(chainSpec, state);
     }
+    /** Pre-populate things worth caching for the next epoch. */
+    async prepareForNextEpoch() {
+        if (await this.isReadyForNextEpoch) {
+            return;
+        }
+        const ready = this.safrole.prepareValidatorKeysForNextEpoch(this.state.disputesRecords.punishSet);
+        this.isReadyForNextEpoch = ready.then((_) => true);
+    }
     async verifySeal(timeSlot, block) {
         const sealState = this.safrole.getSafroleSealState(timeSlot);
         return await this.safroleSeal.verifyHeaderSeal(block.header.view(), sealState);
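
`prepareForNextEpoch` keeps a `Promise<boolean>` rather than a plain flag: a call that arrives after preparation has started awaits the stored promise instead of kicking the work off again, and the epoch-marker check in the next hunk resets it. A condensed sketch of the pattern (names here are illustrative):

// Promise-backed "run once per epoch" guard.
class EpochPrepGuard {
  private ready: Promise<boolean> = Promise.resolve(false);

  async prepare(work: () => Promise<void>): Promise<void> {
    if (await this.ready) {
      return; // already prepared, or preparation already finished
    }
    // store the promise before it settles, so later callers share the run
    this.ready = work().then(() => true);
  }

  onEpochChange(): void {
    this.ready = Promise.resolve(false); // force a fresh preparation
  }
}
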
@@ -51880,6 +51947,10 @@ class OnChain {
         const headerView = block.header.view();
         const header = block.header.materialize();
         const timeSlot = header.timeSlotIndex;
+        // reset the epoch cache state
+        if (headerView.epochMarker.view() !== null) {
+            this.isReadyForNextEpoch = Promise.resolve(false);
+        }
         // safrole seal
         let newEntropyHash;
         if (omitSealVerification) {
@@ -52103,8 +52174,18 @@ class Importer {
         this.stf = new OnChain(spec, state, blocks, hasher);
         this.state = state;
         this.currentHash = currentBestHeaderHash;
+        this.prepareForNextEpoch();
         logger.info `😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`;
     }
+    /** Do some extra work for preparation for the next epoch. */
+    async prepareForNextEpoch() {
+        try {
+            await this.stf.prepareForNextEpoch();
+        }
+        catch (e) {
+            this.logger.error `Unable to prepare for next epoch: ${e}`;
+        }
+    }
     async importBlock(block, omitSealVerification) {
         const timer = measure("importBlock");
         const timeSlot = extractTimeSlot(block);
@@ -52140,6 +52221,7 @@ class Importer {
                 return importerError(ImporterErrorKind.Verifier, e);
             }
             this.state.updateBackend(state?.backend);
+            this.prepareForNextEpoch();
             this.currentHash = parentHash;
         }
         const timeSlot = block.header.view().timeSlotIndex.materialize();
@@ -52166,6 +52248,7 @@ class Importer {
         // TODO [ToDr] This is a temporary measure. We should rather read
         // the state of a parent block to support forks and create a fresh STF.
         this.state.updateBackend(newState.backend);
+        this.prepareForNextEpoch();
         this.currentHash = headerHash;
         logger.log `${timerState()}`;
         // insert new state and the block to DB.
@@ -52301,6 +52384,7 @@ async function mainImporter(config, withRelPath) {
     const importerReady = new ImporterReady();
     importerReady.setConfig(workerConfig);
     importerReady.setImporter(importer);
+    await importer.prepareForNextEpoch();
     const api = {
         chainSpec,
         async importBlock(block) {