@typeberry/jam 0.1.3-135961b → 0.1.3-462ca77
This diff shows the changes between publicly available package versions as they appear in their public registry. It is provided for informational purposes only.
- package/bootstrap-generator.mjs +31 -16
- package/bootstrap-generator.mjs.map +1 -1
- package/bootstrap-importer.mjs +115 -32
- package/bootstrap-importer.mjs.map +1 -1
- package/bootstrap-network.mjs +28 -13
- package/bootstrap-network.mjs.map +1 -1
- package/index.js +116 -32
- package/index.js.map +1 -1
- package/package.json +1 -1
package/bootstrap-importer.mjs (CHANGED)
@@ -4572,6 +4572,20 @@ const result_Result = {
     },
 };
 
+;// CONCATENATED MODULE: ./packages/core/utils/safe-alloc-uint8array.ts
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+const MAX_LENGTH = 2145386496;
+function safe_alloc_uint8array_safeAllocUint8Array(length) {
+    if (length > MAX_LENGTH) {
+        // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+        console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+    }
+    return new Uint8Array(Math.min(MAX_LENGTH, length));
+}
+
 ;// CONCATENATED MODULE: external "node:assert"
 const external_node_assert_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:assert");
 ;// CONCATENATED MODULE: ./packages/core/utils/test.ts
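The new `safe-alloc-uint8array.ts` utility above is the thread running through most of the hunks below: every `new Uint8Array(length)` whose length is derived from input is replaced with the capped allocator, so an oversized request is clamped and logged instead of throwing a `RangeError`. A TypeScript sketch of the behaviour, reconstructed from the bundled output (the original source may differ, e.g. in the exported name):

```ts
// Reconstructed sketch of packages/core/utils/safe-alloc-uint8array.ts.
// About 2GB — the maximum ArrayBuffer length on Chrome.
const MAX_LENGTH = 2_145_386_496;

/** Allocate a Uint8Array, clamping the length to a safe maximum instead of throwing. */
export function safeAllocUint8Array(length: number): Uint8Array {
  if (length > MAX_LENGTH) {
    console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
  }
  return new Uint8Array(Math.min(MAX_LENGTH, length));
}

// Example: a hostile length from an untrusted input no longer aborts with
// a RangeError; the allocation is clamped and a warning is logged.
const buf = safeAllocUint8Array(Number.MAX_SAFE_INTEGER);
console.log(buf.length); // 2145386496
```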
@@ -4815,6 +4829,7 @@ function isResult(x) {
 
 
 
+
 ;// CONCATENATED MODULE: ./packages/core/bytes/bitvec.ts
 
 /**
@@ -4836,7 +4851,7 @@ class BitVec {
      * Create new [`BitVec`] with all values set to `false`.
      */
     static empty(bitLength) {
-        const data = new Uint8Array(Math.ceil(bitLength / 8));
+        const data = safe_alloc_uint8array_safeAllocUint8Array(Math.ceil(bitLength / 8));
         return new BitVec(data, bitLength);
     }
     byteLength;
@@ -5036,7 +5051,7 @@ class bytes_BytesBlob {
     static blobFromParts(v, ...rest) {
         const vArr = v instanceof Uint8Array ? [v] : v;
         const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-        const buffer = new Uint8Array(totalLength);
+        const buffer = safe_alloc_uint8array_safeAllocUint8Array(totalLength);
         let offset = 0;
         for (const r of vArr) {
             buffer.set(r, offset);
@@ -5109,7 +5124,7 @@ class bytes_Bytes extends bytes_BytesBlob {
     }
     /** Create an empty [`Bytes<X>`] of given length. */
     static zero(len) {
-        return new bytes_Bytes(new Uint8Array(len), len);
+        return new bytes_Bytes(safe_alloc_uint8array_safeAllocUint8Array(len), len);
     }
     // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
     /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
@@ -5695,7 +5710,7 @@ async function ed25519_verify(input) {
         return Promise.resolve([]);
     }
     const dataLength = input.reduce((acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1, 0);
-    const data = new Uint8Array(dataLength);
+    const data = safe_alloc_uint8array_safeAllocUint8Array(dataLength);
     let offset = 0;
     for (const { key, message, signature } of input) {
         data.set(key.raw, offset);
@@ -5782,7 +5797,7 @@ class allocator_SimpleAllocator {
 /** An allocator that works by allocating larger (continuous) pages of memory. */
 class PageAllocator {
     hashesPerPage;
-    page = new Uint8Array(0);
+    page = safeAllocUint8Array(0);
     currentHash = 0;
     // TODO [ToDr] Benchmark the performance!
     constructor(hashesPerPage) {
@@ -5793,7 +5808,7 @@ class PageAllocator {
     resetPage() {
         const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
         this.currentHash = 0;
-        this.page = new Uint8Array(pageSizeBytes);
+        this.page = safeAllocUint8Array(pageSizeBytes);
     }
     emptyHash() {
         const startIdx = this.currentHash * HASH_SIZE;
@@ -6517,7 +6532,7 @@ function addSizeHints(a, b) {
     };
 }
 const DEFAULT_START_LENGTH = 512; // 512B
-const MAX_LENGTH = 10 * 1024 * 1024; // 10MB
+const encoder_MAX_LENGTH = 10 * 1024 * 1024; // 10MB
 /**
  * JAM encoder.
  */
@@ -6533,7 +6548,7 @@ class encoder_Encoder {
             return new encoder_Encoder(options.destination);
         }
         const startLength = options?.expectedLength ?? DEFAULT_START_LENGTH;
-        const buffer = new ArrayBuffer(Math.min(MAX_LENGTH, startLength), { maxByteLength: MAX_LENGTH });
+        const buffer = new ArrayBuffer(Math.min(encoder_MAX_LENGTH, startLength), { maxByteLength: encoder_MAX_LENGTH });
         const destination = new Uint8Array(buffer);
         return new encoder_Encoder(destination, buffer);
     }
@@ -6866,11 +6881,11 @@ class encoder_Encoder {
     ensureBigEnough(length, options = { silent: false }) {
         debug_check `${length >= 0} Negative length given`;
         const newLength = this.offset + length;
-        if (newLength > MAX_LENGTH) {
+        if (newLength > encoder_MAX_LENGTH) {
             if (options.silent) {
                 return;
             }
-            throw new Error(`The encoded size would reach the maximum of ${MAX_LENGTH}.`);
+            throw new Error(`The encoded size would reach the maximum of ${encoder_MAX_LENGTH}.`);
         }
         if (newLength > this.destination.length) {
             // we can try to resize the underlying buffer
@@ -6878,7 +6893,7 @@ class encoder_Encoder {
             // make sure we at least double the size of the buffer every time.
             const minExtend = Math.max(newLength, this.buffer.byteLength << 1);
             // but we must never exceed the max length.
-            this.buffer.resize(Math.min(MAX_LENGTH, minExtend));
+            this.buffer.resize(Math.min(encoder_MAX_LENGTH, minExtend));
         }
         // and then check again
         if (newLength > this.destination.length) {
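The encoder hunks rename its 10MB cap from `MAX_LENGTH` to `encoder_MAX_LENGTH` (the bundler now has to disambiguate it from the new allocator's own `MAX_LENGTH`), but they also illustrate the growth strategy: a resizable `ArrayBuffer` that starts small, at least doubles on demand, and can never exceed `maxByteLength`. A standalone sketch of that pattern (illustrative, not the package's API; requires an ES2024 runtime):

```ts
// Growth pattern used by the encoder: a resizable ArrayBuffer (ES2024)
// that starts small and is doubled in place, capped at maxByteLength.
const MAX = 10 * 1024 * 1024; // 10MB cap

const buffer = new ArrayBuffer(512, { maxByteLength: MAX });
const view = new Uint8Array(buffer); // length-tracking view follows resizes

function ensureBigEnough(offset: number, extra: number): void {
  const newLength = offset + extra;
  if (newLength > MAX) {
    throw new Error(`The encoded size would reach the maximum of ${MAX}.`);
  }
  if (newLength > buffer.byteLength) {
    // at least double the size every time, but never exceed the cap
    const minExtend = Math.max(newLength, buffer.byteLength << 1);
    buffer.resize(Math.min(MAX, minExtend));
  }
}

ensureBigEnough(0, 4096);
console.log(view.length); // 4096 (512 doubled wouldn't fit, so grown to fit)
```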
@@ -12036,7 +12051,7 @@ class SerializedService {
     getStorage(rawKey) {
         if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
             const SERVICE_ID_BYTES = 4;
-            const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
+            const serviceIdAndKey = safe_alloc_uint8array_safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
             serviceIdAndKey.set(numbers_u32AsLeBytes(this.serviceId));
             serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
             const key = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(hashBytes(serviceIdAndKey).raw));
@@ -12127,7 +12142,7 @@ class TrieNode {
     raw;
     constructor(
     /** Exactly 512 bits / 64 bytes */
-    raw = new Uint8Array(TRIE_NODE_BYTES)) {
+    raw = safe_alloc_uint8array_safeAllocUint8Array(TRIE_NODE_BYTES)) {
         this.raw = raw;
     }
     /** Returns the type of the node */
@@ -14572,7 +14587,7 @@ class Registers {
     bytes;
     asSigned;
     asUnsigned;
-    constructor(bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
+    constructor(bytes = safe_alloc_uint8array_safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
         this.bytes = bytes;
         debug_check `${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
         this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
@@ -14735,7 +14750,7 @@ class Mask {
         return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
     }
     buildLookupTableForward(mask) {
-        const table = new Uint8Array(mask.bitLength);
+        const table = safe_alloc_uint8array_safeAllocUint8Array(mask.bitLength);
         let lastInstructionOffset = 0;
         for (let i = mask.bitLength - 1; i >= 0; i--) {
             if (mask.isSet(i)) {
@@ -18553,7 +18568,7 @@ class HostCalls {
         const regs = pvmInstance.getRegisters();
         const maybeAddress = regs.getLowerU32(7);
         const maybeLength = regs.getLowerU32(8);
-        const result = new Uint8Array(maybeLength);
+        const result = safe_alloc_uint8array_safeAllocUint8Array(maybeLength);
         const startAddress = tryAsMemoryIndex(maybeAddress);
         const loadResult = memory.loadInto(result, startAddress);
         if (loadResult.isError) {
@@ -19635,6 +19650,15 @@ var ResultValues;
     ResultValues[ResultValues["Ok"] = 0] = "Ok";
     ResultValues[ResultValues["Error"] = 1] = "Error";
 })(ResultValues || (ResultValues = {}));
+/**
+ * Getting a ring commitment is pretty expensive (hundreds of ms),
+ * yet the validators do not always change.
+ * For current benchmarks, we get a huge hit every epoch, hence
+ * to overcome that we cache the results of getting ring commitment.
+ * Note we can also tentatively populate this cache, before we even
+ * reach the epoch change block.
+ */
+const ringCommitmentCache = [];
 // TODO [ToDr] We export the entire object to allow mocking in tests.
 // Ideally we would just export functions and figure out how to mock
 // properly in ESM.
@@ -19650,9 +19674,27 @@ async function verifySeal(bandersnatch, authorKey, signature, payload, encodedUn
     }
     return result_Result.ok(bytes_Bytes.fromBlob(sealResult.subarray(1), hash_HASH_SIZE).asOpaque());
 }
-
-const keys = bytes_BytesBlob.blobFromParts(validators.map((x) => x.raw))
-
+function getRingCommitment(bandersnatch, validators) {
+    const keys = bytes_BytesBlob.blobFromParts(validators.map((x) => x.raw));
+    // We currently compare the large bytes blob, but the number of entries in the cache
+    // must be low. If the cache ever grows larger, we should rather consider hashing the keys.
+    const MAX_CACHE_ENTRIES = 3;
+    const cacheEntry = ringCommitmentCache.find((v) => v.keys.isEqualTo(keys));
+    if (cacheEntry !== undefined) {
+        return cacheEntry.value;
+    }
+    const value = getRingCommitmentNoCache(bandersnatch, keys);
+    ringCommitmentCache.push({
+        keys,
+        value,
+    });
+    if (ringCommitmentCache.length > MAX_CACHE_ENTRIES) {
+        ringCommitmentCache.shift();
+    }
+    return value;
+}
+async function getRingCommitmentNoCache(bandersnatch, keys) {
+    const commitmentResult = await bandersnatch.getRingCommitment(keys.raw);
     if (commitmentResult[RESULT_INDEX] === ResultValues.Error) {
         return result_Result.error(null);
     }
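The new `getRingCommitment` is a small bounded cache keyed by the concatenated validator keys. Two details worth noting: lookups compare whole key blobs (cheap only because the cache holds at most three entries), and the cached `value` is the *promise* returned by `getRingCommitmentNoCache`, so callers that arrive while the computation is in flight share it. A self-contained sketch of the same pattern (type and function names here are stand-ins, not the package's API):

```ts
// A minimal sketch of the bounded, promise-valued cache used above.
// `Blob`, `isEqualTo`, and `computeCommitment` are stand-ins for the
// package's own types; only the caching pattern is the point.
interface Blob { raw: Uint8Array; isEqualTo(other: Blob): boolean }

const MAX_CACHE_ENTRIES = 3;
const cache: { keys: Blob; value: Promise<Uint8Array> }[] = [];

function getCommitment(
  keys: Blob,
  computeCommitment: (raw: Uint8Array) => Promise<Uint8Array>,
): Promise<Uint8Array> {
  const hit = cache.find((entry) => entry.keys.isEqualTo(keys));
  if (hit !== undefined) {
    return hit.value;
  }
  // Cache the promise, not the result: callers that arrive before the
  // expensive computation finishes share one computation.
  const value = computeCommitment(keys.raw);
  cache.push({ keys, value });
  if (cache.length > MAX_CACHE_ENTRIES) {
    cache.shift(); // FIFO eviction keeps the linear scan above cheap
  }
  return value;
}
```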
@@ -19778,6 +19820,18 @@ class Safrole {
         }
         return sized_array_FixedSizeArray.new([newRandomnessAcc, ...rest], 4);
     }
+    /**
+     * Pre-populate cache for validator keys, and especially the ring commitment.
+     *
+     * NOTE the function is still doing quite some work, so it should only be used
+     * once per epoch. The optimisation relies on the fact that the `bandersnatch.getRingCommitment`
+     * call will be cached.
+     */
+    async prepareValidatorKeysForNextEpoch(postOffenders) {
+        const stateEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
+        const nextEpochStart = (stateEpoch + 1) * this.chainSpec.epochLength;
+        return await this.getValidatorKeys(common_tryAsTimeSlot(nextEpochStart), postOffenders);
+    }
     async getValidatorKeys(timeslot, postOffenders) {
         /**
          * Epoch is not changed so the previous state is returned
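The next-epoch arithmetic in `prepareValidatorKeysForNextEpoch` is plain integer math; a worked example (the epoch length is an illustrative value, not a chain constant):

```ts
const epochLength = 600;
const timeslot = 1234;

const stateEpoch = Math.floor(timeslot / epochLength); // 2
const nextEpochStart = (stateEpoch + 1) * epochLength; // 1800, first slot of epoch 3
```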
@@ -21935,7 +21989,7 @@ class Assign {
         const authorizationQueueStart = regs.get(8);
         // a
         const authManager = getServiceId(regs.get(9));
-        const res = new Uint8Array(hash_HASH_SIZE * AUTHORIZATION_QUEUE_SIZE);
+        const res = safe_alloc_uint8array_safeAllocUint8Array(hash_HASH_SIZE * AUTHORIZATION_QUEUE_SIZE);
         const memoryReadResult = memory.loadInto(res, authorizationQueueStart);
         // error while reading the memory.
         if (memoryReadResult.isError) {
@@ -22023,7 +22077,7 @@ class Bless {
          * https://graypaper.fluffylabs.dev/#/7e6ff6a/368100368100?v=0.6.7
          */
         const autoAccumulateEntries = [];
-        const result = new Uint8Array(tryAsExactBytes(serviceIdAndGasCodec.sizeHint));
+        const result = safe_alloc_uint8array_safeAllocUint8Array(tryAsExactBytes(serviceIdAndGasCodec.sizeHint));
         const decoder = decoder_Decoder.fromBlob(result);
         let memIndex = sourceStart;
         for (let i = 0n; i < numberOfItems; i += 1n) {
@@ -22040,7 +22094,7 @@ class Bless {
             memIndex = numbers_tryAsU64(memIndex + numbers_tryAsU64(decoder.bytesRead()));
         }
         // https://graypaper.fluffylabs.dev/#/7e6ff6a/367200367200?v=0.6.7
-        const res = new Uint8Array(tryAsExactBytes(descriptors_codec.u32.sizeHint) * this.chainSpec.coresCount);
+        const res = safe_alloc_uint8array_safeAllocUint8Array(tryAsExactBytes(descriptors_codec.u32.sizeHint) * this.chainSpec.coresCount);
         const authorizersDecoder = decoder_Decoder.fromBlob(res);
         const memoryReadResult = memory.loadInto(res, authorization);
         if (memoryReadResult.isError) {
@@ -22136,6 +22190,7 @@ class Checkpoint {
 
 
 
+
 const designate_IN_OUT_REG = 7;
 const VALIDATOR_DATA_BYTES = tryAsExactBytes(ValidatorData.Codec.sizeHint);
 /**
@@ -22158,7 +22213,7 @@ class Designate {
     async execute(_gas, regs, memory) {
         // `o`
         const validatorsStart = regs.get(designate_IN_OUT_REG);
-        const res = new Uint8Array(VALIDATOR_DATA_BYTES * this.chainSpec.validatorsCount);
+        const res = safe_alloc_uint8array_safeAllocUint8Array(VALIDATOR_DATA_BYTES * this.chainSpec.validatorsCount);
         const memoryReadResult = memory.loadInto(res, validatorsStart);
         // error while reading the memory.
         if (memoryReadResult.isError) {
@@ -22389,7 +22444,7 @@ class Provide {
         const preimageLength = regs.get(9);
         const length = clampU64ToU32(preimageLength);
         // `i`
-        const preimage = bytes_BytesBlob.blobFrom(new Uint8Array(length));
+        const preimage = bytes_BytesBlob.blobFrom(safe_alloc_uint8array_safeAllocUint8Array(length));
         const memoryReadResult = memory.loadInto(preimage.raw, preimageStart);
         if (memoryReadResult.isError) {
             logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
@@ -22951,6 +23006,7 @@ const codecServiceAccountInfoWithThresholdBalance = descriptors_codec.object({
 
 
 
+
 const decoder = new TextDecoder("utf8");
 /**
  * Log message to the console
@@ -22972,8 +23028,8 @@ class LogHostCall {
         const targetLength = regs.get(9);
         const msgStart = regs.get(10);
         const msgLength = regs.get(11);
-        const target = new Uint8Array(clampU64ToU32(targetLength));
-        const message = new Uint8Array(clampU64ToU32(msgLength));
+        const target = safe_alloc_uint8array_safeAllocUint8Array(clampU64ToU32(targetLength));
+        const message = safe_alloc_uint8array_safeAllocUint8Array(clampU64ToU32(msgLength));
         if (targetStart !== 0n) {
             memory.loadInto(target, targetStart);
         }
@@ -22992,6 +23048,7 @@ class LogHostCall {
 
 
 
+
 const lookup_IN_OUT_REG = 7;
 /**
  * Lookup a preimage.
@@ -23034,7 +23091,7 @@ class Lookup {
         // NOTE [MaSo] this is ok to cast to number, because we are bounded by the
         // valueLength in both cases and valueLength is WC (4,000,000,000) + metadata
         // which is less than 2^32
-        const chunk = preImage === null ? new Uint8Array(0) : preImage.raw.subarray(Number(offset), Number(offset + length));
+        const chunk = preImage === null ? safe_alloc_uint8array_safeAllocUint8Array(0) : preImage.raw.subarray(Number(offset), Number(offset + length));
         const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
         if (memoryWriteResult.isError) {
             return PvmExecution.Panic;
@@ -23055,6 +23112,7 @@ class Lookup {
 
 
 
+
 const read_IN_OUT_REG = 7;
 /**
  * Read account storage.
@@ -23082,7 +23140,7 @@ class Read {
         const destinationAddress = regs.get(10);
         const storageKeyLengthClamped = clampU64ToU32(storageKeyLength);
         // k
-        const rawKey = bytes_BytesBlob.blobFrom(new Uint8Array(storageKeyLengthClamped));
+        const rawKey = bytes_BytesBlob.blobFrom(safe_alloc_uint8array_safeAllocUint8Array(storageKeyLengthClamped));
         const memoryReadResult = memory.loadInto(rawKey.raw, storageKeyStartAddress);
         if (memoryReadResult.isError) {
             logger_logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
@@ -23100,7 +23158,7 @@ class Read {
         // NOTE [MaSo] this is ok to cast to number, because we are bounded by the
         // valueLength in both cases and valueLength is WC (4,000,000,000) + metadata
         // which is less than 2^32
-        const chunk = value === null ? new Uint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
+        const chunk = value === null ? safe_alloc_uint8array_safeAllocUint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
         const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
         if (memoryWriteResult.isError) {
             logger_logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
@@ -23151,7 +23209,7 @@ class Write {
         // v_z
         const valueLength = regs.get(10);
         const storageKeyLengthClamped = clampU64ToU32(storageKeyLength);
-        const rawStorageKey = new Uint8Array(storageKeyLengthClamped);
+        const rawStorageKey = safe_alloc_uint8array_safeAllocUint8Array(storageKeyLengthClamped);
         const keyLoadingResult = memory.loadInto(rawStorageKey, storageKeyStartAddress);
         if (keyLoadingResult.isError) {
             logger_logger.trace `WRITE() <- PANIC`;
@@ -23160,7 +23218,7 @@ class Write {
         // k
         const storageKey = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(rawStorageKey));
         const valueLengthClamped = clampU64ToU32(valueLength);
-        const value = new Uint8Array(valueLengthClamped);
+        const value = safe_alloc_uint8array_safeAllocUint8Array(valueLengthClamped);
         const valueLoadingResult = memory.loadInto(value, valueStart);
         // Note [MaSo] this is ok to return bcs if valueLength is 0, then this panic won't happen
         if (valueLoadingResult.isError) {
@@ -24049,7 +24107,7 @@ function fisherYatesShuffle(arr, entropy) {
 }
 function hashToNumberSequence(entropy, length) {
     const result = new Array(length);
-    const randomBytes = new Uint8Array(ENTROPY_BYTES + 4);
+    const randomBytes = safe_alloc_uint8array_safeAllocUint8Array(ENTROPY_BYTES + 4);
     randomBytes.set(entropy.raw);
     for (let i = 0; i < length; i++) {
         randomBytes.set(numbers_u32AsLeBytes(numbers_tryAsU32(Math.floor(i / 8))), ENTROPY_BYTES);
@@ -25079,6 +25137,7 @@ class OnChain {
     authorization;
     // chapter 13: https://graypaper.fluffylabs.dev/#/68eaa1f/18b60118b601?v=0.6.4
     statistics;
+    isReadyForNextEpoch = Promise.resolve(false);
     constructor(chainSpec, state, blocks, hasher) {
         this.chainSpec = chainSpec;
         this.state = state;
@@ -25097,6 +25156,14 @@ class OnChain {
         this.preimages = new Preimages(state);
         this.authorization = new Authorization(chainSpec, state);
     }
+    /** Pre-populate things worth caching for the next epoch. */
+    async prepareForNextEpoch() {
+        if (await this.isReadyForNextEpoch) {
+            return;
+        }
+        const ready = this.safrole.prepareValidatorKeysForNextEpoch(this.state.disputesRecords.punishSet);
+        this.isReadyForNextEpoch = ready.then((_) => true);
+    }
     async verifySeal(timeSlot, block) {
         const sealState = this.safrole.getSafroleSealState(timeSlot);
         return await this.safroleSeal.verifyHeaderSeal(block.header.view(), sealState);
@@ -25105,6 +25172,10 @@ class OnChain {
         const headerView = block.header.view();
         const header = block.header.materialize();
         const timeSlot = header.timeSlotIndex;
+        // reset the epoch cache state
+        if (headerView.epochMarker.view() !== null) {
+            this.isReadyForNextEpoch = Promise.resolve(false);
+        }
         // safrole seal
         let newEntropyHash;
         if (omitSealVerification) {
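Taken together, the three `OnChain` hunks implement a small idempotency gate. `isReadyForNextEpoch` holds a `Promise<boolean>` rather than a plain boolean: once a preparation has been kicked off, later calls await it instead of starting duplicate work, and seeing an epoch marker in an imported header resets the gate. A standalone sketch of the pattern (illustrative names):

```ts
// Sketch of the promise-based "prepare once per epoch" gate used above.
// `expensivePrepare` stands in for Safrole.prepareValidatorKeysForNextEpoch.
class EpochGate {
  private isReady: Promise<boolean> = Promise.resolve(false);

  async prepare(expensivePrepare: () => Promise<void>): Promise<void> {
    // Awaiting the stored promise means a call that arrives while a previous
    // preparation is still running waits for it instead of duplicating it.
    if (await this.isReady) {
      return;
    }
    this.isReady = expensivePrepare().then(() => true);
  }

  // Called when an epoch marker is seen: the cached work is stale now.
  reset(): void {
    this.isReady = Promise.resolve(false);
  }
}
```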
@@ -25328,8 +25399,18 @@ class Importer {
         this.stf = new OnChain(spec, state, blocks, hasher);
         this.state = state;
         this.currentHash = currentBestHeaderHash;
+        this.prepareForNextEpoch();
         logger.info `😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`;
     }
+    /** Do some extra work for preparation for the next epoch. */
+    async prepareForNextEpoch() {
+        try {
+            await this.stf.prepareForNextEpoch();
+        }
+        catch (e) {
+            this.logger.error `Unable to prepare for next epoch: ${e}`;
+        }
+    }
     async importBlock(block, omitSealVerification) {
         const timer = measure("importBlock");
         const timeSlot = extractTimeSlot(block);
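`Importer.prepareForNextEpoch` is invoked without `await` — in the constructor above and on the import paths below — so the warm-up runs in the background while its try/catch converts any failure into a log line rather than an unhandled rejection. A minimal sketch of that fire-and-forget wrapper (illustrative, hypothetical names):

```ts
// Fire-and-forget background warm-up: callers invoke warmUp() without
// awaiting it; errors are logged instead of surfacing as unhandled rejections.
async function warmUp(prepare: () => Promise<void>, logError: (e: unknown) => void): Promise<void> {
  try {
    await prepare();
  } catch (e) {
    logError(e);
  }
}

// Usage: kick it off and keep importing blocks.
void warmUp(
  async () => { /* expensive per-epoch preparation */ },
  (e) => console.error(`Unable to prepare for next epoch: ${e}`),
);
```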
@@ -25365,6 +25446,7 @@ class Importer {
             return importerError(ImporterErrorKind.Verifier, e);
         }
         this.state.updateBackend(state?.backend);
+        this.prepareForNextEpoch();
         this.currentHash = parentHash;
     }
     const timeSlot = block.header.view().timeSlotIndex.materialize();
@@ -25391,6 +25473,7 @@ class Importer {
         // TODO [ToDr] This is a temporary measure. We should rather read
         // the state of a parent block to support forks and create a fresh STF.
         this.state.updateBackend(newState.backend);
+        this.prepareForNextEpoch();
         this.currentHash = headerHash;
         logger.log `${timerState()}`;
         // insert new state and the block to DB.