@typeberry/jam 0.1.0-b2d0b72 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bandersnatch/6b655f8772c01b768329.js +1 -0
- package/bandersnatch/ccf8ada94096a8f232f5.js +1 -0
- package/bandersnatch/e2fdc1b646378dd96eda.js +1 -0
- package/bandersnatch/index.js +3037 -0
- package/bandersnatch/index.js.map +1 -0
- package/bandersnatch/package.json +3 -0
- package/bandersnatch/sourcemap-register.cjs +1 -0
- package/block-generator/index.js +125 -212
- package/block-generator/index.js.map +1 -1
- package/bootstrap-bandersnatch.mjs +162 -0
- package/bootstrap-bandersnatch.mjs.map +1 -0
- package/importer/bootstrap-bandersnatch.mjs.map +1 -0
- package/importer/index.js +592 -282
- package/importer/index.js.map +1 -1
- package/index.js +589 -282
- package/index.js.map +1 -1
- package/jam-network/index.js +93 -139
- package/jam-network/index.js.map +1 -1
- package/package.json +1 -1
package/importer/index.js
CHANGED
@@ -4224,17 +4224,10 @@ async function initAll() {
     await init.ed25519();
     await init.reedSolomon();
 }
-function initOnce(doInit) {
-    let ready = null;
-    return async () => {
-        if (ready === null) ready = doInit();
-        return await ready;
-    };
-}
 const init = {
-    bandersnatch:
-    ed25519:
-    reedSolomon:
+    bandersnatch: async () => await bandersnatch_default({ module_or_path: await bandersnatch_bg_default() }),
+    ed25519: async () => await ed25519_wasm_default({ module_or_path: await ed25519_wasm_bg_default() }),
+    reedSolomon: async () => await reed_solomon_wasm_default({ module_or_path: await reed_solomon_wasm_bg_default() })
 };
 
 //#endregion
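The removed `initOnce` helper memoized the first initialization promise so that concurrent callers of `init.bandersnatch()` and friends shared a single in-flight WASM instantiation; after this change each `init.*` entry is a plain async factory. A sketch of the removed pattern, reconstructed from the deleted lines above:

```ts
// Memoize an async initializer: the first call stores the promise and all
// later (including concurrent) calls await that same promise.
function initOnce<T>(doInit: () => Promise<T>): () => Promise<T> {
  let ready: Promise<T> | null = null;
  return async () => {
    if (ready === null) ready = doInit();
    return await ready;
  };
}
```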
@@ -4334,18 +4327,30 @@ function isBrowser() {
  * We avoid using `node:assert` to keep compatibility with a browser environment.
  * Note the checks should not have any side effects, since we might decide
  * to remove all of them in a post-processing step.
- *
- * NOTE the function is intended to be used as tagged template string for the performance
- * reasons.
  */
-function debug_check(
+function debug_check(condition, message) {
     if (!condition) {
-
-        data.unshift("");
-        const message = strings.map((v, index) => `${v}${data[index] ?? ""}`);
-        throw new Error(`Assertion failure:${message.join("")}`);
+        throw new Error(`Assertion failure: ${message ?? ""}`);
     }
 }
+function cast(_a, condition) {
+    return condition;
+}
+/**
+ * Yet another function to perform runtime assertions.
+ * This function returns a new type to mark in the code that this value was checked and you don't have to do it again.
+ *
+ * In the post-processing step all usages of this functions should be replaced with simple casting. An example:
+ * const x = checkAndType<number, CheckedNumber>(y);
+ * should be replaced with:
+ * const x = y as CheckedNumber;
+ */
+function ensure(a, condition, message) {
+    if (cast(a, condition)) {
+        return a;
+    }
+    throw new Error(`Assertion failure: ${message ?? ""}`);
+}
 /**
  * The function can be used to make sure that a particular type is `never`
  * at some point in the code.
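This hunk swaps the tagged-template assertion for a plain two-argument `debug_check(condition, message)` and introduces `ensure`, which performs the check and returns the value so call sites can treat it as already verified (and, per its doc comment, can be compiled down to a bare cast in post-processing). Call-site shapes, assuming the functions above are in scope:

```ts
const isU8 = (v: number) => (v & 0xff) === v;
// Old shape (tagged template), as seen in the deletions later in this diff:
//   debug_check`${isU8(v)} input must have one-byte representation, got ${v}`;
// New shape: condition and message are separate arguments.
debug_check(isU8(200), `input must have one-byte representation, got ${200}`); // passes
// `ensure` folds check-and-return into a single expression:
const u8 = ensure(300, isU8(300), `input must have one-byte representation, got ${300}`); // throws
```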
@@ -4514,7 +4519,7 @@ function resultToString(res) {
 const result_Result = {
     /** Create new [`Result`] with `Ok` status. */
     ok: (ok) => {
-        debug_check
+        debug_check(ok !== undefined, "`Ok` type cannot be undefined.");
         return {
             isOk: true,
             isError: false,
@@ -4523,7 +4528,7 @@ const result_Result = {
     },
     /** Create new [`Result`] with `Error` status. */
     error: (error, details = "") => {
-        debug_check
+        debug_check(error !== undefined, "`Error` type cannot be undefined.");
         return {
             isOk: false,
             isError: true,
@@ -4807,10 +4812,7 @@ class BitVec {
     constructor(data, bitLength) {
         this.data = data;
         this.bitLength = bitLength;
-        debug_check `
-        ${data.length * 8 >= bitLength}
-        Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.
-        `;
+        debug_check(data.length * 8 >= bitLength, `Not enough bytes in the data array. Need ${data.length * 8} has ${bitLength}.`);
         this.byteLength = Math.ceil(bitLength / 8);
     }
     /** Return a raw in-memory representation of this [`BitVec`]. */
@@ -4819,10 +4821,7 @@ class BitVec {
     }
     /** Perform OR operation on all bits in place. */
     sumWith(other) {
-        debug_check `
-        ${other.bitLength === this.bitLength}
-        Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}
-        `;
+        debug_check(other.bitLength === this.bitLength, `Invalid bit length for sumWith: ${other.bitLength} vs ${this.bitLength}`);
         const otherRaw = other.raw;
         for (let i = 0; i < this.byteLength; i++) {
             this.data[i] |= otherRaw[i];
@@ -4832,7 +4831,7 @@ class BitVec {
      * Set the bit at index `idx` to value `val`.
      */
     setBit(idx, val) {
-        debug_check
+        debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
         const byteIndex = Math.floor(idx / 8);
         const bitIndexInByte = idx % 8;
         const mask = 1 << bitIndexInByte;
@@ -4847,7 +4846,7 @@ class BitVec {
      * Return `true` if the bit at index `idx` is set.
      */
     isSet(idx) {
-        debug_check
+        debug_check(idx < this.bitLength, `Index out of bounds. Need ${idx} has ${this.bitLength}.`);
         const byteIndex = Math.floor(idx / 8);
         const bitIndexInByte = idx % 8;
         const mask = 1 << bitIndexInByte;
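Both `setBit` and `isSet` share the same LSB-first-within-a-byte addressing visible in the context lines above. A worked instance:

```ts
// For idx = 13: the bit lives in byte 1, position 5 (LSB-first within a byte).
const idx = 13;
const byteIndex = Math.floor(idx / 8); // 1
const bitIndexInByte = idx % 8;        // 5
const mask = 1 << bitIndexInByte;      // 0b0010_0000 = 32
// setBit: data[byteIndex] |= mask;  isSet: (data[byteIndex] & mask) !== 0
```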
@@ -5014,7 +5013,7 @@ class bytes_BytesBlob {
     }
     /** Create a new [`BytesBlob`] from an array of bytes. */
     static blobFromNumbers(v) {
-        debug_check
+        debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "BytesBlob.blobFromNumbers used with non-byte number array.");
         const arr = new Uint8Array(v);
         return new bytes_BytesBlob(arr);
     }
@@ -5058,7 +5057,7 @@ class bytes_Bytes extends bytes_BytesBlob {
     length;
     constructor(raw, len) {
         super(raw);
-        debug_check
+        debug_check(raw.byteLength === len, `Given buffer has incorrect size ${raw.byteLength} vs expected ${len}`);
         this.length = len;
     }
     /** Create new [`Bytes<X>`] given a backing buffer and it's length. */
@@ -5067,7 +5066,7 @@ class bytes_Bytes extends bytes_BytesBlob {
     }
     /** Create new [`Bytes<X>`] given an array of bytes and it's length. */
     static fromNumbers(v, len) {
-        debug_check
+        debug_check(v.find((x) => (x & 0xff) !== x) === undefined, "Bytes.fromNumbers used with non-byte number array.");
         const x = new Uint8Array(v);
         return new bytes_Bytes(x, len);
     }
@@ -5078,7 +5077,7 @@ class bytes_Bytes extends bytes_BytesBlob {
     // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
     /** Create a [`Bytes<X>`] with all bytes filled with given input number. */
     static fill(len, input) {
-        debug_check
+        debug_check((input & 0xff) === input, "Input has to be a byte.");
         const bytes = bytes_Bytes.zero(len);
         bytes.raw.fill(input, 0, len);
         return bytes;
@@ -5101,7 +5100,7 @@ class bytes_Bytes extends bytes_BytesBlob {
     }
     /** Compare the sequence to another one. */
     isEqualTo(other) {
-        debug_check
+        debug_check(this.length === other.length, "Comparing incorrectly typed bytes!");
         return u8ArraySameLengthEqual(this.raw, other.raw);
     }
     /** Converts current type into some opaque extension. */
@@ -5110,7 +5109,7 @@ class bytes_Bytes extends bytes_BytesBlob {
     }
 }
 function byteFromString(s) {
-    debug_check
+    debug_check(s.length === 2, "Two-character string expected");
     const a = numberFromCharCode(s.charCodeAt(0));
     const b = numberFromCharCode(s.charCodeAt(1));
     return (a << 4) | b;
@@ -5179,7 +5178,7 @@ const BLS_KEY_BYTES = 144;
 /** Derive a Bandersnatch public key from a seed. */
 function bandersnatch_publicKey(seed) {
     const key = bandersnatch.derive_public_key(seed);
-    check
+    check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
     return Bytes.fromBlob(key.subarray(1), BANDERSNATCH_KEY_BYTES).asOpaque();
 }
 
@@ -5667,7 +5666,7 @@ async function ed25519_verify(input) {
     data.set(signature.raw, offset);
     offset += ED25519_SIGNATURE_BYTES;
     const messageLength = message.length;
-    debug_check
+    debug_check(messageLength < 256, `Message needs to be shorter than 256 bytes. Got: ${messageLength}`);
     data[offset] = messageLength;
     offset += 1;
     data.set(message.raw, offset);
@@ -5696,7 +5695,6 @@ async function verifyBatch(input) {
 
 ;// CONCATENATED MODULE: ./packages/core/hash/hash.ts
 
-
 /**
  * Size of the output of the hash functions.
  *
@@ -5706,7 +5704,6 @@ async function verifyBatch(input) {
 const hash_HASH_SIZE = 32;
 /** A hash without last byte (useful for trie representation). */
 const TRUNCATED_HASH_SIZE = 31;
-const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE);
 /**
  * Container for some object with a hash that is related to this object.
  *
@@ -5751,7 +5748,7 @@ class PageAllocator {
     // TODO [ToDr] Benchmark the performance!
     constructor(hashesPerPage) {
         this.hashesPerPage = hashesPerPage;
-        check
+        check(hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage, "Expected a non-zero integer.");
         this.resetPage();
     }
     resetPage() {
@@ -5845,53 +5842,42 @@ function keccak_hashBlobs(hasher, blobs) {
 
 ;// CONCATENATED MODULE: ./packages/core/numbers/index.ts
 
-const
+const asWithBytesRepresentation = (v) => v;
 const MAX_VALUE_U8 = 0xff;
 const MAX_VALUE_U16 = 0xffff;
 const MAX_VALUE_U32 = 0xffff_ffff;
 const MAX_VALUE_U64 = 0xffffffffffffffffn;
 /** Attempt to cast an input number into U8. */
-const numbers_tryAsU8 = (v) => {
-    debug_check `${isU8(v)} input must have one-byte representation, got ${v}`;
-    return asTypedNumber(v);
-};
+const numbers_tryAsU8 = (v) => ensure(v, isU8(v), `input must have one-byte representation, got ${v}`);
 /** Check if given number is a valid U8 number. */
 const isU8 = (v) => (v & MAX_VALUE_U8) === v;
 /** Attempt to cast an input number into U16. */
-const numbers_tryAsU16 = (v) => {
-    debug_check `${isU16(v)} input must have two-byte representation, got ${v}`;
-    return asTypedNumber(v);
-};
+const numbers_tryAsU16 = (v) => ensure(v, isU16(v), `input must have two-byte representation, got ${v}`);
 /** Check if given number is a valid U16 number. */
 const isU16 = (v) => (v & MAX_VALUE_U16) === v;
 /** Attempt to cast an input number into U32. */
-const numbers_tryAsU32 = (v) => {
-    debug_check `${isU32(v)} input must have four-byte representation, got ${v}`;
-    return asTypedNumber(v);
-};
+const numbers_tryAsU32 = (v) => ensure(v, isU32(v), `input must have four-byte representation, got ${v}`);
 /** Check if given number is a valid U32 number. */
 const isU32 = (v) => (v & MAX_VALUE_U32) >>> 0 === v;
 /** Attempt to cast an input number into U64. */
 const numbers_tryAsU64 = (x) => {
     const v = BigInt(x);
-
-    return asTypedNumber(v);
+    return ensure(v, isU64(v), `input must have eight-byte representation, got ${x}`);
 };
 /** Check if given number is a valid U64 number. */
 const isU64 = (v) => (v & MAX_VALUE_U64) === v;
 /** Collate two U32 parts into one U64. */
 const u64FromParts = ({ lower, upper }) => {
     const val = (BigInt(upper) << 32n) + BigInt(lower);
-    return
+    return asWithBytesRepresentation(val);
 };
 /** Split U64 into lower & upper parts. */
 const u64IntoParts = (v) => {
-
-    const
-    const upper = Number(v >> 32n);
+    const lower = v & (2n ** 32n - 1n);
+    const upper = v >> 32n;
     return {
-        lower:
-        upper:
+        lower: asWithBytesRepresentation(Number(lower)),
+        upper: asWithBytesRepresentation(Number(upper)),
     };
 };
 /**
@@ -5931,8 +5917,8 @@ function numbers_u32AsLeBytes(value) {
  * Interpret 4-byte `Uint8Array` as U32 written as little endian.
  */
 function leBytesAsU32(uint8Array) {
-    debug_check
-    return
+    debug_check(uint8Array.length === 4, "Input must be a Uint8Array of length 4");
+    return asWithBytesRepresentation(uint8Array[0] | (uint8Array[1] << 8) | (uint8Array[2] << 16) | (uint8Array[3] << 24));
 }
 /** Get the smallest value between U64 a and values given as input parameters. */
 const minU64 = (a, ...values) => values.reduce((min, value) => (value > min ? min : value), a);
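The `u64FromParts`/`u64IntoParts` pair above round-trips a 64-bit value through two 32-bit halves; a quick check of the arithmetic:

```ts
const v = 0x1_0000_0002n;            // upper = 1, lower = 2
const lower = v & (2n ** 32n - 1n);  // 2n
const upper = v >> 32n;              // 1n
const back = (upper << 32n) + lower; // 0x1_0000_0002n again
```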
@@ -6280,7 +6266,7 @@ class decoder_Decoder {
         this.skip(newOffset - this.offset);
     }
     else {
-        debug_check
+        debug_check(newOffset >= 0, "The offset has to be positive");
         this.offset = newOffset;
     }
 }
@@ -6308,7 +6294,7 @@ class decoder_Decoder {
         return num;
     }
     ensureHasBytes(bytes) {
-        debug_check
+        debug_check(bytes >= 0, "Negative number of bytes given.");
         if (this.offset + bytes > this.source.length) {
             throw new Error(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
         }
@@ -6316,7 +6302,7 @@ class decoder_Decoder {
 }
 const MASKS = [0xff, 0xfe, 0xfc, 0xf8, 0xf0, 0xe0, 0xc0, 0x80];
 function decodeVariableLengthExtraBytes(firstByte) {
-    debug_check
+    debug_check(firstByte >= 0 && firstByte < 256, `Incorrect byte value: ${firstByte}`);
     for (let i = 0; i < MASKS.length; i++) {
         if (firstByte >= MASKS[i]) {
             return 8 - i;
@@ -6471,7 +6457,7 @@ class descriptor_Descriptor {
 
 
 function tryAsExactBytes(a) {
-    debug_check
+    debug_check(a.isExact, "The value is not exact size estimation!");
     return a.bytes;
 }
 function addSizeHints(a, b) {
@@ -6578,8 +6564,8 @@ class encoder_Encoder {
     // we still allow positive numbers from `[maxNum / 2, maxNum)`.
     // So it does not matter if the argument is a negative value,
     // OR if someone just gave us two-complement already.
-    debug_check
-    debug_check
+    debug_check(num < maxNum, "Only for numbers up to 2**64 - 1");
+    debug_check(-num <= maxNum / 2n, "Only for numbers down to -2**63");
     this.ensureBigEnough(8);
     this.dataView.setBigInt64(this.offset, num, true);
     this.offset += 8;
@@ -6643,8 +6629,8 @@ class encoder_Encoder {
     // we still allow positive numbers from `[maxNum / 2, maxNum)`.
     // So it does not matter if the argument is a negative value,
     // OR if someone just gave us two-complement already.
-    debug_check
-    debug_check
+    debug_check(num < maxNum, `Only for numbers up to 2**${BITS * bytesToEncode} - 1`);
+    debug_check(-num <= maxNum / 2, `Only for numbers down to -2**${BITS * bytesToEncode - 1}`);
     this.ensureBigEnough(bytesToEncode);
 }
 /**
@@ -6655,8 +6641,8 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/365202365202
  */
 varU32(num) {
-    debug_check
-    debug_check
+    debug_check(num >= 0, "Only for natural numbers.");
+    debug_check(num < 2 ** 32, "Only for numbers up to 2**32");
     this.varU64(BigInt(num));
 }
 /**
@@ -6807,7 +6793,7 @@ class encoder_Encoder {
  * https://graypaper.fluffylabs.dev/#/579bd12/374400374400
  */
 sequenceVarLen(encode, elements) {
-    debug_check
+    debug_check(elements.length <= 2 ** 32, "Wow, that's a nice long sequence you've got here.");
     this.varU32(numbers_tryAsU32(elements.length));
     this.sequenceFixLen(encode, elements);
 }
@@ -6828,7 +6814,7 @@ class encoder_Encoder {
  * anyway, so if we really should throw we will.
  */
 ensureBigEnough(length, options = { silent: false }) {
-    debug_check
+    debug_check(length >= 0, "Negative length given");
     const newLength = this.offset + length;
     if (newLength > MAX_LENGTH) {
         if (options.silent) {
@@ -6964,12 +6950,10 @@ class ObjectView {
     decodeUpTo(field) {
         const index = this.descriptorsKeys.indexOf(field);
         const lastField = this.descriptorsKeys[this.lastDecodedFieldIdx];
-        debug_check `
-        ${this.lastDecodedFieldIdx < index}
-        Unjustified call to 'decodeUpTo' -
+        debug_check(this.lastDecodedFieldIdx < index, `Unjustified call to 'decodeUpTo' -
         the index (${index}, ${String(field)})
         is already decoded (${this.lastDecodedFieldIdx}, ${String(lastField)}).
-        `;
+        `);
         let lastItem = this.cache.get(lastField);
         const skipper = new Skipper(this.decoder);
         // now skip all of the fields and further populate the cache.
@@ -6985,10 +6969,8 @@ class ObjectView {
         this.cache.set(field, lastItem);
         this.lastDecodedFieldIdx = i;
     }
-
-
-    }
-    return lastItem;
+    const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
+    return last;
 }
 }
 /**
@@ -7021,10 +7003,8 @@ class SequenceView {
     *[Symbol.iterator]() {
         for (let i = 0; i < this.length; i++) {
             const val = this.get(i);
-
-
-            }
-            yield val;
+            const v = ensure(val, val !== undefined, "We are within 0..this.length so all items are defined.");
+            yield v;
         }
     }
     /** Create an array of all views mapped to some particular value. */
@@ -7067,10 +7047,7 @@ class SequenceView {
         return bytes_BytesBlob.blobFrom(this.decoder.source.subarray(this.initialDecoderOffset, this.decoder.bytesRead()));
     }
     decodeUpTo(index) {
-        debug_check `
-        ${this.lastDecodedIdx < index}
-        Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).
-        `;
+        debug_check(this.lastDecodedIdx < index, `Unjustified call to 'decodeUpTo' - the index (${index}) is already decoded (${this.lastDecodedIdx}).`);
         let lastItem = this.cache.get(this.lastDecodedIdx);
         const skipper = new Skipper(this.decoder);
         // now skip all of the fields and further populate the cache.
@@ -7085,10 +7062,8 @@ class SequenceView {
         this.cache.set(i, lastItem);
         this.lastDecodedIdx = i;
     }
-
-
-    }
-    return lastItem;
+    const last = ensure(lastItem, lastItem !== undefined, "Last item must be set, since the loop turns at least once.");
+    return last;
 }
 }
 
@@ -7121,10 +7096,7 @@ const TYPICAL_DICTIONARY_LENGTH = 32;
  */
 function readonlyArray(desc) {
     return desc.convert((x) => {
-        debug_check `
-        ${Array.isArray(x)}
-        Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}
-        `;
+        debug_check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
         // NOTE [ToDr] This assumption is incorrect in general, but it's documented
         // in the general note. We avoid `.slice()` the array for performance reasons.
         return x;
@@ -7572,8 +7544,8 @@ class MultiMap {
  * if needed.
  */
 constructor(keysLength, keyMappers) {
-    check
-    check
+    check(keysLength > 0, "Keys cannot be empty.");
+    check(keyMappers === undefined || keyMappers.length === keysLength, "Incorrect number of key mappers given!");
     this.data = new Map();
     this.keyMappers = keyMappers === undefined ? Array(keysLength).fill(null) : keyMappers;
 }
@@ -7674,7 +7646,7 @@ class sized_array_FixedSizeArray extends Array {
     this.fixedLength = this.length;
 }
 static new(data, len) {
-    debug_check
+    debug_check(data.length === len, `Expected an array of size: ${len}, got: ${data.length}`);
     const arr = new sized_array_FixedSizeArray(len);
     for (let i = 0; i < len; i++) {
         arr[i] = data[i];
@@ -7808,7 +7780,7 @@ class SortedArray {
 }
 /** Create a new SortedSet from two sorted collections. */
 static fromTwoSortedCollections(first, second) {
-    debug_check
+    debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
     const comparator = first.comparator;
     const arr1 = first.array;
     const arr1Length = arr1.length;
@@ -7928,7 +7900,7 @@ class SortedSet extends SortedArray {
 }
 /** Create a new SortedSet from two sorted collections. */
 static fromTwoSortedCollections(first, second) {
-    debug_check
+    debug_check(first.comparator === second.comparator, "Cannot merge arrays if they do not use the same comparator");
     const comparator = first.comparator;
     if (first.length === 0) {
         return SortedSet.fromSortedArray(comparator, second.array);
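`fromTwoSortedCollections` asserts that both inputs share the same comparator because a merge of two sorted arrays is only correct under a single ordering. An illustrative two-pointer merge (not the package's implementation):

```ts
function mergeSorted<T>(a: T[], b: T[], cmp: (x: T, y: T) => number): T[] {
  const out: T[] = [];
  let i = 0;
  let j = 0;
  // Repeatedly take the smaller head; both inputs must be sorted by `cmp`.
  while (i < a.length && j < b.length) {
    out.push(cmp(a[i], b[j]) <= 0 ? a[i++] : b[j++]);
  }
  return out.concat(a.slice(i), b.slice(j));
}
```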
@@ -8564,12 +8536,9 @@ const common_tryAsServiceGas = (v) => opaque_asOpaqueType(numbers_tryAsU64(v));
 /** Attempt to convert a number into `CoreIndex`. */
 const common_tryAsCoreIndex = (v) => opaque_asOpaqueType(numbers_tryAsU16(v));
 /** Attempt to convert a number into `Epoch`. */
-const tryAsEpoch = (v) =>
+const tryAsEpoch = (v) => opaque_asOpaqueType(numbers_tryAsU32(v));
 function tryAsPerValidator(array, spec) {
-    debug_check `
-    ${array.length === spec.validatorsCount}
-    Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}
-    `;
+    debug_check(array.length === spec.validatorsCount, `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`);
     return sized_array_asKnownSize(array);
 }
 const codecPerValidator = (val) => codecWithContext((context) => {
@@ -8578,10 +8547,7 @@ const codecPerValidator = (val) => codecWithContext((context) => {
     });
 });
 function tryAsPerEpochBlock(array, spec) {
-    debug_check `
-    ${array.length === spec.epochLength}
-    Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}
-    `;
+    debug_check(array.length === spec.epochLength, `Invalid per-epoch-block array length. Expected ${spec.epochLength}, got: ${array.length}`);
     return sized_array_asKnownSize(array);
 }
 const codecPerEpochBlock = (val) => codecWithContext((context) => {
@@ -8852,14 +8818,9 @@ class WorkItem extends WithDebug {
 
 
 
-
 /** Verify the value is within the `WorkItemsCount` bounds. */
 function work_package_tryAsWorkItemsCount(len) {
-
-    ${len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS}
-    WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}
-    `;
-    return numbers_tryAsU8(len);
+    return ensure(len, len >= MIN_NUMBER_OF_WORK_ITEMS && len <= work_package_MAX_NUMBER_OF_WORK_ITEMS, `WorkItemsCount: Expected '${MIN_NUMBER_OF_WORK_ITEMS} <= count <= ${work_package_MAX_NUMBER_OF_WORK_ITEMS}' got ${len}`);
 }
 /** Minimal number of work items in the work package or results in work report. */
 const MIN_NUMBER_OF_WORK_ITEMS = 1;
@@ -10102,10 +10063,7 @@ class AvailabilityAssignment extends WithDebug {
 
 /** Check if given array has correct length before casting to the opaque type. */
 function tryAsPerCore(array, spec) {
-    debug_check `
-    ${array.length === spec.coresCount}
-    Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
-    `;
+    debug_check(array.length === spec.coresCount, `Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}`);
     return opaque_asOpaqueType(array);
 }
 const codecPerCore = (val) => codecWithContext((context) => {
@@ -11356,7 +11314,7 @@ class InMemoryState extends WithDebug {
 }
 removeServices(servicesRemoved) {
     for (const serviceId of servicesRemoved ?? []) {
-        debug_check
+        debug_check(this.services.has(serviceId), `Attempting to remove non-existing service: ${serviceId}`);
         this.services.delete(serviceId);
     }
 }
@@ -11373,10 +11331,7 @@ class InMemoryState extends WithDebug {
 }
 else if (kind === UpdateStorageKind.Remove) {
     const { key } = action;
-    debug_check `
-    ${service.data.storage.has(key.toString())}
-    Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
-    `;
+    debug_check(service.data.storage.has(key.toString()), `Attempting to remove non-existing storage item at ${serviceId}: ${action.key}`);
     service.data.storage.delete(key.toString());
 }
 else {
@@ -12059,12 +12014,12 @@ class TrieNode {
 }
 /** View this node as a branch node */
 asBranchNode() {
-    debug_check
+    debug_check(this.getNodeType() === NodeType.Branch);
     return new BranchNode(this);
 }
 /** View this node as a leaf node */
 asLeafNode() {
-    debug_check
+    debug_check(this.getNodeType() !== NodeType.Branch);
     return new LeafNode(this);
 }
 toString() {
@@ -12552,7 +12507,7 @@ function createSubtreeForBothLeaves(traversedPath, nodes, leafToReplace, leaf) {
 * Return a single bit from `key` located at `bitIndex`.
 */
 function getBit(key, bitIndex) {
-    debug_check
+    debug_check(bitIndex < TRUNCATED_KEY_BITS);
     const byte = bitIndex >>> 3;
     const bit = bitIndex - (byte << 3);
     const mask = 0b10_00_00_00 >>> bit;
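Note that the trie's `getBit` is MSB-first (`0b1000_0000 >>> bit`), the opposite convention from `BitVec.setBit`/`isSet` earlier in this diff, which index bits LSB-first within a byte:

```ts
// bitIndex = 1 selects the second-highest bit of key[0]:
const bitIndex = 1;
const byte = bitIndex >>> 3;        // 0
const bit = bitIndex - (byte << 3); // 1
const mask = 0b1000_0000 >>> bit;   // 0b0100_0000 = 64
```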
@@ -13877,7 +13832,7 @@ class TypedPort {
 * Send a response given the worker that has previously requested something.
 */
 respond(localState, request, data, transferList) {
-    debug_check
+    debug_check(request.kind === "request");
     this.postMessage({
         kind: "response",
         id: request.id,
@@ -13908,11 +13863,10 @@ class TypedPort {
         throw new Error(`Invalid message: ${JSON.stringify(msg)}.`);
     }
     switch (msg.kind) {
-        case "response":
-            debug_check
+        case "response":
+            debug_check(this.responseListeners.eventNames().indexOf(reqEvent(msg.id)) !== -1);
             this.responseListeners.emit(reqEvent(msg.id), null, msg.data, msg.name, msg.localState, msg);
             break;
-        }
         case "signal":
             this.listeners.emit("signal", msg.name, msg.data, msg.localState, msg);
             break;
@@ -14127,9 +14081,9 @@ class channel_MessageChannelStateMachine {
     const promise = new Promise((resolve, reject) => {
         parentPort.once("message", (value) => {
             try {
-                debug_check
-                debug_check
-                debug_check
+                debug_check(value.kind === "request", "The initial message should be a request with channel.");
+                debug_check(value.name === CHANNEL_MESSAGE);
+                debug_check(value.data instanceof external_node_worker_threads_namespaceObject.MessagePort);
                 const port = new TypedPort(value.data);
                 port.respond(machine.currentState().stateName, value, Ok);
                 resolve(port);
@@ -14209,7 +14163,7 @@ class machine_StateMachine {
 /** Get state object by name. */
 getState(name) {
     const state = this.allStates.get(name);
-    debug_check
+    debug_check(state !== undefined, `Unable to retrieve state object for ${name}.`);
     return state;
 }
 /** Get the currently active state object. */
@@ -14474,22 +14428,19 @@ class Preimages {
 
 const NO_OF_REGISTERS = 13;
 const REGISTER_SIZE_SHIFT = 3;
-const tryAsRegisterIndex = (index) => {
-    debug_check `${index >= 0 && index < NO_OF_REGISTERS} Incorrect register index: ${index}!`;
-    return opaque_asOpaqueType(index);
-};
+const tryAsRegisterIndex = (index) => ensure(index, index >= 0 && index <= NO_OF_REGISTERS, `Incorrect register index: ${index}!`);
 class Registers {
     bytes;
     asSigned;
     asUnsigned;
     constructor(bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
         this.bytes = bytes;
-        debug_check
+        debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
         this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
         this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
     }
     static fromBytes(bytes) {
-        debug_check
+        debug_check(bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT, "Invalid size of registers array.");
         return new Registers(bytes);
     }
     getBytesAsLittleEndian(index, len) {
@@ -14635,7 +14586,7 @@ class Mask {
     return this.lookupTableForward[index] === 0;
 }
 getNoOfBytesToNextInstruction(index) {
-    debug_check
+    debug_check(index >= 0, `index (${index}) cannot be a negative number`);
     return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
 }
 buildLookupTableForward(mask) {
@@ -15635,7 +15586,7 @@ const PAGE_SIZE_SHIFT = 12;
 const PAGE_SIZE = 1 << PAGE_SIZE_SHIFT;
 const MIN_ALLOCATION_SHIFT = (() => {
     const MIN_ALLOCATION_SHIFT = 7;
-    debug_check
+    debug_check(MIN_ALLOCATION_SHIFT >= 0 && MIN_ALLOCATION_SHIFT < PAGE_SIZE_SHIFT, "incorrect minimal allocation shift");
     return MIN_ALLOCATION_SHIFT;
 })();
 const MIN_ALLOCATION_LENGTH = PAGE_SIZE >> MIN_ALLOCATION_SHIFT;
@@ -15648,28 +15599,16 @@ const MAX_NUMBER_OF_PAGES = MEMORY_SIZE / PAGE_SIZE;
 ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/memory-index.ts
 
 
-const tryAsMemoryIndex = (index) => {
-
-    return opaque_asOpaqueType(index);
-};
-const tryAsSbrkIndex = (index) => {
-    debug_check `${index >= 0 && index <= MAX_MEMORY_INDEX + 1} Incorrect sbrk index: ${index}!`;
-    return opaque_asOpaqueType(index);
-};
+const tryAsMemoryIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX, `Incorrect memory index: ${index}!`);
+const tryAsSbrkIndex = (index) => ensure(index, index >= 0 && index <= MAX_MEMORY_INDEX + 1, `Incorrect sbrk index: ${index}!`);
 
 ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/memory/pages/page-utils.ts
 
 
 /** Ensure that given memory `index` is within `[0...PAGE_SIZE)` and can be used to index a page */
-const tryAsPageIndex = (index) => {
-    debug_check `${index >= 0 && index < PAGE_SIZE}, Incorect page index: ${index}!`;
-    return opaque_asOpaqueType(index);
-};
+const tryAsPageIndex = (index) => ensure(index, index >= 0 && index < PAGE_SIZE, `Incorect page index: ${index}!`);
 /** Ensure that given `index` represents an index of one of the pages. */
-const tryAsPageNumber = (index) => {
-    debug_check `${index >= 0 && index <= LAST_PAGE_NUMBER}, Incorect page number: ${index}!`;
-    return opaque_asOpaqueType(index);
-};
+const tryAsPageNumber = (index) => ensure(index, index >= 0 && index <= LAST_PAGE_NUMBER, `Incorrect page number: ${index}!`);
 /**
  * Get the next page number and wrap the result if it is bigger than LAST_PAGE_NUMBER
 *
@@ -16201,10 +16140,10 @@ class MemoryBuilder {
 */
 setReadablePages(start, end, data = new Uint8Array()) {
     this.ensureNotFinalized();
-    debug_check
-    debug_check
-    debug_check
-    debug_check
+    debug_check(start < end, "end has to be bigger than start");
+    debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
+    debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
+    debug_check(data.length <= end - start, "the initial data is longer than address range");
    const length = end - start;
    const range = MemoryRange.fromStartAndLength(start, length);
    this.ensureNoReservedMemoryUsage(range);
@@ -16229,10 +16168,10 @@ class MemoryBuilder {
 */
 setWriteablePages(start, end, data = new Uint8Array()) {
     this.ensureNotFinalized();
-    debug_check
-    debug_check
-    debug_check
-    debug_check
+    debug_check(start < end, "end has to be bigger than start");
+    debug_check(start % PAGE_SIZE === 0, `start needs to be a multiple of page size (${PAGE_SIZE})`);
+    debug_check(end % PAGE_SIZE === 0, `end needs to be a multiple of page size (${PAGE_SIZE})`);
+    debug_check(data.length <= end - start, "the initial data is longer than address range");
     const length = end - start;
     const range = MemoryRange.fromStartAndLength(start, length);
     this.ensureNoReservedMemoryUsage(range);
@@ -16254,7 +16193,7 @@ class MemoryBuilder {
     this.ensureNotFinalized();
     const pageOffset = start % PAGE_SIZE;
     const remainingSpaceOnPage = PAGE_SIZE - pageOffset;
-    debug_check
+    debug_check(data.length <= remainingSpaceOnPage, "The data has to fit into a single page.");
     const length = data.length;
     const range = MemoryRange.fromStartAndLength(start, length);
     this.ensureNoReservedMemoryUsage(range);
@@ -16268,10 +16207,7 @@ class MemoryBuilder {
     return this;
 }
 finalize(startHeapIndex, endHeapIndex) {
-    debug_check `
-    ${startHeapIndex <= endHeapIndex}
-    startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
-    `;
+    debug_check(startHeapIndex <= endHeapIndex, `startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})`);
     this.ensureNotFinalized();
     const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
     const pages = PageRange.fromMemoryRange(range);
@@ -16509,7 +16445,7 @@ function mulU64(a, b) {
 *
 * The result of multiplication is a 64-bits number and we are only interested in the part that lands in the upper 32-bits.
 * For example if we multiply `0xffffffff * 0xffffffff`, we get:
-
+
 * | 64-bits | 64-bits |
 * +--------------------+--------------------+
 * | upper | lower |
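The surrounding comment walks through taking the upper half of a 64-bit product; the concrete numbers for its `0xffffffff * 0xffffffff` example:

```ts
const product = 0xffff_ffffn * 0xffff_ffffn; // 0xfffffffe_00000001n
const upper = product >> 32n;                // 0xfffffffen
const lower = product & 0xffff_ffffn;        // 0x1n
```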
@@ -16545,7 +16481,7 @@ function mulUpperSS(a, b) {
     return interpretAsSigned(resultLimitedTo64Bits);
 }
 function unsignedRightShiftBigInt(value, shift) {
-    debug_check
+    debug_check(shift >= 0, "Shift count must be non-negative");
     const fillBit = value < 0 ? "1" : "0";
     // Convert the BigInt to its binary representation
     const binaryRepresentation = value.toString(2).padStart(64, fillBit);
@@ -17961,10 +17897,7 @@ class TwoRegsTwoImmsDispatcher {
 class JumpTable {
     indices;
     constructor(itemByteLength, bytes) {
-        debug_check `
-        ${itemByteLength === 0 || bytes.length % itemByteLength === 0}
-        Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!
-        `;
+        debug_check(itemByteLength === 0 || bytes.length % itemByteLength === 0, `Length of jump table (${bytes.length}) should be a multiple of item lenght (${itemByteLength})!`);
         const length = itemByteLength === 0 ? 0 : bytes.length / itemByteLength;
         this.indices = new Uint32Array(length);
         for (let i = 0; i < length; i++) {
@@ -18408,10 +18341,7 @@ class ReturnValue {
     this.consumedGas = consumedGas;
     this.status = status;
     this.memorySlice = memorySlice;
-    debug_check `
-    ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
-    'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
-    `;
+    debug_check((status === null && memorySlice !== null) || (status !== null && memorySlice === null), "`status` and `memorySlice` must not both be null or both be non-null — exactly one must be provided");
 }
 static fromStatus(consumedGas, status) {
     return new ReturnValue(consumedGas, status, null);
@@ -18460,10 +18390,7 @@ class HostCalls {
     if (status !== status_Status.HOST) {
         return this.getReturnValue(status, pvmInstance);
     }
-    debug_check `
-    ${pvmInstance.getExitParam() !== null}
-    "We know that the exit param is not null, because the status is 'Status.HOST'
-    `;
+    debug_check(pvmInstance.getExitParam() !== null, "We know that the exit param is not null, because the status is `Status.HOST`");
     const hostCallIndex = pvmInstance.getExitParam() ?? -1;
     const gas = pvmInstance.getGasCounter();
     const regs = new HostCallRegisters(pvmInstance.getRegisters());
@@ -18523,7 +18450,7 @@ class host_calls_manager_HostCallsManager {
 constructor({ missing, handlers = [], }) {
     this.missing = missing;
     for (const handler of handlers) {
-        debug_check
+        debug_check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
         this.hostCalls.set(handler.index, handler);
     }
 }
@@ -18646,7 +18573,7 @@ function getServiceId(serviceId) {
     return null;
 }
 function writeServiceIdAsLeBytes(serviceId, destination) {
-    debug_check
+    debug_check(destination.length >= SERVICE_ID_BYTES, "Not enough space in the destination.");
     destination.set(numbers_u32AsLeBytes(serviceId));
 }
 /** Clamp a U64 to the maximum value of a 32-bit unsigned integer. */
@@ -18735,27 +18662,13 @@ class SpiProgram extends WithDebug {
     this.registers = registers;
 }
 }
-/**
- * program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c
- *
- * E_n - little endian encoding, n - length
- * o - initial read only data
- * w - initial heap
- * z - heap pages filled with zeros
- * s - stack size
- * c - program code
- *
- * https://graypaper.fluffylabs.dev/#/579bd12/2b92022b9202
- */
 function decodeStandardProgram(program, args) {
     const decoder = decoder_Decoder.fromBlob(program);
     const oLength = decoder.u24();
     const wLength = decoder.u24();
-
-
-    const
-    debug_check `${wLength <= DATA_LEGNTH} Incorrect heap segment length`;
-    const heapLength = wLength;
+    const argsLength = ensure(args.length, args.length <= DATA_LEGNTH, "Incorrect arguments length");
+    const readOnlyLength = ensure(oLength, oLength <= DATA_LEGNTH, "Incorrect readonly segment length");
+    const heapLength = ensure(wLength, wLength <= DATA_LEGNTH, "Incorrect heap segment length");
     const noOfHeapZerosPages = decoder.u16();
     const stackSize = decoder.u24();
     const readOnlyMemory = decoder.bytes(readOnlyLength).raw;
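The deleted doc comment is still the best key to the decoder calls that remain: `program = E_3(|o|) ++ E_3(|w|) ++ E_2(z) ++ E_3(s) ++ o ++ w ++ E_4(|c|) ++ c`, where `E_n` is a little-endian `n`-byte integer. A sketch of the 3-byte read that `decoder.u24()` presumably performs (an assumption based on that encoding, not the package's actual implementation):

```ts
// Little-endian 3-byte integer, as used for |o|, |w| and the stack size s.
function readU24(bytes: Uint8Array, offset: number): number {
  return bytes[offset] | (bytes[offset + 1] << 8) | (bytes[offset + 2] << 16);
}
```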
@@ -18771,14 +18684,14 @@ function decodeStandardProgram(program, args) {
     const stackStart = STACK_SEGMENT - memory_utils_alignToPageSize(stackSize);
     const stackEnd = STACK_SEGMENT;
     const argsStart = ARGS_SEGMENT;
-    const argsEnd = argsStart + memory_utils_alignToPageSize(
-    const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(
+    const argsEnd = argsStart + memory_utils_alignToPageSize(argsLength);
+    const argsZerosEnd = argsEnd + memory_utils_alignToPageSize(argsLength);
     function nonEmpty(s) {
         return s !== false;
     }
     const readableMemory = [
         readOnlyLength > 0 && getMemorySegment(readonlyDataStart, readonlyDataEnd, readOnlyMemory),
-
+        argsLength > 0 && getMemorySegment(argsStart, argsEnd, args),
         argsEnd < argsZerosEnd && getMemorySegment(argsEnd, argsZerosEnd),
     ].filter(nonEmpty);
     const writeableMemory = [
@@ -19007,6 +18920,88 @@ class PvmExecutor {
 
 
 
+;// CONCATENATED MODULE: ./workers/importer/import-queue.ts
+
+
+
+
+
+class ImportQueue {
+    spec;
+    importer;
+    toImport = SortedArray.fromSortedArray((a, b) => {
+        const diff = a.timeSlot - b.timeSlot;
+        if (diff < 0) {
+            return Ordering.Greater;
+        }
+        if (diff > 0) {
+            return Ordering.Less;
+        }
+        return Ordering.Equal;
+    });
+    queuedBlocks = HashSet.new();
+    lastEpoch = tryAsEpoch(2 ** 32 - 1);
+    constructor(spec, importer) {
+        this.spec = spec;
+        this.importer = importer;
+    }
+    isCurrentEpoch(timeSlot) {
+        const epoch = Math.floor(timeSlot / this.spec.epochLength);
+        return this.lastEpoch === epoch;
+    }
+    startPreverification() {
+        for (const entry of this.toImport) {
+            if (this.isCurrentEpoch(entry.timeSlot)) {
+                entry.seal = this.importer.preverifySeal(entry.timeSlot, entry.block);
+            }
+        }
+    }
+    static getBlockDetails(block) {
+        let encodedHeader;
+        let timeSlot;
+        try {
+            encodedHeader = block.header.encoded();
+            timeSlot = block.header.view().timeSlotIndex.materialize();
+        }
+        catch {
+            return result_Result.error("invalid");
+        }
+        const headerHash = hashBytes(encodedHeader).asOpaque();
+        return result_Result.ok(new WithHash(headerHash, { block, timeSlot }));
+    }
+    push(details) {
+        const headerHash = details.hash;
+        if (this.queuedBlocks.has(headerHash)) {
+            return result_Result.error("already queued");
+        }
+        const { timeSlot, block } = details.data;
+        const entry = {
+            headerHash,
+            timeSlot,
+            block,
+            seal: this.isCurrentEpoch(timeSlot) ? this.importer.preverifySeal(timeSlot, block) : Promise.resolve(null),
+        };
+        this.toImport.insert(entry);
+        this.queuedBlocks.insert(headerHash);
+        return result_Result.ok(result_OK);
+    }
+    shift() {
+        const entry = this.toImport.pop();
+        if (entry !== undefined) {
+            this.queuedBlocks.delete(entry.headerHash);
+            const blockEpoch = Math.floor(entry.timeSlot / this.spec.epochLength);
+            const hasEpochChanged = this.lastEpoch !== blockEpoch;
+            this.lastEpoch = tryAsEpoch(blockEpoch);
+            // currently removed block is changing the epoch, so fire up
+            // preverifcation for the following blocks.
+            if (hasEpochChanged) {
+                this.startPreverification();
+            }
+        }
+        return entry;
+    }
+}
+
 ;// CONCATENATED MODULE: ./packages/jam/transition/block-verifier.ts
 
 
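The new `ImportQueue`'s comparator is intentionally inverted: entries with a *smaller* `timeSlot` compare as `Ordering.Greater`, so they sort toward the end of the backing array and `shift()`'s `this.toImport.pop()` dequeues the oldest block first (assuming `SortedArray.pop()` takes from the end, which the `shift()` usage implies). The same trick with a plain array:

```ts
const slots = [5, 3, 9];
slots.sort((a, b) => b - a); // inverted ordering: [9, 5, 3]
slots.pop();                 // 3 — the lowest time slot comes out first
```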
@@ -19020,7 +19015,7 @@ var BlockVerifierError;
     BlockVerifierError[BlockVerifierError["InvalidStateRoot"] = 4] = "InvalidStateRoot";
     BlockVerifierError[BlockVerifierError["AlreadyImported"] = 5] = "AlreadyImported";
 })(BlockVerifierError || (BlockVerifierError = {}));
-const
+const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
 class BlockVerifier {
     hasher;
     blocks;
@@ -19040,7 +19035,7 @@ class BlockVerifier {
     // https://graypaper.fluffylabs.dev/#/cc517d7/0c9d000c9d00?v=0.6.5
     const parentHash = headerView.parentHeaderHash.materialize();
     // importing genesis block
-    if (!parentHash.isEqualTo(
+    if (!parentHash.isEqualTo(ZERO_HASH)) {
         const parentBlock = this.blocks.getHeader(parentHash);
         if (parentBlock === null) {
             return result_Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
@@ -19553,22 +19548,304 @@ async function verifyTickets(bandersnatch, numberOfValidators, epochRoot, ticket
|
|
|
19553
19548
|
}));
|
|
19554
19549
|
}
|
|
19555
19550
|
|
|
19556
|
-
;// CONCATENATED MODULE:
|
|
19551
|
+
;// CONCATENATED MODULE: external "node:os"
|
|
19552
|
+
const external_node_os_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:os");
|
|
19553
|
+
var external_node_os_default = /*#__PURE__*/__nccwpck_require__.n(external_node_os_namespaceObject);
|
|
19554
|
+
;// CONCATENATED MODULE: ./packages/core/concurrent/parent.ts
|
|
19555
|
+
|
|
19556
|
+
|
|
19557
|
+
+// Amount of tasks in the queue that will trigger creation of a new worker thread.
+// NOTE this might need to be configurable in the future.
+const QUEUE_SIZE_WORKER_THRESHOLD = 5;
+/** Execution pool manager. */
+class Executor {
+    workers;
+    maxWorkers;
+    workerPath;
+    /** Initialize a new concurrent executor given a path to the worker. */
+    static async initialize(workerPath, options) {
+        debug_check(options.maxWorkers > 0, "Max workers has to be positive.");
+        debug_check(options.minWorkers <= options.maxWorkers, "Min workers has to be lower or equal to max workers.");
+        const workers = [];
+        for (let i = 0; i < options.minWorkers; i++) {
+            workers.push(await initWorker(workerPath));
+        }
+        return new Executor(workers, options.maxWorkers, workerPath);
+    }
+    // keeps track of the indices of worker threads that are currently free and available to execute tasks
+    freeWorkerIndices = [];
+    taskQueue = [];
+    isDestroyed = false;
+    isWorkerInitializing = false;
+    constructor(workers, maxWorkers, workerPath) {
+        this.workers = workers;
+        this.maxWorkers = maxWorkers;
+        this.workerPath = workerPath;
+        // initial free workers.
+        for (let i = 0; i < workers.length; i++) {
+            this.freeWorkerIndices.push(i);
+        }
+    }
+    /** Attempt to initialize a new worker. */
+    async initNewWorker(onSuccess = () => { }) {
+        if (this.workers.length >= this.maxWorkers) {
+            // biome-ignore lint/suspicious/noConsole: warning
+            console.warn(`Task queue has ${this.taskQueue.length} pending items and we can't init any more workers.`);
+            return;
+        }
+        if (this.isWorkerInitializing) {
+            return;
+        }
+        this.isWorkerInitializing = true;
+        this.workers.push(await initWorker(this.workerPath));
+        this.freeWorkerIndices.push(this.workers.length - 1);
+        this.isWorkerInitializing = false;
+        onSuccess();
+    }
+    /** Terminate all workers and clear the executor. */
+    async destroy() {
+        for (const worker of this.workers) {
+            worker.port.close();
+            await worker.worker.terminate();
+        }
+        this.workers.length = 0;
+        this.isDestroyed = true;
+    }
+    /** Execute a task with given parameters. */
+    async run(params) {
+        return new Promise((resolve, reject) => {
+            if (this.isDestroyed) {
+                reject("pool destroyed");
+                return;
+            }
+            this.taskQueue.push({
+                params,
+                resolve,
+                reject,
+            });
+            this.processEntryFromTaskQueue();
+        });
+    }
+    /** Process a single element from the task queue. */
+    processEntryFromTaskQueue() {
+        const freeWorker = this.freeWorkerIndices.pop();
+        // no free workers available currently,
+        // we will retry when one of the tasks completes.
+        if (freeWorker === undefined) {
+            if (this.taskQueue.length > QUEUE_SIZE_WORKER_THRESHOLD) {
+                this.initNewWorker(() => {
+                    // process an entry in this newly initialized worker.
+                    this.processEntryFromTaskQueue();
+                });
+            }
+            return;
+        }
+        const task = this.taskQueue.pop();
+        // no tasks in the queue
+        if (task === undefined) {
+            this.freeWorkerIndices.push(freeWorker);
+            return;
+        }
+        const worker = this.workers[freeWorker];
+        worker.runTask(task, () => {
+            // mark the worker as available again
+            this.freeWorkerIndices.push(freeWorker);
+            // and continue processing the queue
+            this.processEntryFromTaskQueue();
+        });
+    }
+}
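
A minimal sketch of how this pool is driven, assuming a hypothetical worker script `compute-worker.js` and an inline payload object (both illustrative and not part of the package; only `getTransferList()` is required by `WorkerChannel.runTask` below):

```js
// Illustrative usage of the Executor pool above.
const executor = await Executor.initialize("./compute-worker.js", {
    minWorkers: 2, // started eagerly by initialize()
    maxWorkers: 4, // hard cap enforced by initNewWorker()
});
// Every payload must expose getTransferList() (empty here: nothing to move).
const tasks = Array.from({ length: 10 }, (_, i) =>
    executor.run({ value: i, getTransferList: () => [] }));
// Once more than QUEUE_SIZE_WORKER_THRESHOLD (5) tasks are pending and no
// worker is free, processEntryFromTaskQueue() grows the pool up to maxWorkers.
await Promise.all(tasks);
await executor.destroy();
```

Note that `taskQueue.pop()` makes the queue LIFO, so under load the most recently submitted task is served first.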
+async function initWorker(workerPath) {
+    // create a worker and initialize communication channel
+    const { port1, port2 } = new MessageChannel();
+    const workerThread = new external_node_worker_threads_namespaceObject.Worker(workerPath, {});
+    workerThread.postMessage(port1, [port1]);
+    // wait for the worker to start
+    await new Promise((resolve, reject) => {
+        workerThread.once("message", resolve);
+        workerThread.once("error", reject);
+    });
+    // make sure the threads don't prevent the program from stopping.
+    workerThread.unref();
+    return new WorkerChannel(workerThread, port2);
+}
+class WorkerChannel {
+    worker;
+    port;
+    constructor(worker, port) {
+        this.worker = worker;
+        this.port = port;
+    }
+    runTask(task, onFinish) {
+        const message = {
+            params: task.params,
+        };
+        // when we receive a response, make sure to process it
+        this.port.once("message", (e) => {
+            if (e.isOk) {
+                task.resolve(e.ok);
+            }
+            else {
+                task.reject(new Error(e.error));
+            }
+            onFinish();
+        });
+        // send the task to work on.
+        this.port.postMessage(message, message.params.getTransferList());
+    }
+}
+
+;// CONCATENATED MODULE: ./packages/core/concurrent/worker.ts
+
+
+/** An in-worker abstraction. */
+class ConcurrentWorker {
+    runInternal;
+    state;
+    static new(run, state) {
+        return new ConcurrentWorker(run, state);
+    }
+    constructor(runInternal, state) {
+        this.runInternal = runInternal;
+        this.state = state;
+    }
+    listenToParentPort() {
+        if (external_node_worker_threads_namespaceObject.parentPort === null) {
+            throw new Error("This method is meant to be run inside a worker thread!");
+        }
+        external_node_worker_threads_namespaceObject.parentPort.once("close", () => {
+            process.exit(0);
+        });
+        external_node_worker_threads_namespaceObject.parentPort.once("message", (port) => {
+            this.listenTo(port);
+            // send back readiness signal.
+            external_node_worker_threads_namespaceObject.parentPort?.postMessage("ready");
+        });
+    }
+    listenTo(port) {
+        port.once("close", () => {
+            port.removeAllListeners();
+            process.exit(0);
+        });
+        port.on("message", (ev) => {
+            const { params } = ev;
+            this.run(params)
+                .then((result) => {
+                    const response = result_Result.ok(result);
+                    port.postMessage(response, result.getTransferList());
+                })
+                .catch((e) => {
+                    const response = result_Result.error(`${e}`);
+                    port.postMessage(response, []);
+                });
+        });
+    }
+    async run(params) {
+        return await this.runInternal(params, this.state);
+    }
+    async destroy() { }
+}
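
The worker side mirrors `WorkerChannel`: an entry module wraps its handler in `ConcurrentWorker` and starts listening for the port handshake. A minimal sketch of such an entry point, with a hypothetical echo handler (the package's real handler follows below):

```js
// Hypothetical worker entry module; illustrative only.
const echoWorker = ConcurrentWorker.new(async (params, _state) => {
    // Results must expose getTransferList() so buffers can be moved back
    // to the main thread instead of being copied.
    return { data: params, getTransferList: () => [] };
}, null);
// Inside the worker thread: wait for the MessagePort sent by initWorker()
// and reply with the "ready" signal that initWorker() is awaiting.
echoWorker.listenToParentPort();
```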
+
+;// CONCATENATED MODULE: ./packages/core/concurrent/index.ts
+
+
+
+;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/params.ts
+var Method;
+(function (Method) {
+    Method[Method["RingCommitment"] = 0] = "RingCommitment";
+    Method[Method["BatchVerifyTickets"] = 1] = "BatchVerifyTickets";
+    Method[Method["VerifySeal"] = 2] = "VerifySeal";
+})(Method || (Method = {}));
+class params_Response {
+    data;
+    constructor(data) {
+        this.data = data;
+    }
+    getTransferList() {
+        return [this.data.buffer];
+    }
+}
+class Params {
+    params;
+    constructor(params) {
+        this.params = params;
+    }
+    getTransferList() {
+        return [];
+    }
+}
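
The two `getTransferList` implementations are deliberately asymmetric: a `params_Response` transfers its result buffer back to the main thread (zero-copy), while `Params` requests are structured-cloned. A small illustration of the difference, assuming a Node.js `MessagePort` named `port` (illustrative only):

```js
// Transferring vs. cloning an ArrayBuffer over a MessagePort.
const moved = new Uint8Array([1, 2, 3]);
port.postMessage(moved, [moved.buffer]); // zero-copy: `moved` is detached afterwards
const cloned = new Uint8Array([4, 5, 6]);
port.postMessage(cloned, []); // structured clone: the sender keeps its copy
```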
+
+;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/worker.ts
+
+
+
+
+const worker = ConcurrentWorker.new(async (p) => {
+    await initAll();
+    const params = p.params;
+    const method = params.method;
+    if (method === Method.RingCommitment) {
+        return Promise.resolve(new params_Response(bandersnatch_exports.ring_commitment(params.keys)));
+    }
+    if (method === Method.BatchVerifyTickets) {
+        return Promise.resolve(new params_Response(bandersnatch_exports.batch_verify_tickets(params.ringSize, params.commitment, params.ticketsData, params.contextLength)));
+    }
+    if (method === Method.VerifySeal) {
+        return Promise.resolve(new params_Response(bandersnatch_exports.verify_seal(params.authorKey, params.signature, params.payload, params.auxData)));
+    }
+    debug_assertNever(method);
+}, null);
+
+;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/index.ts
+
 
+
+
+const workerFile = __nccwpck_require__.ab + "bootstrap-bandersnatch.mjs";
 class BandernsatchWasm {
-
-
-
-
+    executor;
+    constructor(executor) {
+        this.executor = executor;
+    }
+    destroy() {
+        return this.executor.destroy();
+    }
+    static async new({ synchronous }) {
+        const workers = external_node_os_default().cpus().length;
+        return new BandernsatchWasm(!synchronous
+            ? await Executor.initialize(workerFile, {
+                minWorkers: Math.max(1, Math.floor(workers / 2)),
+                maxWorkers: workers,
+            })
+            : worker);
     }
     async verifySeal(authorKey, signature, payload, auxData) {
-
+        const x = await this.executor.run(new Params({
+            method: Method.VerifySeal,
+            authorKey,
+            signature,
+            payload,
+            auxData,
+        }));
+        return x.data;
     }
     async getRingCommitment(keys) {
-
+        const x = await this.executor.run(new Params({
+            method: Method.RingCommitment,
+            keys,
+        }));
+        return x.data;
     }
     async batchVerifyTicket(ringSize, commitment, ticketsData, contextLength) {
-
+        const x = await this.executor.run(new Params({
+            method: Method.BatchVerifyTickets,
+            ringSize,
+            commitment,
+            ticketsData,
+            contextLength,
+        }));
+        return x.data;
     }
 }
 
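In other words, `synchronous: true` keeps the previous in-process behavior (the module-level `worker` defined above), while `synchronous: false` spins up a thread pool sized to the host. A quick sketch of what each mode resolves to (illustrative only):

```js
// The two execution modes of BandernsatchWasm.new(); illustrative.
const inProcess = await BandernsatchWasm.new({ synchronous: true });
// -> wraps the module-level ConcurrentWorker; runs on the calling thread.
const pooled = await BandernsatchWasm.new({ synchronous: false });
// -> Executor with cpus()/2 eager workers, growing up to cpus() workers.
await pooled.destroy(); // terminates the pool's threads when done
```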
@@ -19611,7 +19888,7 @@ class Safrole {
     chainSpec;
     state;
     bandersnatch;
-    constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new()) {
+    constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
         this.chainSpec = chainSpec;
         this.state = state;
         this.bandersnatch = bandersnatch;
@@ -19989,7 +20266,7 @@ var SafroleSealError;
 const BANDERSNATCH_ZERO_KEY = bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque();
 class SafroleSeal {
     bandersnatch;
-    constructor(bandersnatch = BandernsatchWasm.new()) {
+    constructor(bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
         this.bandersnatch = bandersnatch;
     }
     /**
@@ -20301,8 +20578,8 @@ class PartiallyUpdatedState {
         this.stateUpdate.services.preimages.push(newUpdate);
     }
     updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
-        debug_check
-        debug_check
+        debug_check(items >= 0, `storageUtilisationCount has to be a positive number, got: ${items}`);
+        debug_check(bytes >= 0, `storageUtilisationBytes has to be a positive number, got: ${bytes}`);
         const overflowItems = !isU32(items);
         const overflowBytes = !isU64(bytes);
         // TODO [ToDr] this is not specified in GP, but it seems sensible.
@@ -20727,7 +21004,7 @@ class AccumulateExternalities {
         }
         // TODO [ToDr] Not sure if we should update the service info in that case,
         // but for now we let that case fall-through.
-        debug_check
+        debug_check(len === PreimageStatusKind.Unavailable);
     }
     // make sure we have enough balance for this update
     // https://graypaper.fluffylabs.dev/#/9a08063/381201381601?v=0.6.6
@@ -21223,7 +21500,7 @@ class Assurances {
             return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
         }
         prevValidatorIndex = assurance.validatorIndex;
-        debug_check
+        debug_check(bitfield.bitLength === coresCount, `Invalid bitfield length of ${bitfield.bitLength}`);
         const setBits = bitfield.indicesOfSetBits();
         for (const idx of setBits) {
             perCoreAssurances[idx] += 1;
@@ -23547,7 +23824,7 @@ class DeferredTransfers {
         transferStatistics.set(serviceId, { count: numbers_tryAsU32(transfers.length), gasUsed: common_tryAsServiceGas(consumedGas) });
         const [updatedState, checkpointedState] = partialState.getStateUpdates();
         currentStateUpdate = updatedState;
-        debug_check
+        debug_check(checkpointedState === null, "On transfer cannot invoke checkpoint.");
     }
     return result_Result.ok({
         // NOTE: we return only services, since it's impossible to update
@@ -23885,7 +24162,7 @@ const ENTROPY_BYTES = 32;
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
 function fisherYatesShuffle(arr, entropy) {
-    debug_check
+    debug_check(entropy.length === ENTROPY_BYTES, `Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`);
     const n = arr.length;
     const randomNumbers = hashToNumberSequence(entropy, arr.length);
     const result = new Array(n);
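For context, the deterministic shuffle this check guards can be sketched as follows. This is an illustrative reimplementation, not the package's exact code; it assumes `randomNumbers` is the entropy-derived sequence produced by `hashToNumberSequence` above:

```js
// Entropy-driven Fisher-Yates shuffle (illustrative sketch).
function shuffle(arr, randomNumbers) {
    const input = [...arr];
    const result = [];
    for (let i = 0; i < arr.length; i++) {
        // deterministically pick one of the remaining elements...
        const index = randomNumbers[i] % input.length;
        // ...and move it to the output.
        result.push(input[index]);
        input.splice(index, 1);
    }
    return result;
}
```

Because every step depends only on `entropy`, all nodes shuffling the same input agree on the permutation.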
@@ -24732,7 +25009,7 @@ class Statistics {
         /** get statistics for the current epoch */
         const statistics = this.getStatistics(slot);
         const { current, cores, services } = statistics;
-        debug_check
+        debug_check(current[authorIndex] !== undefined, "authorIndex is out of bounds");
         /** One validator can produce at most one block per timeslot */
         const newBlocksCount = current[authorIndex].blocks + 1;
         current[authorIndex].blocks = numbers_tryAsU32(newBlocksCount);
@@ -24932,11 +25209,11 @@ class OnChain {
     authorization;
     // chapter 13: https://graypaper.fluffylabs.dev/#/68eaa1f/18b60118b601?v=0.6.4
     statistics;
-    constructor(chainSpec, state, blocks, hasher) {
+    constructor(chainSpec, state, blocks, hasher, { enableParallelSealVerification }) {
         this.chainSpec = chainSpec;
         this.state = state;
         this.hasher = hasher;
-        const bandersnatch = BandernsatchWasm.new();
+        const bandersnatch = BandernsatchWasm.new({ synchronous: !enableParallelSealVerification });
         this.statistics = new Statistics(chainSpec, state);
         this.safrole = new Safrole(chainSpec, state, bandersnatch);
         this.safroleSeal = new SafroleSeal(bandersnatch);
@@ -24954,16 +25231,16 @@ class OnChain {
         const sealState = this.safrole.getSafroleSealState(timeSlot);
         return await this.safroleSeal.verifyHeaderSeal(block.header.view(), sealState);
     }
-    async transition(block, headerHash, omitSealVerification = false) {
+    async transition(block, headerHash, preverifiedSeal = null, omitSealVerification = false) {
         const headerView = block.header.view();
         const header = block.header.materialize();
         const timeSlot = header.timeSlotIndex;
         // safrole seal
-        let newEntropyHash;
+        let newEntropyHash = preverifiedSeal;
         if (omitSealVerification) {
             newEntropyHash = hashBytes(header.seal).asOpaque();
         }
-
+        if (newEntropyHash === null) {
             const sealResult = await this.verifySeal(timeSlot, block);
             if (sealResult.isError) {
                 return stfError(StfErrorKind.SafroleSeal, sealResult);
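To summarize the precedence in `transition` above: a caller-supplied `preverifiedSeal` is used as-is, `omitSealVerification` overrides it with a plain hash of the seal, and only when neither applies does the slower in-line verification run. A condensed restatement of that control flow (illustrative; that `sealResult.ok` carries the entropy hash is inferred from the surrounding code):

```js
// Seal-handling precedence in transition(), condensed; illustrative.
async function resolveEntropyHash(stf, block, header, timeSlot, preverifiedSeal, omitSealVerification) {
    let newEntropyHash = preverifiedSeal;                   // 1. trust a pre-verified seal
    if (omitSealVerification) {
        newEntropyHash = hashBytes(header.seal).asOpaque(); // 2. or skip verification entirely
    }
    if (newEntropyHash === null) {                          // 3. otherwise verify in-line
        const sealResult = await stf.verifySeal(timeSlot, block);
        if (sealResult.isError) {
            return sealResult;                              // surfaced as an STF error above
        }
        newEntropyHash = sealResult.ok;
    }
    return newEntropyHash;
}
```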
@@ -25070,7 +25347,7 @@ class OnChain {
         assertEmpty(deferredTransfersRest);
         const accumulateRoot = await this.accumulateOutput.transition({ accumulationOutputLog });
         // recent history
-        const recentHistoryUpdate = this.recentHistory.transition({
+        const recentHistoryUpdate = await this.recentHistory.transition({
             partial: recentHistoryPartialUpdate,
             headerHash,
             accumulateRoot,
@@ -25150,7 +25427,6 @@ function checkOffendersMatch(offendersMark, headerOffendersMark) {
 
 
 
-
 var ImporterErrorKind;
 (function (ImporterErrorKind) {
     ImporterErrorKind[ImporterErrorKind["Verifier"] = 0] = "Verifier";
@@ -25178,28 +25454,29 @@ class Importer {
             throw new Error(`Unable to load best state from header hash: ${currentBestHeaderHash}.`);
         }
         this.verifier = new BlockVerifier(hasher, blocks);
-        this.stf = new OnChain(spec, state, blocks, hasher);
+        this.stf = new OnChain(spec, state, blocks, hasher, { enableParallelSealVerification: true });
         this.state = state;
         this.currentHash = currentBestHeaderHash;
         logger.info(`😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`);
     }
-
-
-
-
-
-
-
-        this.logger.
-        return
-    }
-
-
-
-
-
+    /** Attempt to pre-verify the seal to speed up importing. */
+    async preverifySeal(timeSlot, block) {
+        try {
+            const res = await this.stf.verifySeal(timeSlot, block);
+            if (res.isOk) {
+                return res.ok;
+            }
+            this.logger.warn(`Unable to pre-verify the seal: ${resultToString(res)}`);
+            return null;
+        }
+        catch (e) {
+            this.logger.warn(`Error while trying to pre-verify the seal: ${e}`);
+            return null;
+        }
+    }
+    async importBlock(block, preverifiedSeal, omitSealVerification = false) {
         const logger = this.logger;
-        logger.log(
+        logger.log(`🧱 Attempting to import a new block ${preverifiedSeal !== null ? "(seal preverified)" : ""}`);
         const timerVerify = measure("import:verify");
         const hash = await this.verifier.verifyBlock(block);
         logger.log(timerVerify());
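`preverifySeal` exists so that seal verification can overlap with other import work. A minimal sketch of the intended call pattern, assuming an `importer` instance and a decoded `block`/`timeSlot` (illustrative; the real pipelining lives in the `ImportQueue` wiring in `main()` further down):

```js
// Start seal verification on the worker pool without awaiting it yet.
const sealPromise = importer.preverifySeal(timeSlot, block);
// ...meanwhile the previous block can still be importing...
// preverifySeal resolves to the entropy hash, or null on any failure,
// in which case importBlock falls back to in-line verification.
const res = await importer.importBlock(block, await sealPromise);
```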
@@ -25224,7 +25501,7 @@ class Importer {
         const headerHash = hash.ok;
         logger.log(`🧱 Verified block: Got hash ${headerHash} for block at slot ${timeSlot}.`);
         const timerStf = measure("import:stf");
-        const res = await this.stf.transition(block, headerHash, omitSealVerification);
+        const res = await this.stf.transition(block, headerHash, preverifiedSeal, omitSealVerification);
         logger.log(timerStf());
         if (res.isError) {
             return importerError(ImporterErrorKind.Stf, res);
@@ -25274,19 +25551,6 @@ class Importer {
         return stateEntries ?? null;
     }
 }
-/**
- * Attempt to safely extract timeslot of a block.
- *
- * NOTE: it may fail if encoding is invalid.
- */
-function extractTimeSlot(block) {
-    try {
-        return block.header.view().timeSlotIndex.materialize();
-    }
-    catch {
-        return tryAsTimeSlot(2 ** 32 - 1);
-    }
-}
 
 ;// CONCATENATED MODULE: ./workers/generic/finished.ts
 
@@ -25550,6 +25814,7 @@ class ImporterReady extends State {
             response: rootHash === null ? bytes_Bytes.zero(hash_HASH_SIZE).raw : rootHash.raw,
         };
     }
+    // NOTE [ToDr] This should rather be using the import queue, instead of going directly.
     async importBlock(block) {
         if (this.importer === null) {
             state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
@@ -25561,13 +25826,17 @@ class ImporterReady extends State {
         if (block instanceof Uint8Array) {
             const config = this.getConfig();
             const blockView = decoder_Decoder.decodeObject(Block.Codec.View, block, config.chainSpec);
+            const headerView = blockView.header.view();
+            const timeSlot = headerView.timeSlotIndex.materialize();
             let response;
             try {
-                const res = await this.importer.importBlock(blockView, config.omitSealVerification);
+                const res = await this.importer.importBlock(blockView, null, config.omitSealVerification);
                 if (res.isOk) {
-
+                    state_machine_logger.info(`🧊 Best block: #${timeSlot} (${res.ok.hash})`);
+                    response = result_Result.ok(this.importer.getBestStateRootHash() ?? bytes_Bytes.zero(hash_HASH_SIZE).asOpaque());
                 }
                 else {
+                    state_machine_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(res)}`);
                     response = result_Result.error(resultToString(res));
                 }
             }
@@ -25615,6 +25884,8 @@ class ImporterReady extends State {
 
 
 
+
+
 const importer_logger = Logger.new(import.meta.filename, "importer");
 if (!external_node_worker_threads_namespaceObject.isMainThread) {
     Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
@@ -25631,6 +25902,7 @@ async function createImporter(config) {
     const importer = new Importer(config.chainSpec, hasher, importer_logger, blocks, states);
     return {
         lmdb,
+        blocks,
         importer,
     };
 }
@@ -25645,27 +25917,65 @@ async function main(channel) {
     importer_logger.info(`📥 Importer starting ${channel.currentState()}`);
     // Await the configuration object
     const ready = await channel.waitForState("ready(importer)");
-    let closeDb = async () => { };
     const finished = await ready.doUntil("finished", async (worker, port) => {
         const config = worker.getConfig();
-        const {
-        closeDb = async () => {
-            await lmdb.close();
-        };
+        const { blocks, importer } = await createImporter(config);
         // TODO [ToDr] this is shit, since we have circular dependency.
         worker.setImporter(importer);
         importer_logger.info("📥 Importer waiting for blocks.");
+        // TODO [ToDr] back pressure?
+        let isProcessing = false;
+        const importingQueue = new ImportQueue(config.chainSpec, importer);
         worker.onBlock.on(async (block) => {
-            const
-
-
+            const details = ImportQueue.getBlockDetails(block);
+            // ignore invalid blocks.
+            if (details.isError) {
+                importer_logger.trace("🧊 Ignoring invalid block.");
+                return;
+            }
+            // ignore already known blocks
+            if (blocks.getHeader(details.ok.hash) !== null) {
+                importer_logger.trace(`🧊 Already imported block: #${details.ok.data.timeSlot}.`);
+                return;
+            }
+            const importResult = importingQueue.push(details.ok);
+            // ignore blocks that are already queued
+            if (importResult.isError) {
+                importer_logger.trace(`🧊 Already queued block: #${details.ok.data.timeSlot}.`);
+                return;
+            }
+            importer_logger.log(`🧊 Queued block: #${details.ok.data.timeSlot} (skip seal: ${config.omitSealVerification})`);
+            if (isProcessing) {
+                return;
+            }
+            isProcessing = true;
+            try {
+                for (;;) {
+                    const entry = importingQueue.shift();
+                    if (entry === undefined) {
+                        return;
+                    }
+                    const { block, seal, timeSlot } = entry;
+                    const timer = measure("importBlock");
+                    const maybeBestHeader = await importer.importBlock(block, await seal, config.omitSealVerification);
+                    if (maybeBestHeader.isOk) {
+                        const bestHeader = maybeBestHeader.ok;
+                        worker.announce(port, bestHeader);
+                        importer_logger.info(`🧊 Best block: #${bestHeader.data.timeSlotIndex.materialize()} (${bestHeader.hash})`);
+                    }
+                    else {
+                        importer_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
+                    }
+                    importer_logger.log(timer());
+                }
+            }
+            finally {
+                isProcessing = false;
             }
         });
         await wasmPromise;
     });
     importer_logger.info("📥 Importer finished. Closing channel.");
-    // close the database
-    await closeDb();
     // Close the comms to gracefully close the app.
     finished.currentState().close(channel);
 }
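
The `onBlock` handler above implements a single-consumer drain: any number of callbacks may enqueue blocks concurrently, but only the first one to flip `isProcessing` drains the queue, and the `await seal` inside the loop suggests the queue stores a seal pre-verification promise started at enqueue time, so verification of later seals overlaps with the import of earlier blocks. Condensed to its skeleton (illustrative, with the `ImportQueue` API taken at face value from the code above):

```js
// Single-consumer drain pattern (illustrative skeleton of the loop above).
async function onBlockReceived(queue, importer, config, state) {
    if (state.isProcessing) {
        return; // another invocation is already draining the queue
    }
    state.isProcessing = true;
    try {
        for (;;) {
            const entry = queue.shift();
            if (entry === undefined) {
                return; // queue exhausted; the consumer role is released
            }
            // `seal` resolves to a pre-verified entropy hash, or null.
            await importer.importBlock(entry.block, await entry.seal, config.omitSealVerification);
        }
    }
    finally {
        state.isProcessing = false; // always release, even if an import throws
    }
}
```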