@typeberry/jam 0.1.0-3c30204 → 0.1.0-eb00e84
- package/bandersnatch/6b655f8772c01b768329.js +1 -0
- package/bandersnatch/ccf8ada94096a8f232f5.js +1 -0
- package/bandersnatch/e2fdc1b646378dd96eda.js +1 -0
- package/bandersnatch/index.js +3037 -0
- package/bandersnatch/index.js.map +1 -0
- package/bandersnatch/package.json +3 -0
- package/bandersnatch/sourcemap-register.cjs +1 -0
- package/block-generator/index.js +0 -2
- package/block-generator/index.js.map +1 -1
- package/bootstrap-bandersnatch.mjs +162 -0
- package/bootstrap-bandersnatch.mjs.map +1 -0
- package/importer/bootstrap-bandersnatch.mjs.map +1 -0
- package/importer/index.js +460 -65
- package/importer/index.js.map +1 -1
- package/index.js +457 -65
- package/index.js.map +1 -1
- package/jam-network/index.js +0 -2
- package/jam-network/index.js.map +1 -1
- package/package.json +1 -1
package/importer/index.js
CHANGED
@@ -5695,7 +5695,6 @@ async function verifyBatch(input) {
 
 ;// CONCATENATED MODULE: ./packages/core/hash/hash.ts
 
-
 /**
  * Size of the output of the hash functions.
  *
@@ -5705,7 +5704,6 @@ async function verifyBatch(input) {
 const hash_HASH_SIZE = 32;
 /** A hash without last byte (useful for trie representation). */
 const TRUNCATED_HASH_SIZE = 31;
-const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE);
 /**
  * Container for some object with a hash that is related to this object.
  *
@@ -8538,7 +8536,7 @@ const common_tryAsServiceGas = (v) => opaque_asOpaqueType(numbers_tryAsU64(v));
 /** Attempt to convert a number into `CoreIndex`. */
 const common_tryAsCoreIndex = (v) => opaque_asOpaqueType(numbers_tryAsU16(v));
 /** Attempt to convert a number into `Epoch`. */
-const tryAsEpoch = (v) =>
+const tryAsEpoch = (v) => opaque_asOpaqueType(numbers_tryAsU32(v));
 function tryAsPerValidator(array, spec) {
     debug_check(array.length === spec.validatorsCount, `Invalid per-validator array length. Expected ${spec.validatorsCount}, got: ${array.length}`);
     return sized_array_asKnownSize(array);
@@ -18922,6 +18920,88 @@ class PvmExecutor {
 
 
 
+;// CONCATENATED MODULE: ./workers/importer/import-queue.ts
+
+
+
+
+
+class ImportQueue {
+    spec;
+    importer;
+    toImport = SortedArray.fromSortedArray((a, b) => {
+        const diff = a.timeSlot - b.timeSlot;
+        if (diff < 0) {
+            return Ordering.Greater;
+        }
+        if (diff > 0) {
+            return Ordering.Less;
+        }
+        return Ordering.Equal;
+    });
+    queuedBlocks = HashSet.new();
+    lastEpoch = tryAsEpoch(2 ** 32 - 1);
+    constructor(spec, importer) {
+        this.spec = spec;
+        this.importer = importer;
+    }
+    isCurrentEpoch(timeSlot) {
+        const epoch = Math.floor(timeSlot / this.spec.epochLength);
+        return this.lastEpoch === epoch;
+    }
+    startPreverification() {
+        for (const entry of this.toImport) {
+            if (this.isCurrentEpoch(entry.timeSlot)) {
+                entry.seal = this.importer.preverifySeal(entry.timeSlot, entry.block);
+            }
+        }
+    }
+    static getBlockDetails(block) {
+        let encodedHeader;
+        let timeSlot;
+        try {
+            encodedHeader = block.header.encoded();
+            timeSlot = block.header.view().timeSlotIndex.materialize();
+        }
+        catch {
+            return result_Result.error("invalid");
+        }
+        const headerHash = hashBytes(encodedHeader).asOpaque();
+        return result_Result.ok(new WithHash(headerHash, { block, timeSlot }));
+    }
+    push(details) {
+        const headerHash = details.hash;
+        if (this.queuedBlocks.has(headerHash)) {
+            return result_Result.error("already queued");
+        }
+        const { timeSlot, block } = details.data;
+        const entry = {
+            headerHash,
+            timeSlot,
+            block,
+            seal: this.isCurrentEpoch(timeSlot) ? this.importer.preverifySeal(timeSlot, block) : Promise.resolve(null),
+        };
+        this.toImport.insert(entry);
+        this.queuedBlocks.insert(headerHash);
+        return result_Result.ok(result_OK);
+    }
+    shift() {
+        const entry = this.toImport.pop();
+        if (entry !== undefined) {
+            this.queuedBlocks.delete(entry.headerHash);
+            const blockEpoch = Math.floor(entry.timeSlot / this.spec.epochLength);
+            const hasEpochChanged = this.lastEpoch !== blockEpoch;
+            this.lastEpoch = tryAsEpoch(blockEpoch);
+            // currently removed block is changing the epoch, so fire up
+            // preverification for the following blocks.
+            if (hasEpochChanged) {
+                this.startPreverification();
+            }
+        }
+        return entry;
+    }
+}
+
 ;// CONCATENATED MODULE: ./packages/jam/transition/block-verifier.ts
 
 
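The comparator above is deliberately inverted (it returns `Ordering.Greater` when `a.timeSlot` is smaller), so the sorted array keeps the lowest slot at its tail and the `pop()` inside `shift()` always hands back the earliest queued block. A minimal sketch of the same trick, with a plain `Array` standing in for `SortedArray` and numeric comparator results standing in for `Ordering`:

```js
// Minimal sketch: the inverted comparator keeps the lowest time slot at the
// tail, so pop() (assumed to take from the tail, as SortedArray appears to)
// always yields the earliest queued block.
const queue = [{ timeSlot: 7 }, { timeSlot: 3 }, { timeSlot: 5 }];
queue.sort((a, b) => b.timeSlot - a.timeSlot); // descending: lowest slot last
console.log(queue.pop().timeSlot); // 3 — imported first
console.log(queue.pop().timeSlot); // 5
```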
@@ -18935,7 +19015,7 @@ var BlockVerifierError;
     BlockVerifierError[BlockVerifierError["InvalidStateRoot"] = 4] = "InvalidStateRoot";
     BlockVerifierError[BlockVerifierError["AlreadyImported"] = 5] = "AlreadyImported";
 })(BlockVerifierError || (BlockVerifierError = {}));
-const
+const ZERO_HASH = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
 class BlockVerifier {
     hasher;
     blocks;
@@ -18955,7 +19035,7 @@ class BlockVerifier {
         // https://graypaper.fluffylabs.dev/#/cc517d7/0c9d000c9d00?v=0.6.5
         const parentHash = headerView.parentHeaderHash.materialize();
         // importing genesis block
-        if (!parentHash.isEqualTo(
+        if (!parentHash.isEqualTo(ZERO_HASH)) {
             const parentBlock = this.blocks.getHeader(parentHash);
             if (parentBlock === null) {
                 return result_Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
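The relocated `ZERO_HASH` (now typed via `.asOpaque()` and local to its only consumer) encodes the genesis convention used in the check above: a parent hash of all zero bytes means there is no parent to resolve. A rough, hypothetical equivalent, assuming `Bytes.zero` yields zero-filled bytes and `isEqualTo` compares contents:

```js
// Rough equivalent of `parentHash.isEqualTo(ZERO_HASH)` (hypothetical helper,
// not part of the package): a parent hash of 32 zero bytes marks genesis.
const HASH_SIZE = 32;
const isGenesis = (parentHash) =>
  parentHash.length === HASH_SIZE && parentHash.every((byte) => byte === 0);

console.log(isGenesis(new Uint8Array(32))); // true — no parent lookup needed
```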
@@ -19468,22 +19548,304 @@ async function verifyTickets(bandersnatch, numberOfValidators, epochRoot, ticket
     }));
 }
 
-;// CONCATENATED MODULE:
+;// CONCATENATED MODULE: external "node:os"
+const external_node_os_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:os");
+var external_node_os_default = /*#__PURE__*/__nccwpck_require__.n(external_node_os_namespaceObject);
+;// CONCATENATED MODULE: ./packages/core/concurrent/parent.ts
+
+
+// Amount of tasks in the queue that will trigger creation of new worker thread.
+// NOTE this might need to be configurable in the future.
+const QUEUE_SIZE_WORKER_THRESHOLD = 5;
+/** Execution pool manager. */
+class Executor {
+    workers;
+    maxWorkers;
+    workerPath;
+    /** Initialize a new concurrent executor given a path to the worker. */
+    static async initialize(workerPath, options) {
+        debug_check(options.maxWorkers > 0, "Max workers has to be positive.");
+        debug_check(options.minWorkers <= options.maxWorkers, "Min workers has to be lower or equal to max workers.");
+        const workers = [];
+        for (let i = 0; i < options.minWorkers; i++) {
+            workers.push(await initWorker(workerPath));
+        }
+        return new Executor(workers, options.maxWorkers, workerPath);
+    }
+    // keeps track of the indices of worker threads that are currently free and available to execute tasks
+    freeWorkerIndices = [];
+    taskQueue = [];
+    isDestroyed = false;
+    isWorkerInitializing = false;
+    constructor(workers, maxWorkers, workerPath) {
+        this.workers = workers;
+        this.maxWorkers = maxWorkers;
+        this.workerPath = workerPath;
+        // initial free workers.
+        for (let i = 0; i < workers.length; i++) {
+            this.freeWorkerIndices.push(i);
+        }
+    }
+    /** Attempt to initialize a new worker. */
+    async initNewWorker(onSuccess = () => { }) {
+        if (this.workers.length >= this.maxWorkers) {
+            // biome-ignore lint/suspicious/noConsole: warning
+            console.warn(`Task queue has ${this.taskQueue.length} pending items and we can't init any more workers.`);
+            return;
+        }
+        if (this.isWorkerInitializing) {
+            return;
+        }
+        this.isWorkerInitializing = true;
+        this.workers.push(await initWorker(this.workerPath));
+        this.freeWorkerIndices.push(this.workers.length - 1);
+        this.isWorkerInitializing = false;
+        onSuccess();
+    }
+    /** Terminate all workers and clear the executor. */
+    async destroy() {
+        for (const worker of this.workers) {
+            worker.port.close();
+            await worker.worker.terminate();
+        }
+        this.workers.length = 0;
+        this.isDestroyed = true;
+    }
+    /** Execute a task with given parameters. */
+    async run(params) {
+        return new Promise((resolve, reject) => {
+            if (this.isDestroyed) {
+                reject("pool destroyed");
+                return;
+            }
+            this.taskQueue.push({
+                params,
+                resolve,
+                reject,
+            });
+            this.processEntryFromTaskQueue();
+        });
+    }
+    /** Process single element from the task queue. */
+    processEntryFromTaskQueue() {
+        const freeWorker = this.freeWorkerIndices.pop();
+        // no free workers available currently,
+        // we will retry when one of the tasks completes.
+        if (freeWorker === undefined) {
+            if (this.taskQueue.length > QUEUE_SIZE_WORKER_THRESHOLD) {
+                this.initNewWorker(() => {
+                    // process an entry in this newly initialized worker.
+                    this.processEntryFromTaskQueue();
+                });
+            }
+            return;
+        }
+        const task = this.taskQueue.pop();
+        // no tasks in the queue
+        if (task === undefined) {
+            this.freeWorkerIndices.push(freeWorker);
+            return;
+        }
+        const worker = this.workers[freeWorker];
+        worker.runTask(task, () => {
+            // mark the worker as available again
+            this.freeWorkerIndices.push(freeWorker);
+            // and continue processing the queue
+            this.processEntryFromTaskQueue();
+        });
+    }
+}
+async function initWorker(workerPath) {
+    // create a worker and initialize communication channel
+    const { port1, port2 } = new MessageChannel();
+    const workerThread = new external_node_worker_threads_namespaceObject.Worker(workerPath, {});
+    workerThread.postMessage(port1, [port1]);
+    // wait for the worker to start
+    await new Promise((resolve, reject) => {
+        workerThread.once("message", resolve);
+        workerThread.once("error", reject);
+    });
+    // make sure the threads don't prevent the program from stopping.
+    workerThread.unref();
+    return new WorkerChannel(workerThread, port2);
+}
+class WorkerChannel {
+    worker;
+    port;
+    constructor(worker, port) {
+        this.worker = worker;
+        this.port = port;
+    }
+    runTask(task, onFinish) {
+        const message = {
+            params: task.params,
+        };
+        // when we receive a response, make sure to process it
+        this.port.once("message", (e) => {
+            if (e.isOk) {
+                task.resolve(e.ok);
+            }
+            else {
+                task.reject(new Error(e.error));
+            }
+            onFinish();
+        });
+        // send the task to work on.
+        this.port.postMessage(message, message.params.getTransferList());
+    }
+}
+
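One detail worth flagging in `Executor`: both `taskQueue.push` and `taskQueue.pop` work on the array's tail, so a backlog is serviced LIFO rather than FIFO; every task still runs eventually, but ordering is not guaranteed. The intended driving pattern looks roughly like this (the option names come from the diff; everything else is assumed):

```js
// Sketch of driving the pool. `params` must expose getTransferList(), since
// WorkerChannel.runTask transfers those buffers along with the message.
const executor = await Executor.initialize("./path/to/worker.mjs", {
  minWorkers: 2, // threads spawned eagerly by initialize()
  maxWorkers: 8, // hard cap; past it the queue simply grows
});
const result = await executor.run(params); // resolves once a worker replies
await executor.destroy(); // close ports and terminate the threads
```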
19698
|
+
;// CONCATENATED MODULE: ./packages/core/concurrent/worker.ts
|
|
19699
|
+
|
|
19700
|
+
|
|
19701
|
+
/** A in-worker abstraction. */
|
|
19702
|
+
class ConcurrentWorker {
|
|
19703
|
+
runInternal;
|
|
19704
|
+
state;
|
|
19705
|
+
static new(run, state) {
|
|
19706
|
+
return new ConcurrentWorker(run, state);
|
|
19707
|
+
}
|
|
19708
|
+
constructor(runInternal, state) {
|
|
19709
|
+
this.runInternal = runInternal;
|
|
19710
|
+
this.state = state;
|
|
19711
|
+
}
|
|
19712
|
+
listenToParentPort() {
|
|
19713
|
+
if (external_node_worker_threads_namespaceObject.parentPort === null) {
|
|
19714
|
+
throw new Error("This method is meant to be run inside a worker thread!");
|
|
19715
|
+
}
|
|
19716
|
+
external_node_worker_threads_namespaceObject.parentPort.once("close", () => {
|
|
19717
|
+
process.exit(0);
|
|
19718
|
+
});
|
|
19719
|
+
external_node_worker_threads_namespaceObject.parentPort.once("message", (port) => {
|
|
19720
|
+
this.listenTo(port);
|
|
19721
|
+
// send back readiness signal.
|
|
19722
|
+
external_node_worker_threads_namespaceObject.parentPort?.postMessage("ready");
|
|
19723
|
+
});
|
|
19724
|
+
}
|
|
19725
|
+
listenTo(port) {
|
|
19726
|
+
port.once("close", () => {
|
|
19727
|
+
port.removeAllListeners();
|
|
19728
|
+
process.exit(0);
|
|
19729
|
+
});
|
|
19730
|
+
port.on("message", (ev) => {
|
|
19731
|
+
const { params } = ev;
|
|
19732
|
+
this.run(params)
|
|
19733
|
+
.then((result) => {
|
|
19734
|
+
const response = result_Result.ok(result);
|
|
19735
|
+
port.postMessage(response, result.getTransferList());
|
|
19736
|
+
})
|
|
19737
|
+
.catch((e) => {
|
|
19738
|
+
const response = result_Result.error(`${e}`);
|
|
19739
|
+
port.postMessage(response, []);
|
|
19740
|
+
});
|
|
19741
|
+
});
|
|
19742
|
+
}
|
|
19743
|
+
async run(params) {
|
|
19744
|
+
return await this.runInternal(params, this.state);
|
|
19745
|
+
}
|
|
19746
|
+
async destroy() { }
|
|
19747
|
+
}
|
|
19748
|
+
|
|
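The handshake implied by `initWorker` and `ConcurrentWorker`: the parent posts a `MessagePort` as the first message, and the worker wires its handler to that port before answering `"ready"` on `parentPort`. A sketch of a worker entry module built on this (with a hypothetical `handle` function; `isMainThread` is the standard `node:worker_threads` flag):

```js
// Sketch of a worker entry module built on ConcurrentWorker. handle() is a
// hypothetical task handler; its result must expose getTransferList().
import { isMainThread } from "node:worker_threads";

const worker = ConcurrentWorker.new(async (params, state) => handle(params), null);

if (!isMainThread) {
  worker.listenToParentPort(); // wires up the port, then replies "ready"
}
```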
+;// CONCATENATED MODULE: ./packages/core/concurrent/index.ts
+
 
+
+;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/params.ts
+var Method;
+(function (Method) {
+    Method[Method["RingCommitment"] = 0] = "RingCommitment";
+    Method[Method["BatchVerifyTickets"] = 1] = "BatchVerifyTickets";
+    Method[Method["VerifySeal"] = 2] = "VerifySeal";
+})(Method || (Method = {}));
+class params_Response {
+    data;
+    constructor(data) {
+        this.data = data;
+    }
+    getTransferList() {
+        return [this.data.buffer];
+    }
+}
+class Params {
+    params;
+    constructor(params) {
+        this.params = params;
+    }
+    getTransferList() {
+        return [];
+    }
+}
+
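`params_Response.getTransferList()` returns the result buffer while `Params` transfers nothing, so requests are structured-cloned but responses are moved. The distinction matters because a transferred `ArrayBuffer` is detached on the sending side instead of copied. This is standard `node:worker_threads` behavior, shown here in isolation:

```js
// Self-contained demo of transfer-list semantics (plain Node, no package code).
import { MessageChannel } from "node:worker_threads";

const { port1, port2 } = new MessageChannel();
const data = new Uint8Array([1, 2, 3]);

port2.on("message", (msg) => {
  console.log(msg.data.byteLength); // 3 — the bytes arrived on the other side
  port2.close();                    // let the process exit
});

port1.postMessage({ data }, [data.buffer]); // move, don't copy
console.log(data.byteLength); // 0 — detached locally after the transfer
```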
+;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/worker.ts
+
+
+
+
+const worker = ConcurrentWorker.new(async (p) => {
+    await initAll();
+    const params = p.params;
+    const method = params.method;
+    if (method === Method.RingCommitment) {
+        return Promise.resolve(new params_Response(bandersnatch_exports.ring_commitment(params.keys)));
+    }
+    if (method === Method.BatchVerifyTickets) {
+        return Promise.resolve(new params_Response(bandersnatch_exports.batch_verify_tickets(params.ringSize, params.commitment, params.ticketsData, params.contextLength)));
+    }
+    if (method === Method.VerifySeal) {
+        return Promise.resolve(new params_Response(bandersnatch_exports.verify_seal(params.authorKey, params.signature, params.payload, params.auxData)));
+    }
+    debug_assertNever(method);
+}, null);
+
|
+
;// CONCATENATED MODULE: ./packages/jam/safrole/bandersnatch-wasm/index.ts
|
|
19801
|
+
|
|
19802
|
+
|
|
19803
|
+
|
|
19804
|
+
|
|
19805
|
+
const workerFile = __nccwpck_require__.ab + "bootstrap-bandersnatch.mjs";
|
|
19473
19806
|
class BandernsatchWasm {
|
|
19474
|
-
|
|
19475
|
-
|
|
19476
|
-
|
|
19477
|
-
|
|
19807
|
+
executor;
|
|
19808
|
+
constructor(executor) {
|
|
19809
|
+
this.executor = executor;
|
|
19810
|
+
}
|
|
19811
|
+
destroy() {
|
|
19812
|
+
return this.executor.destroy();
|
|
19813
|
+
}
|
|
19814
|
+
static async new({ synchronous }) {
|
|
19815
|
+
const workers = external_node_os_default().cpus().length;
|
|
19816
|
+
return new BandernsatchWasm(!synchronous
|
|
19817
|
+
? await Executor.initialize(workerFile, {
|
|
19818
|
+
minWorkers: Math.max(1, Math.floor(workers / 2)),
|
|
19819
|
+
maxWorkers: workers,
|
|
19820
|
+
})
|
|
19821
|
+
: worker);
|
|
19478
19822
|
}
|
|
19479
19823
|
async verifySeal(authorKey, signature, payload, auxData) {
|
|
19480
|
-
|
|
19824
|
+
const x = await this.executor.run(new Params({
|
|
19825
|
+
method: Method.VerifySeal,
|
|
19826
|
+
authorKey,
|
|
19827
|
+
signature,
|
|
19828
|
+
payload,
|
|
19829
|
+
auxData,
|
|
19830
|
+
}));
|
|
19831
|
+
return x.data;
|
|
19481
19832
|
}
|
|
19482
19833
|
async getRingCommitment(keys) {
|
|
19483
|
-
|
|
19834
|
+
const x = await this.executor.run(new Params({
|
|
19835
|
+
method: Method.RingCommitment,
|
|
19836
|
+
keys,
|
|
19837
|
+
}));
|
|
19838
|
+
return x.data;
|
|
19484
19839
|
}
|
|
19485
19840
|
async batchVerifyTicket(ringSize, commitment, ticketsData, contextLength) {
|
|
19486
|
-
|
|
19841
|
+
const x = await this.executor.run(new Params({
|
|
19842
|
+
method: Method.BatchVerifyTickets,
|
|
19843
|
+
ringSize,
|
|
19844
|
+
commitment,
|
|
19845
|
+
ticketsData,
|
|
19846
|
+
contextLength,
|
|
19847
|
+
}));
|
|
19848
|
+
return x.data;
|
|
19487
19849
|
}
|
|
19488
19850
|
}
|
|
19489
19851
|
|
|
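`BandernsatchWasm.new({ synchronous: true })` therefore keeps the old in-process behavior by handing the module-level `ConcurrentWorker` straight to the class (it exposes the same `run`/`destroy` surface as `Executor`), while `synchronous: false` boots the thread pool over `bootstrap-bandersnatch.mjs`. Usage, sketched with placeholder arguments:

```js
// Sketch: two modes behind one call surface (arguments are placeholders).
const parallel = await BandernsatchWasm.new({ synchronous: false }); // thread pool
const entropy = await parallel.verifySeal(authorKey, signature, payload, auxData);
await parallel.destroy(); // terminates pool threads; a no-op for the in-process worker
```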
@@ -19526,7 +19888,7 @@ class Safrole {
     chainSpec;
     state;
     bandersnatch;
-    constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new()) {
+    constructor(chainSpec, state, bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
         this.chainSpec = chainSpec;
         this.state = state;
         this.bandersnatch = bandersnatch;
@@ -19904,7 +20266,7 @@ var SafroleSealError;
 const BANDERSNATCH_ZERO_KEY = bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque();
 class SafroleSeal {
     bandersnatch;
-    constructor(bandersnatch = BandernsatchWasm.new()) {
+    constructor(bandersnatch = BandernsatchWasm.new({ synchronous: true })) {
         this.bandersnatch = bandersnatch;
     }
     /**
@@ -24847,11 +25209,11 @@ class OnChain {
     authorization;
     // chapter 13: https://graypaper.fluffylabs.dev/#/68eaa1f/18b60118b601?v=0.6.4
     statistics;
-    constructor(chainSpec, state, blocks, hasher) {
+    constructor(chainSpec, state, blocks, hasher, { enableParallelSealVerification }) {
         this.chainSpec = chainSpec;
         this.state = state;
         this.hasher = hasher;
-        const bandersnatch = BandernsatchWasm.new();
+        const bandersnatch = BandernsatchWasm.new({ synchronous: !enableParallelSealVerification });
         this.statistics = new Statistics(chainSpec, state);
         this.safrole = new Safrole(chainSpec, state, bandersnatch);
         this.safroleSeal = new SafroleSeal(bandersnatch);
@@ -24869,16 +25231,16 @@ class OnChain {
         const sealState = this.safrole.getSafroleSealState(timeSlot);
         return await this.safroleSeal.verifyHeaderSeal(block.header.view(), sealState);
     }
-    async transition(block, headerHash, omitSealVerification = false) {
+    async transition(block, headerHash, preverifiedSeal = null, omitSealVerification = false) {
         const headerView = block.header.view();
         const header = block.header.materialize();
         const timeSlot = header.timeSlotIndex;
         // safrole seal
-        let newEntropyHash;
+        let newEntropyHash = preverifiedSeal;
         if (omitSealVerification) {
             newEntropyHash = hashBytes(header.seal).asOpaque();
         }
-
+        if (newEntropyHash === null) {
             const sealResult = await this.verifySeal(timeSlot, block);
             if (sealResult.isError) {
                 return stfError(StfErrorKind.SafroleSeal, sealResult);
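The resulting precedence inside `transition()`: an explicit `omitSealVerification` wins (the seal is merely hashed), a non-null `preverifiedSeal` is reused as-is, and only when both are absent does the inline verification run. Condensed into a hypothetical helper (not a function in the package):

```js
// Hypothetical condensation of the logic above, with the dependencies injected.
// Precedence: omitSealVerification > preverifiedSeal > inline verifySeal().
async function resolveSealEntropy({ omitSealVerification, preverifiedSeal, hashSeal, verifySeal }) {
  if (omitSealVerification) return hashSeal();          // trust the seal, just hash it
  if (preverifiedSeal !== null) return preverifiedSeal; // reuse the queue's early result
  return await verifySeal();                            // fall back to inline verification
}
```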
@@ -24985,7 +25347,7 @@ class OnChain {
         assertEmpty(deferredTransfersRest);
         const accumulateRoot = await this.accumulateOutput.transition({ accumulationOutputLog });
         // recent history
-        const recentHistoryUpdate = this.recentHistory.transition({
+        const recentHistoryUpdate = await this.recentHistory.transition({
             partial: recentHistoryPartialUpdate,
             headerHash,
             accumulateRoot,
@@ -25065,7 +25427,6 @@ function checkOffendersMatch(offendersMark, headerOffendersMark) {
 
 
 
-
 var ImporterErrorKind;
 (function (ImporterErrorKind) {
     ImporterErrorKind[ImporterErrorKind["Verifier"] = 0] = "Verifier";
@@ -25093,28 +25454,29 @@ class Importer {
             throw new Error(`Unable to load best state from header hash: ${currentBestHeaderHash}.`);
         }
         this.verifier = new BlockVerifier(hasher, blocks);
-        this.stf = new OnChain(spec, state, blocks, hasher);
+        this.stf = new OnChain(spec, state, blocks, hasher, { enableParallelSealVerification: true });
         this.state = state;
         this.currentHash = currentBestHeaderHash;
         logger.info(`😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`);
     }
-
-
-
-
-
-
-
-        this.logger.
-        return
-    }
-
-
-
-
-
+    /** Attempt to pre-verify the seal to speed up importing. */
+    async preverifySeal(timeSlot, block) {
+        try {
+            const res = await this.stf.verifySeal(timeSlot, block);
+            if (res.isOk) {
+                return res.ok;
+            }
+            this.logger.warn(`Unable to pre-verify the seal: ${resultToString(res)}`);
+            return null;
+        }
+        catch (e) {
+            this.logger.warn(`Error while trying to pre-verify the seal: ${e}`);
+            return null;
+        }
+    }
+    async importBlock(block, preverifiedSeal, omitSealVerification = false) {
         const logger = this.logger;
-        logger.log(
+        logger.log(`🧱 Attempting to import a new block ${preverifiedSeal !== null ? "(seal preverified)" : ""}`);
         const timerVerify = measure("import:verify");
         const hash = await this.verifier.verifyBlock(block);
         logger.log(timerVerify());
@@ -25139,7 +25501,7 @@ class Importer {
         const headerHash = hash.ok;
         logger.log(`🧱 Verified block: Got hash ${headerHash} for block at slot ${timeSlot}.`);
         const timerStf = measure("import:stf");
-        const res = await this.stf.transition(block, headerHash, omitSealVerification);
+        const res = await this.stf.transition(block, headerHash, preverifiedSeal, omitSealVerification);
         logger.log(timerStf());
         if (res.isError) {
             return importerError(ImporterErrorKind.Stf, res);
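`preverifySeal` is deliberately infallible from the caller's side: a failed check or a thrown error degrades to `null`, which `transition()` treats as "verify inline". That is what makes the fire-early / await-late pattern used by the import queue safe:

```js
// Sketch: start the seal check the moment a block is queued, consume it later.
const seal = importer.preverifySeal(timeSlot, block); // Promise<hash | null>, never rejects
// ...other blocks get queued and imported in the meantime...
const res = await importer.importBlock(block, await seal, false); // null → verified inline
```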
@@ -25189,19 +25551,6 @@ class Importer {
         return stateEntries ?? null;
     }
 }
-/**
- * Attempt to safely extract timeslot of a block.
- *
- * NOTE: it may fail if encoding is invalid.
- */
-function extractTimeSlot(block) {
-    try {
-        return block.header.view().timeSlotIndex.materialize();
-    }
-    catch {
-        return tryAsTimeSlot(2 ** 32 - 1);
-    }
-}
 
 ;// CONCATENATED MODULE: ./workers/generic/finished.ts
 
@@ -25465,6 +25814,7 @@ class ImporterReady extends State {
             response: rootHash === null ? bytes_Bytes.zero(hash_HASH_SIZE).raw : rootHash.raw,
         };
     }
+    // NOTE [ToDr] This should rather be using the import queue, instead of going directly.
    async importBlock(block) {
         if (this.importer === null) {
             state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
@@ -25476,13 +25826,17 @@ class ImporterReady extends State {
         if (block instanceof Uint8Array) {
             const config = this.getConfig();
             const blockView = decoder_Decoder.decodeObject(Block.Codec.View, block, config.chainSpec);
+            const headerView = blockView.header.view();
+            const timeSlot = headerView.timeSlotIndex.materialize();
             let response;
             try {
-                const res = await this.importer.importBlock(blockView, config.omitSealVerification);
+                const res = await this.importer.importBlock(blockView, null, config.omitSealVerification);
                 if (res.isOk) {
-
+                    state_machine_logger.info(`🧊 Best block: #${timeSlot} (${res.ok.hash})`);
+                    response = result_Result.ok(this.importer.getBestStateRootHash() ?? bytes_Bytes.zero(hash_HASH_SIZE).asOpaque());
                 }
                 else {
+                    state_machine_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(res)}`);
                     response = result_Result.error(resultToString(res));
                 }
             }
@@ -25530,6 +25884,8 @@ class ImporterReady extends State {
 
 
 
+
+
 const importer_logger = Logger.new(import.meta.filename, "importer");
 if (!external_node_worker_threads_namespaceObject.isMainThread) {
     Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
@@ -25546,6 +25902,7 @@ async function createImporter(config) {
     const importer = new Importer(config.chainSpec, hasher, importer_logger, blocks, states);
     return {
         lmdb,
+        blocks,
         importer,
     };
 }
@@ -25560,27 +25917,65 @@ async function main(channel) {
     importer_logger.info(`📥 Importer starting ${channel.currentState()}`);
     // Await the configuration object
     const ready = await channel.waitForState("ready(importer)");
-    let closeDb = async () => { };
     const finished = await ready.doUntil("finished", async (worker, port) => {
         const config = worker.getConfig();
-        const {
-        closeDb = async () => {
-            await lmdb.close();
-        };
+        const { blocks, importer } = await createImporter(config);
         // TODO [ToDr] this is shit, since we have circular dependency.
         worker.setImporter(importer);
         importer_logger.info("📥 Importer waiting for blocks.");
+        // TODO [ToDr] back pressure?
+        let isProcessing = false;
+        const importingQueue = new ImportQueue(config.chainSpec, importer);
         worker.onBlock.on(async (block) => {
-            const
-
-
+            const details = ImportQueue.getBlockDetails(block);
+            // ignore invalid blocks.
+            if (details.isError) {
+                importer_logger.trace("🧊 Ignoring invalid block.");
+                return;
+            }
+            // ignore already known blocks
+            if (blocks.getHeader(details.ok.hash) !== null) {
+                importer_logger.trace(`🧊 Already imported block: #${details.ok.data.timeSlot}.`);
+                return;
+            }
+            const importResult = importingQueue.push(details.ok);
+            // ignore blocks that are already queued
+            if (importResult.isError) {
+                importer_logger.trace(`🧊 Already queued block: #${details.ok.data.timeSlot}.`);
+                return;
+            }
+            importer_logger.log(`🧊 Queued block: #${details.ok.data.timeSlot} (skip seal: ${config.omitSealVerification})`);
+            if (isProcessing) {
+                return;
+            }
+            isProcessing = true;
+            try {
+                for (;;) {
+                    const entry = importingQueue.shift();
+                    if (entry === undefined) {
+                        return;
+                    }
+                    const { block, seal, timeSlot } = entry;
+                    const timer = measure("importBlock");
+                    const maybeBestHeader = await importer.importBlock(block, await seal, config.omitSealVerification);
+                    if (maybeBestHeader.isOk) {
+                        const bestHeader = maybeBestHeader.ok;
+                        worker.announce(port, bestHeader);
+                        importer_logger.info(`🧊 Best block: #${bestHeader.data.timeSlotIndex.materialize()} (${bestHeader.hash})`);
+                    }
+                    else {
+                        importer_logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
+                    }
+                    importer_logger.log(timer());
+                }
+            }
+            finally {
+                isProcessing = false;
             }
         });
         await wasmPromise;
     });
     importer_logger.info("📥 Importer finished. Closing channel.");
-    // close the database
-    await closeDb();
     // Close the comms to gracefully close the app.
     finished.currentState().close(channel);
 }
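The `isProcessing` flag above implements a classic single-consumer drain: concurrent `onBlock` events only enqueue, and at most one loop works the queue at a time (sound here because the handler runs on a single JS thread between `await` points). The pattern in isolation, with a hypothetical `handle` consumer:

```js
// Self-contained sketch of the guard-and-drain pattern used above.
let isProcessing = false;
const queue = [];

async function onItem(item) {
  queue.push(item);
  if (isProcessing) return; // a drain loop is already running
  isProcessing = true;
  try {
    for (;;) {
      const next = queue.shift();
      if (next === undefined) return; // queue drained
      await handle(next); // hypothetical async consumer
    }
  } finally {
    isProcessing = false; // the next event may start a new drain
  }
}
```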