@typeberry/jam 0.1.1-e48de40 → 0.1.1
This diff shows the changes between publicly available package versions as published to a supported public registry. It is provided for informational purposes only.
- package/block-generator/index.js +94 -132
- package/block-generator/index.js.map +1 -1
- package/importer/index.js +177 -234
- package/importer/index.js.map +1 -1
- package/index.js +341 -394
- package/index.js.map +1 -1
- package/jam-network/index.js +162 -198
- package/jam-network/index.js.map +1 -1
- package/package.json +2 -4
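
Most of the changes below replace a template-literal logging API with plain string arguments: the console transports now receive `(moduleName, val)` and route through a shared `push` method that resolves the log level per call, and `Logger` forwards a pre-formatted string together with its module name. A minimal sketch of the new call shape, inferred from the compiled hunks below (names come from the bundled output, and `Level`/`findLevel` are simplified stand-ins, not the package's actual source):

    // Simplified stand-ins for the bundle's Level enum and findLevel helper.
    const Level = { TRACE: 0, LOG: 1, INFO: 2, WARN: 3, ERROR: 4,
      0: "TRACE", 1: "LOG", 2: "INFO", 3: "WARN", 4: "ERROR" };
    const findLevel = (options, _moduleName) => options.defaultLevel;

    class ConsoleTransport {
      constructor(options) { this.options = options; }
      // Transports now take the module name plus a ready-made message string.
      warn(moduleName, val) { this.push(Level.WARN, moduleName, val); }
      error(moduleName, val) { this.push(Level.ERROR, moduleName, val); }
      push(level, moduleName, val) {
        // The per-logger level/name cache is gone; the level is resolved on every call.
        if (level < findLevel(this.options, moduleName)) return;
        const shortModule = moduleName.replace(this.options.workingDir, "");
        const msg = `${Level[level].padEnd(5)} [${shortModule}] ${val}`;
        level === Level.ERROR ? console.error(msg) : console.warn(msg);
      }
    }

    // Usage: call sites pass plain strings rather than tagged templates, e.g.
    new ConsoleTransport({ workingDir: "/repo/", defaultLevel: Level.WARN })
      .warn("/repo/importer", "ASSIGN(0, ...) <- OK");
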
package/importer/index.js
CHANGED
@@ -4436,34 +4436,6 @@ class WithDebug {
 }
 }
 
-;// CONCATENATED MODULE: ./packages/core/utils/dev.ts
-const dev_env = typeof process === "undefined" ? {} : process.env;
-/**
-* The function will produce relative path resolver that is adjusted
-* for package location within the workspace.
-*
-* Example:
-* $ npm start -w @typeberry/jam
-*
-* The above command will run `./bin/jam/index.js`, however we would
-* still want relative paths to be resolved according to top-level workspace
-* directory.
-*
-* So the caller, passes the absolute workspace path as argument and get's
-* a function that can properly resolve relative paths.
-*
-* NOTE: the translation happens only for development build! When
-* we build a single library from our project, we no longer mangle the paths.
-*/
-const workspacePathFix = dev_env.NODE_ENV === "development"
-? (workspacePath) => (p) => {
-if (p.startsWith("/")) {
-return p;
-}
-return `${workspacePath}/${p}`;
-}
-: () => (p) => p;
-
 ;// CONCATENATED MODULE: ./packages/core/utils/opaque.ts
 /**
 * @fileoverview `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
@@ -4807,7 +4779,6 @@ function isResult(x) {
 
 
 
-
 ;// CONCATENATED MODULE: ./packages/core/bytes/bitvec.ts
 
 /**
@@ -13401,23 +13372,6 @@ function parseLevel(lvl) {
 ;// CONCATENATED MODULE: ./packages/core/logger/console.ts
 // biome-ignore-all lint/suspicious/noConsole: logger
 
-function print(level, levelAndName, strings, data) {
-if (level < levelAndName[0]) {
-return;
-}
-const lvlText = Level[level].padEnd(5);
-const val = strings.map((v, idx) => `${v}${data[idx]}`);
-const msg = `${lvlText} [${levelAndName[1]}] ${val}`;
-if (level === Level.WARN) {
-console.warn(msg);
-}
-else if (level === Level.ERROR) {
-console.error(msg);
-}
-else {
-console.info(msg);
-}
-}
 /** An optimized logger that ignores `TRACE`, `DEBUG` and `LOG` messages.
 *
 * Use the `create` method to instantiate the right instance of a more specialized logger.
@@ -13448,91 +13402,109 @@ class ConsoleTransport {
 constructor(options) {
 this.options = options;
 }
-insane(
+insane(_moduleName, _val) {
 /* no-op */
 }
-trace(
+trace(_moduleName, _val) {
 /* no-op */
 }
-log(
+log(_moduleName, _val) {
 /* no-op */
 }
-info(
+info(_moduleName, _val) {
 /* no-op */
 }
-warn(
-
+warn(moduleName, val) {
+this.push(Level.WARN, moduleName, val);
 }
-error(
-
+error(moduleName, val) {
+this.push(Level.ERROR, moduleName, val);
+}
+push(level, moduleName, val) {
+const shortModule = moduleName.replace(this.options.workingDir, "");
+const configuredLevel = findLevel(this.options, moduleName);
+const lvlText = Level[level].padEnd(5);
+if (level < configuredLevel) {
+return;
+}
+const msg = `${lvlText} [${shortModule}] ${val}`;
+if (level === Level.WARN) {
+console.warn(msg);
+}
+else if (level === Level.ERROR) {
+console.error(msg);
+}
+else {
+console.info(msg);
+}
 }
 }
 /**
 * Insane version of console logger - supports insane level.
 */
 class InsaneConsoleLogger extends ConsoleTransport {
-insane(
-
+insane(moduleName, val) {
+this.push(Level.INSANE, moduleName, val);
 }
-trace(
-
+trace(moduleName, val) {
+this.push(Level.TRACE, moduleName, val);
 }
-log(
-
+log(moduleName, val) {
+this.push(Level.LOG, moduleName, val);
 }
-info(
-
+info(moduleName, val) {
+this.push(Level.INFO, moduleName, val);
 }
 }
 /**
 * A basic version of console logger - printing everything.
 */
 class TraceConsoleTransport extends ConsoleTransport {
-insane(
+insane(_moduleName, _val) {
 /* no-op */
 }
-trace(
-
+trace(moduleName, val) {
+this.push(Level.TRACE, moduleName, val);
 }
-log(
-
+log(moduleName, val) {
+this.push(Level.LOG, moduleName, val);
 }
-info(
-
+info(moduleName, val) {
+this.push(Level.INFO, moduleName, val);
 }
 }
 /**
 * An optimized version of the logger - completely ignores `TRACE` level calls.
 */
 class LogConsoleTransport extends ConsoleTransport {
-insane(
+insane(_moduleName, _val) {
 /* no-op */
 }
-trace(
+trace(_moduleName, _val) {
 /* no-op */
 }
-log(
-
+log(moduleName, val) {
+this.push(Level.LOG, moduleName, val);
 }
-info(
-
+info(moduleName, val) {
+this.push(Level.INFO, moduleName, val);
 }
 }
 /**
 * An optimized version of the logger - completely ignores `TRACE` & `DEBUG` level calls.
 */
 class InfoConsoleTransport extends ConsoleTransport {
-insane(
+insane(_moduleName, _val) {
 /* no-op */
 }
-trace(
+trace(_moduleName, _val) {
 /* no-op */
 }
-log(
+log(_moduleName, _val) {
 /* no-op */
 }
-info(
-
+info(moduleName, val) {
+this.push(Level.INFO, moduleName, val);
 }
 }
 
@@ -13569,6 +13541,11 @@ class Logger {
 const module = moduleName ?? fName;
 return new Logger(module.padStart(8, " "), GLOBAL_CONFIG);
 }
+/**
+* Return currently configured level for given module. */
+static getLevel(moduleName) {
+return findLevel(GLOBAL_CONFIG.options, moduleName);
+}
 /**
 * Global configuration of all loggers.
 *
@@ -13599,46 +13576,33 @@ class Logger {
 const options = parseLoggerOptions(input, defaultLevel, workingDir);
 Logger.configureAllFromOptions(options);
 }
-cachedLevelAndName;
 constructor(moduleName, config) {
 this.moduleName = moduleName;
 this.config = config;
 }
-/** Return currently configured level for given module. */
-getLevel() {
-return this.getLevelAndName()[0];
-}
-getLevelAndName() {
-if (this.cachedLevelAndName === undefined) {
-const level = findLevel(this.config.options, this.moduleName);
-const shortName = this.moduleName.replace(this.config.options.workingDir, "");
-this.cachedLevelAndName = [level, shortName];
-}
-return this.cachedLevelAndName;
-}
 /** Log a message with `INSANE` level. */
-insane(
-this.config.transport.insane(this.
+insane(val) {
+this.config.transport.insane(this.moduleName, val);
 }
 /** Log a message with `TRACE` level. */
-trace(
-this.config.transport.trace(this.
+trace(val) {
+this.config.transport.trace(this.moduleName, val);
 }
 /** Log a message with `DEBUG`/`LOG` level. */
-log(
-this.config.transport.log(this.
+log(val) {
+this.config.transport.log(this.moduleName, val);
 }
 /** Log a message with `INFO` level. */
-info(
-this.config.transport.info(this.
+info(val) {
+this.config.transport.info(this.moduleName, val);
 }
 /** Log a message with `WARN` level. */
-warn(
-this.config.transport.warn(this.
+warn(val) {
+this.config.transport.warn(this.moduleName, val);
 }
 /** Log a message with `ERROR` level. */
-error(
-this.config.transport.error(this.
+error(val) {
+this.config.transport.error(this.moduleName, val);
 }
 }
 
@@ -13753,7 +13717,7 @@ class LmdbStates {
 await Promise.all([valuesWrite, statesWrite]);
 }
 catch (e) {
-logger.error
+logger.error(`${e}`);
 return result_Result.error(StateUpdateError.Commit);
 }
 return result_Result.ok(result_OK);
@@ -13861,7 +13825,7 @@ class TypedPort {
 this.dispatchPortMessage(msg);
 }
 catch (e) {
-port_logger.error
+port_logger.error(`[${this.constructor.name}] Failed to dispatch a message: ${e}: ${JSON.stringify(msg)}`);
 throw e;
 }
 });
@@ -13935,7 +13899,7 @@ class TypedPort {
 this.port.postMessage(msg, transferList);
 }
 catch (e) {
-port_logger.error
+port_logger.error(`[${this.constructor.name}] Failed to post a message: ${e}: ${JSON.stringify(msg)}`);
 throw e;
 }
 }
@@ -13966,7 +13930,7 @@ class TypedPort {
 cleanup(reason) {
 // resolve all pending requests with an error.
 const responseListeners = this.responseListeners.eventNames();
-for (const ev
+for (const ev in responseListeners) {
 this.responseListeners.emit(ev, new Error(`port is ${reason}`));
 }
 }
@@ -14015,7 +13979,7 @@ class channel_MessageChannelStateMachine {
 this.dispatchSignal(name, data);
 }
 catch (e) {
-channel_logger.error
+channel_logger.error(`[${this.constructor.name}] Unable to dispatch signal (${name}): ${e}. ${this.stateInfo(remoteState)}`);
 throw e;
 }
 });
@@ -14024,7 +13988,7 @@ class channel_MessageChannelStateMachine {
 await this.dispatchRequest(name, data, msg);
 }
 catch (e) {
-channel_logger.error
+channel_logger.error(`[${this.constructor.name}] Unable to dispatch request (${name}): ${e}. ${this.stateInfo(remoteState)}`);
 throw e;
 }
 });
@@ -14110,7 +14074,7 @@ class channel_MessageChannelStateMachine {
 this.machine.transition(res.transitionTo.state, res.transitionTo.data);
 }
 if (didStateChangeInMeantime) {
-channel_logger.warn
+channel_logger.warn(`Ignoring obsolete response for an old request: "${name}"`);
 return;
 }
 return this.port.respond(prevState.stateName, msg, res.response);
@@ -14126,7 +14090,7 @@ class channel_MessageChannelStateMachine {
 }
 }
 transitionTo() {
-channel_logger.trace
+channel_logger.trace(`[${this.machine.name}] transitioned to ${this.currentState()}`);
 return this;
 }
 /**
@@ -14146,7 +14110,7 @@ class channel_MessageChannelStateMachine {
 await promise;
 }
 catch (e) {
-channel_logger.error
+channel_logger.error(JSON.stringify(e));
 }
 return new channel_MessageChannelStateMachine(machine, port);
 }
@@ -14349,7 +14313,7 @@ class State {
 * actions.
 */
 onActivation(data) {
-state_logger.trace
+state_logger.trace(`[${this.constructor.name}] Changing state to: ${this}`);
 this.data = data;
 }
 /**
@@ -16060,14 +16024,12 @@ class WriteablePage extends MemoryPage {
 
 
 
-
-
 var AccessType;
 (function (AccessType) {
 AccessType[AccessType["READ"] = 0] = "READ";
 AccessType[AccessType["WRITE"] = 1] = "WRITE";
 })(AccessType || (AccessType = {}));
-const
+// const logger = Logger.new(import.meta.filename, "pvm:mem");
 class Memory {
 sbrkIndex;
 virtualSbrkIndex;
@@ -16098,7 +16060,7 @@ class Memory {
 if (bytes.length === 0) {
 return result_Result.ok(result_OK);
 }
-
+// logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`);
 const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
 if (pagesResult.isError) {
 return result_Result.error(pagesResult.error);
@@ -16165,7 +16127,7 @@ class Memory {
 currentPosition += bytesToRead;
 bytesLeft -= bytesToRead;
 }
-
+// logger.insane(`MEM[${startAddress}] => ${BytesBlob.blobFrom(result)}`);
 return result_Result.ok(result_OK);
 }
 sbrk(length) {
@@ -18095,7 +18057,7 @@ class ProgramDecoder {
 return result_Result.ok(new ProgramDecoder(program));
 }
 catch (e) {
-program_decoder_logger.error
+program_decoder_logger.error(`Invalid program: ${e}`);
 return result_Result.error(ProgramDecoderError.InvalidProgramError);
 }
 }
@@ -18261,7 +18223,7 @@ class Interpreter {
 const argsType = instructionArgumentTypeMap[currentInstruction] ?? ArgumentType.NO_ARGUMENTS;
 const argsResult = this.argsDecodingResults[argsType];
 this.argsDecoder.fillArgs(this.pc, argsResult);
-interpreter_logger.insane
+interpreter_logger.insane(`[PC: ${this.pc}] ${Instruction[currentInstruction]}`);
 if (!isValidInstruction) {
 this.instructionResult.status = pvm_interpreter_result_Result.PANIC;
 }
@@ -18333,7 +18295,7 @@ class Interpreter {
 this.status = status_Status.HOST;
 break;
 }
-interpreter_logger.insane
+interpreter_logger.insane(`[PC: ${this.pc}] Status: ${pvm_interpreter_result_Result[this.instructionResult.status]}`);
 return this.status;
 }
 this.pc = this.instructionResult.nextPc;
@@ -18580,7 +18542,7 @@ class host_calls_manager_HostCallsManager {
 return `r${idx}=${value} (0x${value.toString(16)})`;
 })
 .join(", ");
-host_calls_manager_logger.insane
+host_calls_manager_logger.insane(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
 }
 }
 class NoopMissing {
@@ -20963,7 +20925,7 @@ class AccumulateExternalities {
 /** https://graypaper.fluffylabs.dev/#/7e6ff6a/362802362d02?v=0.6.7 */
 const validatorsManager = this.updatedState.getPrivilegedServices().validatorsManager;
 if (validatorsManager !== this.currentServiceId) {
-accumulate_externalities_logger.trace
+accumulate_externalities_logger.trace(`Current service id (${this.currentServiceId}) is not a validators manager. (expected: ${validatorsManager}) and cannot update validators data. Ignoring`);
 return result_Result.error(UnprivilegedError);
 }
 this.updatedState.stateUpdate.validatorsData = validatorsData;
@@ -20978,11 +20940,11 @@ class AccumulateExternalities {
 // NOTE `coreIndex` is already verified in the HC, so this is infallible.
 const currentAuthManager = this.updatedState.getPrivilegedServices().authManager[coreIndex];
 if (currentAuthManager !== this.currentServiceId) {
-accumulate_externalities_logger.trace
+accumulate_externalities_logger.trace(`Current service id (${this.currentServiceId}) is not an auth manager of core ${coreIndex} (expected: ${currentAuthManager}) and cannot update authorization queue. Ignoring`);
 return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
 }
 if (authManager === null && Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)) {
-accumulate_externalities_logger.trace
+accumulate_externalities_logger.trace("The new auth manager is not a valid service id. Ignoring");
 return result_Result.error(UpdatePrivilegesError.InvalidServiceId);
 }
 this.updatedState.stateUpdate.authorizationQueues.set(coreIndex, authQueue);
@@ -21863,7 +21825,7 @@ class Assign {
 const memoryReadResult = memory.loadInto(res, authorizationQueueStart);
 // error while reading the memory.
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace("ASSIGN() <- PANIC");
 return PvmExecution.Panic;
 }
 if (maybeCoreIndex >= this.chainSpec.coresCount) {
@@ -21878,18 +21840,18 @@ class Assign {
 const result = this.partialState.updateAuthorizationQueue(coreIndex, fixedSizeAuthQueue, authManager);
 if (result.isOk) {
 regs.set(IN_OUT_REG, HostCallResult.OK);
-logger_logger.trace
+logger_logger.trace(`ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`);
 return;
 }
 const e = result.error;
 if (e === UpdatePrivilegesError.UnprivilegedService) {
 regs.set(IN_OUT_REG, HostCallResult.HUH);
-logger_logger.trace
+logger_logger.trace(`ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`);
 return;
 }
 if (e === UpdatePrivilegesError.InvalidServiceId) {
 regs.set(IN_OUT_REG, HostCallResult.WHO);
-logger_logger.trace
+logger_logger.trace(`ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`);
 return;
 }
 debug_assertNever(e);
@@ -21955,7 +21917,7 @@ class Bless {
 decoder.resetTo(0);
 const memoryReadResult = memory.loadInto(result, memIndex);
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`BLESS(${manager}, ${validator}) <- PANIC`);
 return PvmExecution.Panic;
 }
 const { serviceId, gas } = decoder.object(serviceIdAndGasCodec);
@@ -21968,24 +21930,24 @@ class Bless {
 const authorizersDecoder = decoder_Decoder.fromBlob(res);
 const memoryReadResult = memory.loadInto(res, authorization);
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`BLESS(${manager}, ${validator}, ${autoAccumulateEntries}) <- PANIC`);
 return PvmExecution.Panic;
 }
 const authorizers = tryAsPerCore(authorizersDecoder.sequenceFixLen(descriptors_codec.u32.asOpaque(), this.chainSpec.coresCount), this.chainSpec);
 const updateResult = this.partialState.updatePrivilegedServices(manager, authorizers, validator, autoAccumulateEntries);
 if (updateResult.isOk) {
-logger_logger.trace
+logger_logger.trace(`BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- OK`);
 regs.set(bless_IN_OUT_REG, HostCallResult.OK);
 return;
 }
 const e = updateResult.error;
 if (e === UpdatePrivilegesError.UnprivilegedService) {
-logger_logger.trace
+logger_logger.trace(`BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- HUH`);
 regs.set(bless_IN_OUT_REG, HostCallResult.HUH);
 return;
 }
 if (e === UpdatePrivilegesError.InvalidServiceId) {
-logger_logger.trace
+logger_logger.trace(`BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- WHO`);
 regs.set(bless_IN_OUT_REG, HostCallResult.WHO);
 return;
 }
@@ -22015,7 +21977,7 @@ class GasHostCall {
 }
 execute(gas, regs) {
 const gasValue = gas.get();
-logger_logger.trace
+logger_logger.trace(`GAS <- ${gasValue}`);
 regs.set(7, numbers_tryAsU64(gasValue));
 return Promise.resolve(undefined);
 }
@@ -22047,7 +22009,7 @@ class Checkpoint {
 async execute(gas, regs) {
 await this.gasHostCall.execute(gas, regs);
 this.partialState.checkpoint();
-logger_logger.trace
+logger_logger.trace("CHECKPOINT()");
 return;
 }
 }
@@ -22086,18 +22048,18 @@ class Designate {
 const memoryReadResult = memory.loadInto(res, validatorsStart);
 // error while reading the memory.
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace("DESIGNATE() <- PANIC");
 return PvmExecution.Panic;
 }
 const decoder = decoder_Decoder.fromBlob(res);
 const validatorsData = decoder.sequenceFixLen(ValidatorData.Codec, this.chainSpec.validatorsCount);
 const result = this.partialState.updateValidatorsData(tryAsPerValidator(validatorsData, this.chainSpec));
 if (result.isError) {
-logger_logger.trace
+logger_logger.trace(`DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`);
 regs.set(designate_IN_OUT_REG, HostCallResult.HUH);
 }
 else {
-logger_logger.trace
+logger_logger.trace(`DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`);
 regs.set(designate_IN_OUT_REG, HostCallResult.OK);
 }
 }
@@ -22138,17 +22100,17 @@ class Eject {
 const previousCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
 const memoryReadResult = memory.loadInto(previousCodeHash.raw, preimageHashStart);
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`EJECT(${serviceId}) <- PANIC`);
 return PvmExecution.Panic;
 }
 // cannot eject self
 if (serviceId === this.currentServiceId) {
 regs.set(eject_IN_OUT_REG, HostCallResult.WHO);
-logger_logger.trace
+logger_logger.trace(`EJECT(${serviceId}, ${previousCodeHash}) <- WHO`);
 return;
 }
 const result = this.partialState.eject(serviceId, previousCodeHash);
-logger_logger.trace
+logger_logger.trace(`EJECT(${serviceId}, ${previousCodeHash}) <- ${resultToString(result)}`);
 // All good!
 if (result.isOk) {
 regs.set(eject_IN_OUT_REG, HostCallResult.OK);
@@ -22200,11 +22162,11 @@ class Forget {
 const memoryReadResult = memory.loadInto(hash.raw, hashStart);
 // error while reading the memory.
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`FORGET(${hash}, ${length}) <- PANIC`);
 return PvmExecution.Panic;
 }
 const result = this.partialState.forgetPreimage(hash.asOpaque(), length);
-logger_logger.trace
+logger_logger.trace(`FORGET(${hash}, ${length}) <- ${resultToString(result)}`);
 if (result.isOk) {
 regs.set(forget_IN_OUT_REG, HostCallResult.OK);
 }
@@ -22257,11 +22219,11 @@ class New {
 const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
 // error while reading the memory.
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}) <- PANIC`);
 return PvmExecution.Panic;
 }
 const assignedId = this.partialState.newService(codeHash.asOpaque(), codeLength, gas, allowance, gratisStorage);
-logger_logger.trace
+logger_logger.trace(`NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}) <- ${resultToString(assignedId)}`);
 if (assignedId.isOk) {
 regs.set(new_IN_OUT_REG, numbers_tryAsU64(assignedId.ok));
 return;
@@ -22316,11 +22278,11 @@ class Provide {
 const preimage = bytes_BytesBlob.blobFrom(new Uint8Array(length));
 const memoryReadResult = memory.loadInto(preimage.raw, preimageStart);
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`);
 return PvmExecution.Panic;
 }
 const result = this.partialState.providePreimage(serviceId, preimage);
-logger_logger.trace
+logger_logger.trace(`PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${resultToString(result)}`);
 if (result.isOk) {
 regs.set(provide_IN_OUT_REG, HostCallResult.OK);
 return;
@@ -22374,11 +22336,11 @@ class Query {
 const memoryReadResult = memory.loadInto(hash.raw, hashStart);
 // error while reading the memory.
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`QUERY(${hash}, ${length}) <- PANIC`);
 return PvmExecution.Panic;
 }
 const result = this.partialState.checkPreimageStatus(hash.asOpaque(), length);
-logger_logger.trace
+logger_logger.trace(`QUERY(${hash}, ${length}) <- ${result}`);
 const zero = numbers_tryAsU64(0n);
 if (result === null) {
 regs.set(IN_OUT_REG_1, HostCallResult.NONE);
@@ -22439,11 +22401,11 @@ class Solicit {
 const hash = bytes_Bytes.zero(hash_HASH_SIZE);
 const memoryReadResult = memory.loadInto(hash.raw, hashStart);
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`SOLICIT(${hash}, ${length}) <- PANIC`);
 return PvmExecution.Panic;
 }
 const result = this.partialState.requestPreimage(hash.asOpaque(), length);
-logger_logger.trace
+logger_logger.trace(`SOLICIT(${hash}, ${length}) <- ${resultToString(result)}`);
 if (result.isOk) {
 regs.set(solicit_IN_OUT_REG, HostCallResult.OK);
 return;
@@ -22515,11 +22477,11 @@ class Transfer {
 const memoryReadResult = memory.loadInto(memo.raw, memoStart);
 // page fault while reading the memory.
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`TRANSFER(${destination}, ${amount}, ${onTransferGas}, ${memo}) <- PANIC`);
 return PvmExecution.Panic;
 }
 const transferResult = this.partialState.transfer(destination, amount, onTransferGas, memo);
-logger_logger.trace
+logger_logger.trace(`TRANSFER(${destination}, ${amount}, ${onTransferGas}, ${memo}) <- ${resultToString(transferResult)}`);
 // All good!
 if (transferResult.isOk) {
 regs.set(transfer_IN_OUT_REG, HostCallResult.OK);
@@ -22578,11 +22540,11 @@ class Upgrade {
 const codeHash = bytes_Bytes.zero(hash_HASH_SIZE);
 const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`);
 return PvmExecution.Panic;
 }
 this.partialState.upgradeService(codeHash.asOpaque(), gas, allowance);
-logger_logger.trace
+logger_logger.trace(`UPGRADE(${codeHash}, ${gas}, ${allowance})`);
 regs.set(upgrade_IN_OUT_REG, HostCallResult.OK);
 }
 }
@@ -22616,11 +22578,11 @@ class Yield {
 const hash = bytes_Bytes.zero(hash_HASH_SIZE);
 const memoryReadResult = memory.loadInto(hash.raw, hashStart);
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace("YIELD() <- PANIC");
 return PvmExecution.Panic;
 }
 this.partialState.yield(hash);
-logger_logger.trace
+logger_logger.trace(`YIELD(${hash})`);
 regs.set(yield_IN_OUT_REG, HostCallResult.OK);
 }
 }
@@ -22662,10 +22624,10 @@ class Fetch {
 const chunk = value === null ? new Uint8Array() : value.raw.subarray(Number(offset), Number(offset + length));
 const storeResult = memory.storeFrom(output, chunk);
 if (storeResult.isError) {
-logger_logger.trace
+logger_logger.trace(`FETCH(${kind}) <- PANIC`);
 return PvmExecution.Panic;
 }
-logger_logger.trace
+logger_logger.trace(`FETCH(${kind}) <- ${value?.toStringTruncated()}`);
 // write result
 regs.set(fetch_IN_OUT_REG, value === null ? HostCallResult.NONE : valueLength);
 }
@@ -22824,10 +22786,10 @@ class Info {
 const chunk = encodedInfo.raw.subarray(Number(offset), Number(offset + length));
 const writeResult = memory.storeFrom(outputStart, chunk);
 if (writeResult.isError) {
-logger_logger.trace
+logger_logger.trace(`INFO(${serviceId}) <- PANIC`);
 return PvmExecution.Panic;
 }
-logger_logger.trace
+logger_logger.trace(`INFO(${serviceId}) <- ${bytes_BytesBlob.blobFrom(chunk)}`);
 if (accountInfo === null) {
 regs.set(info_IN_OUT_REG, HostCallResult.NONE);
 return;
@@ -22888,7 +22850,7 @@ class LogHostCall {
 memory.loadInto(target, targetStart);
 }
 memory.loadInto(message, msgStart);
-logger_logger.trace
+logger_logger.trace(`SERVICE [${this.currentServiceId}] [${lvl}] ${decoder.decode(target)} ${decoder.decode(message)}`);
 return Promise.resolve(undefined);
 }
 }
@@ -22928,12 +22890,12 @@ class Lookup {
 const preImageHash = bytes_Bytes.zero(hash_HASH_SIZE);
 const memoryReadResult = memory.loadInto(preImageHash.raw, hashAddress);
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`);
 return PvmExecution.Panic;
 }
 // v
 const preImage = this.account.lookup(serviceId, preImageHash);
-logger_logger.trace
+logger_logger.trace(`LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated()}...`);
 const preImageLength = preImage === null ? numbers_tryAsU64(0) : numbers_tryAsU64(preImage.raw.length);
 const preimageBlobOffset = regs.get(10);
 const lengthToWrite = regs.get(11);
@@ -22995,7 +22957,7 @@ class Read {
 const rawKey = bytes_BytesBlob.blobFrom(new Uint8Array(storageKeyLengthClamped));
 const memoryReadResult = memory.loadInto(rawKey.raw, storageKeyStartAddress);
 if (memoryReadResult.isError) {
-logger_logger.trace
+logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- PANIC`);
 return PvmExecution.Panic;
 }
 // v
@@ -23013,15 +22975,15 @@ class Read {
 const chunk = value === null ? new Uint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
 const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
 if (memoryWriteResult.isError) {
-logger_logger.trace
+logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- PANIC`);
 return PvmExecution.Panic;
 }
 if (value === null) {
-logger_logger.trace
+logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- NONE`);
 regs.set(read_IN_OUT_REG, HostCallResult.NONE);
 return;
 }
-logger_logger.trace
+logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`);
 regs.set(read_IN_OUT_REG, valueLength);
 }
 }
@@ -23064,7 +23026,7 @@ class Write {
 const rawStorageKey = new Uint8Array(storageKeyLengthClamped);
 const keyLoadingResult = memory.loadInto(rawStorageKey, storageKeyStartAddress);
 if (keyLoadingResult.isError) {
-logger_logger.trace
+logger_logger.trace("WRITE() <- PANIC");
 return PvmExecution.Panic;
 }
 // k
@@ -23074,14 +23036,14 @@ class Write {
 const valueLoadingResult = memory.loadInto(value, valueStart);
 // Note [MaSo] this is ok to return bcs if valueLength is 0, then this panic won't happen
 if (valueLoadingResult.isError) {
-logger_logger.trace
+logger_logger.trace(`WRITE(${storageKey})}) <- PANIC`);
 return PvmExecution.Panic;
 }
 /** https://graypaper.fluffylabs.dev/#/9a08063/33af0133b201?v=0.6.6 */
 const maybeValue = valueLength === 0n ? null : bytes_BytesBlob.blobFrom(value);
 // a
 const result = this.account.write(storageKey, maybeValue);
-logger_logger.trace
+logger_logger.trace(`WRITE(${storageKey}, ${maybeValue?.toStringTruncated()}) <- ${resultToString(result)}`);
 if (result.isError) {
 regs.set(write_IN_OUT_REG, HostCallResult.FULL);
 return;
@@ -23273,18 +23235,18 @@ class Accumulate {
 async pvmAccumulateInvocation(slot, serviceId, operands, gas, entropy, inputStateUpdate) {
 const service = this.state.getService(serviceId);
 if (service === null) {
-accumulate_logger.log
+accumulate_logger.log(`Service with id ${serviceId} not found.`);
 return result_Result.error(PvmInvocationError.NoService);
 }
 const codeHash = service.getInfo().codeHash;
 // TODO [ToDr] Should we check that the preimage is still available?
 const code = service.getPreimage(codeHash.asOpaque());
 if (code === null) {
-accumulate_logger.log
+accumulate_logger.log(`Code with hash ${codeHash} not found for service ${serviceId}.`);
 return result_Result.error(PvmInvocationError.NoPreimage);
 }
 if (code.length > W_C) {
-accumulate_logger.log
+accumulate_logger.log(`Code with hash ${codeHash} is too long for service ${serviceId}.`);
 return result_Result.error(PvmInvocationError.PreimageTooLong);
 }
 const nextServiceId = generateNextServiceId({ serviceId, entropy, timeslot: slot }, this.chainSpec);
@@ -23306,10 +23268,10 @@ class Accumulate {
 if (result.hasStatus()) {
 const status = result.status;
 if (status === status_Status.OOG || status === status_Status.PANIC) {
-accumulate_logger.trace
+accumulate_logger.trace(`[${serviceId}] accumulate finished with ${status_Status[status]} reverting to checkpoint.`);
 return result_Result.ok({ stateUpdate: checkpoint, consumedGas: common_tryAsServiceGas(result.consumedGas) });
 }
-accumulate_logger.trace
+accumulate_logger.trace(`[${serviceId}] accumulate finished with ${status_Status[status]}`);
 }
 /**
 * PVM invocation returned a hash so we override whatever `yield` host call
@@ -23334,14 +23296,14 @@ class Accumulate {
 * https://graypaper.fluffylabs.dev/#/7e6ff6a/18d70118d701?v=0.6.7
 */
 async accumulateSingleService(serviceId, operands, gasCost, slot, entropy, inputStateUpdate) {
-accumulate_logger.log
+accumulate_logger.log(`Accumulating service ${serviceId}, items: ${operands.length} at slot: ${slot}.`);
 const result = await this.pvmAccumulateInvocation(slot, serviceId, operands, gasCost, entropy, inputStateUpdate);
 if (result.isError) {
 // https://graypaper.fluffylabs.dev/#/7e6ff6a/2fb6012fb601?v=0.6.7
-accumulate_logger.log
+accumulate_logger.log(`Accumulation failed for ${serviceId}.`);
 return { stateUpdate: null, consumedGas: 0n };
 }
-accumulate_logger.log
+accumulate_logger.log(`Accumulation successful for ${serviceId}. Consumed: ${result.ok.consumedGas}`);
 return result.ok;
 }
 /**
@@ -23390,7 +23352,6 @@ class Accumulate {
 const serviceIds = accumulateData.getServiceIds();
 let gasCost = common_tryAsServiceGas(0);
 let currentState = inputStateUpdate;
-const currentManager = (inputStateUpdate.privilegedServices ?? this.state.privilegedServices).manager;
 for (const serviceId of serviceIds) {
 const checkpoint = AccumulationStateUpdate.copyFrom(currentState);
 const { consumedGas, stateUpdate } = await this.accumulateSingleService(serviceId, accumulateData.getOperands(serviceId), accumulateData.getGasCost(serviceId), slot, entropy, currentState);
@@ -23400,21 +23361,6 @@ class Accumulate {
 serviceStatistics.gasUsed = common_tryAsServiceGas(serviceStatistics.gasUsed + consumedGas);
 statistics.set(serviceId, serviceStatistics);
 currentState = stateUpdate === null ? checkpoint : stateUpdate;
-if (Compatibility.is(GpVersion.V0_7_0) && serviceId === currentManager) {
-const newV = currentState.privilegedServices?.validatorsManager;
-if (currentState.privilegedServices !== null && newV !== undefined && serviceIds.includes(newV)) {
-accumulate_logger.info `Entering completely incorrect code that probably reverts validatorsManager change. This is valid in 0.7.0 only and incorrect in 0.7.1+`;
-// Since serviceIds already contains newV, this service gets accumulated twice.
-// To avoid double-counting, we skip stats and gas cost tracking here.
-// We need this accumulation to get the correct `validatorsManager`
-const { stateUpdate } = await this.accumulateSingleService(newV, accumulateData.getOperands(newV), accumulateData.getGasCost(newV), slot, entropy, checkpoint);
-const correctV = stateUpdate?.privilegedServices?.validatorsManager ?? this.state.privilegedServices.validatorsManager;
-currentState.privilegedServices = PrivilegedServices.create({
-...currentState.privilegedServices,
-validatorsManager: correctV,
-});
-}
-}
 }
 return {
 state: currentState,
@@ -23559,14 +23505,11 @@ class DeferredTransfers {
 async transition({ pendingTransfers, timeslot, servicesUpdate: inputServicesUpdate, entropy, }) {
 // https://graypaper.fluffylabs.dev/#/7e6ff6a/187a03187a03?v=0.6.7
 const transferStatistics = new Map();
-const services = uniquePreserveOrder(pendingTransfers.
+const services = uniquePreserveOrder(pendingTransfers.flatMap((x) => [x.source, x.destination]));
 let currentStateUpdate = AccumulationStateUpdate.new(inputServicesUpdate);
 for (const serviceId of services) {
 const partiallyUpdatedState = new PartiallyUpdatedState(this.state, currentStateUpdate);
-
-const transfers = pendingTransfers
-.filter((pendingTransfer) => pendingTransfer.destination === serviceId)
-.toSorted((a, b) => a.source - b.source);
+const transfers = pendingTransfers.filter((pendingTransfer) => pendingTransfer.destination === serviceId);
 const info = partiallyUpdatedState.getServiceInfo(serviceId);
 if (info === null) {
 return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist);
@@ -23586,13 +23529,13 @@ class DeferredTransfers {
 const isCodeCorrect = code !== null && code.length <= W_C;
 if (!hasTransfers || !isCodeCorrect) {
 if (code === null) {
-deferred_transfers_logger.trace
+deferred_transfers_logger.trace(`Skipping ON_TRANSFER execution for service ${serviceId} because code is null`);
 }
 else if (!hasTransfers) {
-deferred_transfers_logger.trace
+deferred_transfers_logger.trace(`Skipping ON_TRANSFER execution for service ${serviceId} because there are no transfers`);
 }
 else {
-deferred_transfers_logger.trace
+deferred_transfers_logger.trace(`Skipping ON_TRANSFER execution for service ${serviceId} because code is too long`);
 }
 }
 else {
@@ -24220,7 +24163,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
 headerChain.isAncestor(context.lookupAnchorSlot, context.lookupAnchor, context.anchor);
 if (!isInChain) {
 if (process.env.SKIP_LOOKUP_ANCHOR_CHECK !== undefined) {
-verify_contextual_logger.warn
+verify_contextual_logger.warn(`Lookup anchor check for ${context.lookupAnchor} would fail, but override is active.`);
 }
 else {
 return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
@@ -25107,7 +25050,7 @@ class OnChain {
 reports: availableReports,
 entropy: entropy[0],
 });
-chain_stf_logger.log
+chain_stf_logger.log(timerAccumulate());
 if (accumulateResult.isError) {
 return stfError(StfErrorKind.Accumulate, accumulateResult);
 }
@@ -25238,7 +25181,7 @@ class Importer {
 this.stf = new OnChain(spec, state, blocks, hasher);
 this.state = state;
 this.currentHash = currentBestHeaderHash;
-logger.info
+logger.info(`😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`);
 }
 async importBlock(block, omitSealVerification) {
 const timer = measure("importBlock");
@@ -25246,20 +25189,20 @@ class Importer {
 const maybeBestHeader = await this.importBlockInternal(block, omitSealVerification);
 if (maybeBestHeader.isOk) {
 const bestHeader = maybeBestHeader.ok;
-this.logger.info
-this.logger.log
+this.logger.info(`🧊 Best block: #${timeSlot} (${bestHeader.hash})`);
+this.logger.log(timer());
 return maybeBestHeader;
 }
-this.logger.log
-
+this.logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
+this.logger.log(timer());
 return maybeBestHeader;
 }
 async importBlockInternal(block, omitSealVerification = false) {
 const logger = this.logger;
-logger.log
+logger.log("🧱 Attempting to import a new block");
 const timerVerify = measure("import:verify");
 const hash = await this.verifier.verifyBlock(block);
-logger.log
+logger.log(timerVerify());
 if (hash.isError) {
 return importerError(ImporterErrorKind.Verifier, hash);
 }
@@ -25279,10 +25222,10 @@ class Importer {
 }
 const timeSlot = block.header.view().timeSlotIndex.materialize();
 const headerHash = hash.ok;
-logger.log
+logger.log(`🧱 Verified block: Got hash ${headerHash} for block at slot ${timeSlot}.`);
 const timerStf = measure("import:stf");
 const res = await this.stf.transition(block, headerHash, omitSealVerification);
-logger.log
+logger.log(timerStf());
 if (res.isError) {
 return importerError(ImporterErrorKind.Stf, res);
 }
@@ -25291,7 +25234,7 @@ class Importer {
 const timerState = measure("import:state");
 const updateResult = await this.states.updateAndSetState(headerHash, this.state, update);
 if (updateResult.isError) {
-logger.error
+logger.error(`🧱 Unable to update state: ${resultToString(updateResult)}`);
 return importerError(ImporterErrorKind.Update, updateResult);
 }
 const newState = this.states.getState(headerHash);
@@ -25302,17 +25245,17 @@ class Importer {
 // the state of a parent block to support forks and create a fresh STF.
 this.state.updateBackend(newState.backend);
 this.currentHash = headerHash;
-logger.log
+logger.log(timerState());
 // insert new state and the block to DB.
 const timerDb = measure("import:db");
 const writeBlocks = this.blocks.insertBlock(new WithHash(headerHash, block));
 // Computation of the state root may happen asynchronously,
 // but we still need to wait for it before next block can be imported
 const stateRoot = await this.states.getStateRoot(newState);
-logger.log
+logger.log(`🧱 Storing post-state-root for ${headerHash}: ${stateRoot}.`);
 const writeStateRoot = this.blocks.setPostStateRoot(headerHash, stateRoot);
 await Promise.all([writeBlocks, writeStateRoot]);
-logger.log
+logger.log(timerDb());
 // finally update the best block
 await this.blocks.setBestHeaderHash(headerHash);
 return result_Result.ok(new WithHash(headerHash, block.header.view()));
@@ -25414,7 +25357,7 @@ async function spawnWorkerGeneric(bootstrapPath, logger, mainReadyName, mainRead
 const worker = new Worker(bootstrapPath);
 const machine = stateMachineMain(`main->${mainReadyName}`, mainReadyName, mainReadyState);
 const channel = await MessageChannelStateMachine.createAndTransferChannel(machine, worker);
-logger.trace
+logger.trace(`[${machine.name}] Worker spawned ${channel.currentState()}`);
 return channel;
 }
 
@@ -25527,7 +25470,7 @@ class MainReady extends State {
 if (res instanceof Uint8Array) {
 return bytes_Bytes.fromBlob(res, hash_HASH_SIZE).asOpaque();
 }
-state_machine_logger.error
+state_machine_logger.error(`Invalid response for getBestStateRootHash. Expected Uint8Array, got: ${res}`);
 return bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
 }
 finish(channel) {
@@ -25575,7 +25518,7 @@ class ImporterReady extends State {
 }
 async getStateEntries(hash) {
 if (this.importer === null) {
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
 await new Promise((resolve) => {
 this.onImporter.once(resolve);
 });
@@ -25589,7 +25532,7 @@ class ImporterReady extends State {
 response: encoded.raw,
 };
 }
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} got invalid request type: ${JSON.stringify(hash)}.`);
 return {
 response: null,
 };
@@ -25609,7 +25552,7 @@ class ImporterReady extends State {
 }
 async importBlock(block) {
 if (this.importer === null) {
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
 await new Promise((resolve) => {
 this.onImporter.once(resolve);
 });
@@ -25629,8 +25572,8 @@ class ImporterReady extends State {
 }
 }
 catch (e) {
-state_machine_logger.error
-state_machine_logger.error
+state_machine_logger.error(`Failed to import block: ${e}`);
+state_machine_logger.error(`${e instanceof Error ? e.stack : ""}`);
 response = result_Result.error(`${e}`);
 }
 const encoded = encoder_Encoder.encodeObject(importBlockResultCodec, response);
@@ -25638,7 +25581,7 @@ class ImporterReady extends State {
 response: encoded.raw,
 };
 }
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} got invalid request type: ${JSON.stringify(block)}.`);
 return {
 response: null,
 };
@@ -25650,7 +25593,7 @@ class ImporterReady extends State {
 this.onBlock.emit(blockView);
 }
 else {
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} got invalid signal type: ${JSON.stringify(block)}.`);
 }
 }
 async endWork() {
@@ -25677,7 +25620,7 @@ if (!external_node_worker_threads_namespaceObject.isMainThread) {
 Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
 const machine = importerStateMachine();
 const channel = channel_MessageChannelStateMachine.receiveChannel(machine, external_node_worker_threads_namespaceObject.parentPort);
-channel.then((channel) => main(channel)).catch((e) => importer_logger.error
+channel.then((channel) => main(channel)).catch((e) => importer_logger.error(e));
 }
 const keccakHasher = KeccakHasher.create();
 async function createImporter(config) {
@@ -25699,7 +25642,7 @@ async function createImporter(config) {
 */
 async function main(channel) {
 const wasmPromise = initAll();
-importer_logger.info
+importer_logger.info(`📥 Importer starting ${channel.currentState()}`);
 // Await the configuration object
 const ready = await channel.waitForState("ready(importer)");
 let closeDb = async () => { };
@@ -25711,7 +25654,7 @@ async function main(channel) {
 };
 // TODO [ToDr] this is shit, since we have circular dependency.
 worker.setImporter(importer);
-importer_logger.info
+importer_logger.info("📥 Importer waiting for blocks.");
 worker.onBlock.on(async (block) => {
 const res = await importer.importBlock(block, config.omitSealVerification);
 if (res.isOk) {
@@ -25720,7 +25663,7 @@ async function main(channel) {
 });
 await wasmPromise;
 });
-importer_logger.info
+importer_logger.info("📥 Importer finished. Closing channel.");
 // close the database
 await closeDb();
 // Close the comms to gracefuly close the app.