@typeberry/jam 0.1.1-e48de40 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/block-generator/index.js +94 -132
- package/block-generator/index.js.map +1 -1
- package/importer/index.js +177 -234
- package/importer/index.js.map +1 -1
- package/index.js +341 -394
- package/index.js.map +1 -1
- package/jam-network/index.js +162 -198
- package/jam-network/index.js.map +1 -1
- package/package.json +2 -4
package/index.js CHANGED
@@ -1,4 +1,3 @@
-#!/usr/bin/env node
 import './sourcemap-register.cjs';import { createRequire as __WEBPACK_EXTERNAL_createRequire } from "module";
 import * as __WEBPACK_EXTERNAL_MODULE_lmdb__ from "lmdb";
 /******/ var __webpack_modules__ = ({
@@ -28435,34 +28434,6 @@ class WithDebug {
 }
 }

-;// CONCATENATED MODULE: ./packages/core/utils/dev.ts
-const dev_env = typeof process === "undefined" ? {} : process.env;
-/**
-* The function will produce relative path resolver that is adjusted
-* for package location within the workspace.
-*
-* Example:
-* $ npm start -w @typeberry/jam
-*
-* The above command will run `./bin/jam/index.js`, however we would
-* still want relative paths to be resolved according to top-level workspace
-* directory.
-*
-* So the caller, passes the absolute workspace path as argument and get's
-* a function that can properly resolve relative paths.
-*
-* NOTE: the translation happens only for development build! When
-* we build a single library from our project, we no longer mangle the paths.
-*/
-const workspacePathFix = dev_env.NODE_ENV === "development"
-? (workspacePath) => (p) => {
-if (p.startsWith("/")) {
-return p;
-}
-return `${workspacePath}/${p}`;
-}
-: () => (p) => p;
-
 ;// CONCATENATED MODULE: ./packages/core/utils/opaque.ts
 /**
 * @fileoverview `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
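The helper deleted above only rewrites relative paths in development builds, as its doc comment explains. A standalone restatement of that behaviour, for illustration only (the workspace root is a hypothetical value):

// Sketch of the removed workspacePathFix helper; not part of the published bundle.
const dev = typeof process === "undefined" ? false : process.env.NODE_ENV === "development";
const workspacePathFix = dev
  ? (workspacePath: string) => (p: string) => (p.startsWith("/") ? p : `${workspacePath}/${p}`)
  : () => (p: string) => p;

const resolvePath = workspacePathFix("/home/user/typeberry"); // hypothetical workspace root
resolvePath("configs/dev.json"); // "/home/user/typeberry/configs/dev.json" in a development build
resolvePath("/etc/jam.json");    // absolute paths are returned unchanged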
@@ -28806,7 +28777,6 @@ function isResult(x) {



-
 ;// CONCATENATED MODULE: ./packages/core/bytes/bitvec.ts

 /**
@@ -35178,23 +35148,6 @@ function parseLevel(lvl) {
 ;// CONCATENATED MODULE: ./packages/core/logger/console.ts
 // biome-ignore-all lint/suspicious/noConsole: logger

-function print(level, levelAndName, strings, data) {
-if (level < levelAndName[0]) {
-return;
-}
-const lvlText = Level[level].padEnd(5);
-const val = strings.map((v, idx) => `${v}${data[idx]}`);
-const msg = `${lvlText} [${levelAndName[1]}] ${val}`;
-if (level === Level.WARN) {
-console.warn(msg);
-}
-else if (level === Level.ERROR) {
-console.error(msg);
-}
-else {
-console.info(msg);
-}
-}
 /** An optimized logger that ignores `TRACE`, `DEBUG` and `LOG` messages.
 *
 * Use the `create` method to instantiate the right instance of a more specialized logger.
@@ -35225,91 +35178,109 @@ class ConsoleTransport {
 constructor(options) {
 this.options = options;
 }
-insane(
+insane(_moduleName, _val) {
 /* no-op */
 }
-trace(
+trace(_moduleName, _val) {
 /* no-op */
 }
-log(
+log(_moduleName, _val) {
 /* no-op */
 }
-info(
+info(_moduleName, _val) {
 /* no-op */
 }
-warn(
-
+warn(moduleName, val) {
+this.push(Level.WARN, moduleName, val);
 }
-error(
-
+error(moduleName, val) {
+this.push(Level.ERROR, moduleName, val);
+}
+push(level, moduleName, val) {
+const shortModule = moduleName.replace(this.options.workingDir, "");
+const configuredLevel = findLevel(this.options, moduleName);
+const lvlText = Level[level].padEnd(5);
+if (level < configuredLevel) {
+return;
+}
+const msg = `${lvlText} [${shortModule}] ${val}`;
+if (level === Level.WARN) {
+console.warn(msg);
+}
+else if (level === Level.ERROR) {
+console.error(msg);
+}
+else {
+console.info(msg);
+}
 }
 }
 /**
 * Insane version of console logger - supports insane level.
 */
 class InsaneConsoleLogger extends ConsoleTransport {
-insane(
-
+insane(moduleName, val) {
+this.push(Level.INSANE, moduleName, val);
 }
-trace(
-
+trace(moduleName, val) {
+this.push(Level.TRACE, moduleName, val);
 }
-log(
-
+log(moduleName, val) {
+this.push(Level.LOG, moduleName, val);
 }
-info(
-
+info(moduleName, val) {
+this.push(Level.INFO, moduleName, val);
 }
 }
 /**
 * A basic version of console logger - printing everything.
 */
 class TraceConsoleTransport extends ConsoleTransport {
-insane(
+insane(_moduleName, _val) {
 /* no-op */
 }
-trace(
-
+trace(moduleName, val) {
+this.push(Level.TRACE, moduleName, val);
 }
-log(
-
+log(moduleName, val) {
+this.push(Level.LOG, moduleName, val);
 }
-info(
-
+info(moduleName, val) {
+this.push(Level.INFO, moduleName, val);
 }
 }
 /**
 * An optimized version of the logger - completely ignores `TRACE` level calls.
 */
 class LogConsoleTransport extends ConsoleTransport {
-insane(
+insane(_moduleName, _val) {
 /* no-op */
 }
-trace(
+trace(_moduleName, _val) {
 /* no-op */
 }
-log(
-
+log(moduleName, val) {
+this.push(Level.LOG, moduleName, val);
 }
-info(
-
+info(moduleName, val) {
+this.push(Level.INFO, moduleName, val);
 }
 }
 /**
 * An optimized version of the logger - completely ignores `TRACE` & `DEBUG` level calls.
 */
 class InfoConsoleTransport extends ConsoleTransport {
-insane(
+insane(_moduleName, _val) {
 /* no-op */
 }
-trace(
+trace(_moduleName, _val) {
 /* no-op */
 }
-log(
+log(_moduleName, _val) {
 /* no-op */
 }
-info(
-
+info(moduleName, val) {
+this.push(Level.INFO, moduleName, val);
 }
 }

@@ -35346,6 +35317,11 @@ class Logger {
 const module = moduleName ?? fName;
 return new Logger(module.padStart(8, " "), GLOBAL_CONFIG);
 }
+/**
+* Return currently configured level for given module. */
+static getLevel(moduleName) {
+return findLevel(GLOBAL_CONFIG.options, moduleName);
+}
 /**
 * Global configuration of all loggers.
 *
@@ -35376,46 +35352,33 @@ class Logger {
 const options = parseLoggerOptions(input, defaultLevel, workingDir);
 Logger.configureAllFromOptions(options);
 }
-cachedLevelAndName;
 constructor(moduleName, config) {
 this.moduleName = moduleName;
 this.config = config;
 }
-/** Return currently configured level for given module. */
-getLevel() {
-return this.getLevelAndName()[0];
-}
-getLevelAndName() {
-if (this.cachedLevelAndName === undefined) {
-const level = findLevel(this.config.options, this.moduleName);
-const shortName = this.moduleName.replace(this.config.options.workingDir, "");
-this.cachedLevelAndName = [level, shortName];
-}
-return this.cachedLevelAndName;
-}
 /** Log a message with `INSANE` level. */
-insane(
-this.config.transport.insane(this.
+insane(val) {
+this.config.transport.insane(this.moduleName, val);
 }
 /** Log a message with `TRACE` level. */
-trace(
-this.config.transport.trace(this.
+trace(val) {
+this.config.transport.trace(this.moduleName, val);
 }
 /** Log a message with `DEBUG`/`LOG` level. */
-log(
-this.config.transport.log(this.
+log(val) {
+this.config.transport.log(this.moduleName, val);
 }
 /** Log a message with `INFO` level. */
-info(
-this.config.transport.info(this.
+info(val) {
+this.config.transport.info(this.moduleName, val);
 }
 /** Log a message with `WARN` level. */
-warn(
-this.config.transport.warn(this.
+warn(val) {
+this.config.transport.warn(this.moduleName, val);
 }
 /** Log a message with `ERROR` level. */
-error(
-this.config.transport.error(this.
+error(val) {
+this.config.transport.error(this.moduleName, val);
 }
 }

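The logger rework above moves level filtering out of each Logger instance (the cached [level, shortName] pair and getLevelAndName() are gone) and into the transport, which now receives the module name together with an already formatted message and re-resolves the configured level on every push(). A minimal standalone sketch of that pattern, using simplified stand-ins for the package's Level, options and findLevel types:

enum Level { INSANE = 0, TRACE = 1, LOG = 2, INFO = 3, WARN = 4, ERROR = 5 }

interface Options {
  workingDir: string;
  defaultLevel: Level;
  modules: Map<string, Level>;
}

// Most specific level configured for a module, falling back to the default.
function findLevel(options: Options, moduleName: string): Level {
  return options.modules.get(moduleName) ?? options.defaultLevel;
}

class PushTransport {
  constructor(private readonly options: Options) {}

  info(moduleName: string, val: string) {
    this.push(Level.INFO, moduleName, val);
  }

  warn(moduleName: string, val: string) {
    this.push(Level.WARN, moduleName, val);
  }

  push(level: Level, moduleName: string, val: string) {
    if (level < findLevel(this.options, moduleName)) {
      return; // below the configured threshold for this module
    }
    const shortModule = moduleName.replace(this.options.workingDir, "");
    const msg = `${Level[level].padEnd(5)} [${shortModule}] ${val}`;
    if (level === Level.WARN) {
      console.warn(msg);
    } else if (level === Level.ERROR) {
      console.error(msg);
    } else {
      console.info(msg);
    }
  }
}

// Usage: messages are formatted by the caller, filtering happens per call.
const transport = new PushTransport({ workingDir: "/repo/", defaultLevel: Level.INFO, modules: new Map() });
transport.info("/repo/packages/net", "listening"); // prints "INFO  [packages/net] listening"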
@@ -35505,15 +35468,15 @@ class NodeConfiguration {
 }
 function loadConfig(configPath) {
 if (configPath === DEFAULT_CONFIG) {
-logger.log
+logger.log("🔧 Loading DEFAULT config");
 return parseFromJson(configs.default, NodeConfiguration.fromJson);
 }
 if (configPath === DEV_CONFIG) {
-logger.log
+logger.log("🔧 Loading DEV config");
 return parseFromJson(configs.dev, NodeConfiguration.fromJson);
 }
 try {
-logger.log
+logger.log(`🔧 Loading config from ${configPath}`);
 const configFile = external_node_fs_default().readFileSync(configPath, "utf8");
 const parsed = JSON.parse(configFile);
 return parseFromJson(parsed, NodeConfiguration.fromJson);
@@ -39219,7 +39182,7 @@ class LmdbStates {
 await Promise.all([valuesWrite, statesWrite]);
 }
 catch (e) {
-states_logger.error
+states_logger.error(`${e}`);
 return result_Result.error(StateUpdateError.Commit);
 }
 return result_Result.ok(result_OK);
@@ -39288,7 +39251,7 @@ function openDatabase(nodeName, genesisHeader, databaseBasePath, { readOnly = fa
 const genesisHeaderHash = hashBytes(genesisHeader).asOpaque();
 const genesisHeaderHashNibbles = genesisHeaderHash.toString().substring(2, 10);
 const dbPath = `${databaseBasePath}/${nodeNameHash}/${genesisHeaderHashNibbles}`;
-common_logger.info
+common_logger.info(`🛢️ Opening database at ${dbPath}`);
 try {
 return {
 dbPath,
@@ -39310,21 +39273,21 @@ async function initializeDatabase(spec, genesisHeaderHash, rootDb, config, ances
 const states = new LmdbStates(spec, rootDb);
 const header = blocks.getBestHeaderHash();
 const state = blocks.getPostStateRoot(header);
-common_logger.log
-common_logger.log
+common_logger.log(`🛢️ Best header hash: ${header}`);
+common_logger.log(`🛢️ Best state root: ${state}`);
 // DB seems already initialized, just go with what we have.
 const isDbInitialized = state !== null && !state.isEqualTo(bytes_Bytes.zero(hash_HASH_SIZE)) && !header.isEqualTo(bytes_Bytes.zero(hash_HASH_SIZE));
 if (isDbInitialized) {
 await rootDb.db.close();
 return;
 }
-common_logger.log
+common_logger.log("🛢️ Database looks fresh. Initializing.");
 // looks like a fresh db, initialize the state.
 const genesisHeader = decoder_Decoder.decodeObject(Header.Codec, config.genesisHeader, spec);
 const genesisExtrinsic = emptyBlock().extrinsic;
 const genesisBlock = Block.create({ header: genesisHeader, extrinsic: genesisExtrinsic });
 const blockView = blockAsView(genesisBlock, spec);
-common_logger.log
+common_logger.log(`🧬 Writing genesis block #${genesisHeader.timeSlotIndex}: ${genesisHeaderHash}`);
 const { genesisStateSerialized, genesisStateRootHash } = loadGenesisState(spec, config.genesisState);
 // write to db
 await blocks.insertBlock(new WithHash(genesisHeaderHash, blockView));
@@ -39342,7 +39305,7 @@ function loadGenesisState(spec, data) {
 const stateEntries = state_entries_StateEntries.fromEntriesUnsafe(data.entries());
 const state = serialized_state_SerializedState.fromStateEntries(spec, stateEntries);
 const genesisStateRootHash = stateEntries.getRootHash();
-common_logger.info
+common_logger.info(`🧬 Genesis state root: ${genesisStateRootHash}`);
 return {
 genesisState: state,
 genesisStateSerialized: stateEntries,
@@ -39481,7 +39444,7 @@ class TypedPort {
 this.dispatchPortMessage(msg);
 }
 catch (e) {
-port_logger.error
+port_logger.error(`[${this.constructor.name}] Failed to dispatch a message: ${e}: ${JSON.stringify(msg)}`);
 throw e;
 }
 });
@@ -39555,7 +39518,7 @@ class TypedPort {
 this.port.postMessage(msg, transferList);
 }
 catch (e) {
-port_logger.error
+port_logger.error(`[${this.constructor.name}] Failed to post a message: ${e}: ${JSON.stringify(msg)}`);
 throw e;
 }
 }
@@ -39586,7 +39549,7 @@ class TypedPort {
 cleanup(reason) {
 // resolve all pending requests with an error.
 const responseListeners = this.responseListeners.eventNames();
-for (const ev
+for (const ev in responseListeners) {
 this.responseListeners.emit(ev, new Error(`port is ${reason}`));
 }
 }
@@ -39635,7 +39598,7 @@ class MessageChannelStateMachine {
 this.dispatchSignal(name, data);
 }
 catch (e) {
-channel_logger.error
+channel_logger.error(`[${this.constructor.name}] Unable to dispatch signal (${name}): ${e}. ${this.stateInfo(remoteState)}`);
 throw e;
 }
 });
@@ -39644,7 +39607,7 @@ class MessageChannelStateMachine {
 await this.dispatchRequest(name, data, msg);
 }
 catch (e) {
-channel_logger.error
+channel_logger.error(`[${this.constructor.name}] Unable to dispatch request (${name}): ${e}. ${this.stateInfo(remoteState)}`);
 throw e;
 }
 });
@@ -39730,7 +39693,7 @@ class MessageChannelStateMachine {
 this.machine.transition(res.transitionTo.state, res.transitionTo.data);
 }
 if (didStateChangeInMeantime) {
-channel_logger.warn
+channel_logger.warn(`Ignoring obsolete response for an old request: "${name}"`);
 return;
 }
 return this.port.respond(prevState.stateName, msg, res.response);
@@ -39746,7 +39709,7 @@ class MessageChannelStateMachine {
 }
 }
 transitionTo() {
-channel_logger.trace
+channel_logger.trace(`[${this.machine.name}] transitioned to ${this.currentState()}`);
 return this;
 }
 /**
@@ -39766,7 +39729,7 @@ class MessageChannelStateMachine {
 await promise;
 }
 catch (e) {
-channel_logger.error
+channel_logger.error(JSON.stringify(e));
 }
 return new MessageChannelStateMachine(machine, port);
 }
@@ -39969,7 +39932,7 @@ class State {
 * actions.
 */
 onActivation(data) {
-state_logger.trace
+state_logger.trace(`[${this.constructor.name}] Changing state to: ${this}`);
 this.data = data;
 }
 /**
@@ -40067,7 +40030,7 @@ async function spawnWorkerGeneric(bootstrapPath, logger, mainReadyName, mainRead
 const worker = new external_node_worker_threads_namespaceObject.Worker(bootstrapPath);
 const machine = stateMachineMain(`main->${mainReadyName}`, mainReadyName, mainReadyState);
 const channel = await MessageChannelStateMachine.createAndTransferChannel(machine, worker);
-logger.trace
+logger.trace(`[${machine.name}] Worker spawned ${channel.currentState()}`);
 return channel;
 }

@@ -40180,7 +40143,7 @@ class MainReady extends State {
 if (res instanceof Uint8Array) {
 return bytes_Bytes.fromBlob(res, hash_HASH_SIZE).asOpaque();
 }
-state_machine_logger.error
+state_machine_logger.error(`Invalid response for getBestStateRootHash. Expected Uint8Array, got: ${res}`);
 return bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
 }
 finish(channel) {
@@ -40228,7 +40191,7 @@ class ImporterReady extends State {
 }
 async getStateEntries(hash) {
 if (this.importer === null) {
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
 await new Promise((resolve) => {
 this.onImporter.once(resolve);
 });
@@ -40242,7 +40205,7 @@ class ImporterReady extends State {
 response: encoded.raw,
 };
 }
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} got invalid request type: ${JSON.stringify(hash)}.`);
 return {
 response: null,
 };
@@ -40262,7 +40225,7 @@ class ImporterReady extends State {
 }
 async importBlock(block) {
 if (this.importer === null) {
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
 await new Promise((resolve) => {
 this.onImporter.once(resolve);
 });
@@ -40282,8 +40245,8 @@ class ImporterReady extends State {
 }
 }
 catch (e) {
-state_machine_logger.error
-state_machine_logger.error
+state_machine_logger.error(`Failed to import block: ${e}`);
+state_machine_logger.error(`${e instanceof Error ? e.stack : ""}`);
 response = result_Result.error(`${e}`);
 }
 const encoded = encoder_Encoder.encodeObject(importBlockResultCodec, response);
@@ -40291,7 +40254,7 @@ class ImporterReady extends State {
 response: encoded.raw,
 };
 }
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} got invalid request type: ${JSON.stringify(block)}.`);
 return {
 response: null,
 };
@@ -40303,7 +40266,7 @@ class ImporterReady extends State {
 this.onBlock.emit(blockView);
 }
 else {
-state_machine_logger.error
+state_machine_logger.error(`${this.constructor.name} got invalid signal type: ${JSON.stringify(block)}.`);
 }
 }
 async endWork() {
@@ -40402,7 +40365,7 @@ class state_machine_MainReady extends State {
 this.onNewBlocks.emit(blocks);
 }
 else {
-jam_network_state_machine_logger.error
+jam_network_state_machine_logger.error(`${this.constructor.name} got invalid signal type: ${JSON.stringify(block)}.`);
 }
 }
 announceHeader(port, header) {
@@ -40440,7 +40403,7 @@ class NetworkReady extends State {
 this.onNewHeader.emit(decoded);
 }
 else {
-jam_network_state_machine_logger.error
+jam_network_state_machine_logger.error(`${this.constructor.name} got invalid signal type: ${JSON.stringify(header)}.`);
 }
 }
 sendBlocks(port, blocks) {
@@ -42148,14 +42111,12 @@ class WriteablePage extends MemoryPage {



-
-
 var AccessType;
 (function (AccessType) {
 AccessType[AccessType["READ"] = 0] = "READ";
 AccessType[AccessType["WRITE"] = 1] = "WRITE";
 })(AccessType || (AccessType = {}));
-const
+// const logger = Logger.new(import.meta.filename, "pvm:mem");
 class Memory {
 sbrkIndex;
 virtualSbrkIndex;
@@ -42186,7 +42147,7 @@ class Memory {
 if (bytes.length === 0) {
 return result_Result.ok(result_OK);
 }
-
+// logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`);
 const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
 if (pagesResult.isError) {
 return result_Result.error(pagesResult.error);
@@ -42253,7 +42214,7 @@ class Memory {
 currentPosition += bytesToRead;
 bytesLeft -= bytesToRead;
 }
-
+// logger.insane(`MEM[${startAddress}] => ${BytesBlob.blobFrom(result)}`);
 return result_Result.ok(result_OK);
 }
 sbrk(length) {
@@ -44183,7 +44144,7 @@ class ProgramDecoder {
 return result_Result.ok(new ProgramDecoder(program));
 }
 catch (e) {
-program_decoder_logger.error
+program_decoder_logger.error(`Invalid program: ${e}`);
 return result_Result.error(ProgramDecoderError.InvalidProgramError);
 }
 }
@@ -44349,7 +44310,7 @@ class Interpreter {
 const argsType = instructionArgumentTypeMap[currentInstruction] ?? ArgumentType.NO_ARGUMENTS;
 const argsResult = this.argsDecodingResults[argsType];
 this.argsDecoder.fillArgs(this.pc, argsResult);
-interpreter_logger.insane
+interpreter_logger.insane(`[PC: ${this.pc}] ${Instruction[currentInstruction]}`);
 if (!isValidInstruction) {
 this.instructionResult.status = pvm_interpreter_result_Result.PANIC;
 }
@@ -44421,7 +44382,7 @@ class Interpreter {
 this.status = status_Status.HOST;
 break;
 }
-interpreter_logger.insane
+interpreter_logger.insane(`[PC: ${this.pc}] Status: ${pvm_interpreter_result_Result[this.instructionResult.status]}`);
 return this.status;
 }
 this.pc = this.instructionResult.nextPc;
@@ -44668,7 +44629,7 @@ class host_calls_manager_HostCallsManager {
 return `r${idx}=${value} (0x${value.toString(16)})`;
 })
 .join(", ");
-host_calls_manager_logger.insane
+host_calls_manager_logger.insane(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
 }
 }
 class NoopMissing {
@@ -45276,7 +45237,7 @@ class block_generator_state_machine_MainReady extends State {
 this.onBlock.emit(block);
 }
 else {
-block_generator_state_machine_logger.error
+block_generator_state_machine_logger.error(`${this.constructor.name} got invalid signal type: ${JSON.stringify(block)}.`);
 }
 }
 finish(channel) {
@@ -45331,12 +45292,12 @@ if (!external_node_worker_threads_namespaceObject.isMainThread) {
 channel
 .then((channel) => main(channel))
 .catch((e) => {
-block_generator_logger.error
+block_generator_logger.error(e);
 if (e.stack !== undefined) {
-block_generator_logger.error
+block_generator_logger.error(e.stack);
 }
 if (e.cause !== undefined) {
-block_generator_logger.error
+block_generator_logger.error(e.cause);
 }
 });
 }
@@ -45344,7 +45305,7 @@ if (!external_node_worker_threads_namespaceObject.isMainThread) {
 * The `BlockGenerator` should periodically create new blocks and send them as signals to the main thread.
 */
 async function main(channel) {
-block_generator_logger.info
+block_generator_logger.info(`🎁 Block Generator running ${channel.currentState()}`);
 // Await the configuration object
 const ready = await channel.waitForState("ready(generator)");
 const config = ready.currentState().getConfig();
@@ -45359,11 +45320,11 @@ async function main(channel) {
 await (0,promises_namespaceObject.setTimeout)(config.chainSpec.slotDuration * 1000);
 counter += 1;
 const newBlock = await generator.nextEncodedBlock();
-block_generator_logger.trace
+block_generator_logger.trace(`Sending block ${counter}`);
 worker.sendBlock(port, newBlock);
 }
 });
-block_generator_logger.info
+block_generator_logger.info("Block Generator finished. Closing channel.");
 // Close the comms to gracefully close the app.
 finished.currentState().close(channel);
 }
@@ -45478,11 +45439,11 @@ class PeersManagement {
 _onPeerDisconnected = [];
 peers = new Map();
 peerConnected(peer) {
-peers_logger.info
+peers_logger.info(`💡 Peer ${displayId(peer)} connected.`);
 const oldPeerData = this.peers.get(peer.id);
 if (oldPeerData !== undefined) {
 // TODO [ToDr] replacing old connection?
-peers_logger.warn
+peers_logger.warn("Replacing older connection.");
 }
 this.peers.set(peer.id, peer);
 for (const callback of this._onPeerConnected) {
@@ -45490,7 +45451,7 @@ class PeersManagement {
 }
 }
 peerDisconnected(peer) {
-peers_logger.info
+peers_logger.info(`⚡︎Peer ${displayId(peer)} disconnected.`);
 this.peers.delete(peer.id);
 for (const callback of this._onPeerDisconnected) {
 callback(peer);
@@ -54282,23 +54243,23 @@ var VerifyCertError;
 VerifyCertError[VerifyCertError["IncorrectSignature"] = 4] = "IncorrectSignature";
 })(VerifyCertError || (VerifyCertError = {}));
 async function verifyCertificate(certs) {
-certificate_logger.log
+certificate_logger.log("Incoming peer. Verifying certificate");
 // Must present exactly one cert
 if (certs.length !== 1) {
-certificate_logger.log
+certificate_logger.log("Rejecting peer with no certificates.");
 return result_Result.error(VerifyCertError.NoCertificate);
 }
 // Parse with Node's X509Certificate (accepts PEM or DER)
 const xc = new (external_node_crypto_default()).X509Certificate(certs[0]);
 // Must be Ed25519 key
 if (xc.publicKey.asymmetricKeyType !== CURVE_NAME.toLowerCase()) {
-certificate_logger.log
+certificate_logger.log(`Rejecting peer using non-ed25519 certificate: ${xc.publicKey.asymmetricKeyType}`);
 return result_Result.error(VerifyCertError.NotEd25519);
 }
 // Extract raw public key via JWK export
 const jwk = xc.publicKey.export({ format: "jwk" });
 if (jwk.kty !== KEY_TYPE || jwk.crv !== CURVE_NAME) {
-certificate_logger.log
+certificate_logger.log(`Public key type mismatch: ${jwk.kty}, ${jwk.crv}`);
 return result_Result.error(VerifyCertError.PublicKeyTypeMismatch);
 }
 // SAN must be exactly 'e'+base32(rawPub)
@@ -54306,7 +54267,7 @@ async function verifyCertificate(certs) {
 const sanField = xc.subjectAltName ?? "";
 const m = sanField.match(/DNS:([^,]+)/);
 if (m === null || m[1] !== expectedSan) {
-certificate_logger.log
+certificate_logger.log(`AltName mismatch. Expected: '${expectedSan}', got: '${m?.[1]}'`);
 return result_Result.error(VerifyCertError.AltNameMismatch);
 }
 const key = Buffer.from(jwk.x ?? "", "base64url");
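For reference, the checks in verifyCertificate() above rely only on Node's built-in crypto module; the sketch below reproduces them in isolation. It assumes KEY_TYPE and CURVE_NAME in the bundle correspond to the standard Ed25519 JWK values "OKP" and "Ed25519", and the PEM input is a placeholder:

import { X509Certificate } from "node:crypto";

// Illustration of the certificate checks; returns the raw public key and SAN DNS name.
function inspectPeerCertificate(pem: string | Buffer) {
  const xc = new X509Certificate(pem);
  // Ed25519 certificates report "ed25519" as the asymmetric key type.
  if (xc.publicKey.asymmetricKeyType !== "ed25519") {
    return { ok: false as const, reason: "not an Ed25519 certificate" };
  }
  // JWK export exposes the raw public key as base64url in `x` (kty "OKP", crv "Ed25519").
  const jwk = xc.publicKey.export({ format: "jwk" });
  if (jwk.kty !== "OKP" || jwk.crv !== "Ed25519") {
    return { ok: false as const, reason: "public key type mismatch" };
  }
  const rawPublicKey = Buffer.from(jwk.x ?? "", "base64url");
  // subjectAltName is a comma-separated string such as "DNS:e...".
  const dnsName = (xc.subjectAltName ?? "").match(/DNS:([^,]+)/)?.[1];
  return { ok: true as const, rawPublicKey, dnsName };
}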
@@ -54573,19 +54534,19 @@ class QuicNetwork {
 }
 this.started = true;
 await this.socket.start({ host: this.listen.host, port: this.listen.port });
-quic_network_logger.info
+quic_network_logger.info(`🛜 QUIC socket on ${this.socket.host}:${this.socket.port}`);
 await this.server.start();
-quic_network_logger.log
+quic_network_logger.log("🛜 QUIC server listening");
 }
 async stop() {
 if (!this.started) {
 throw new Error("Network not started yet!");
 }
-quic_network_logger.info
+quic_network_logger.info("Stopping the networking.");
 await this.server.stop();
 await this.socket.stop();
 this.started = false;
-quic_network_logger.info
+quic_network_logger.info("Networking stopped.");
 }
 get peers() {
 return this._peers;
@@ -54606,7 +54567,7 @@ clazz, callback) {
 await callback(ev);
 }
 catch (e) {
-quic_utils_logger.error
+quic_utils_logger.error(`Unhandled exception in ${clazz.name} event handler: ${e}`);
 }
 });
 }
@@ -54665,7 +54626,7 @@ class QuicPeer {
 streamEvents = new (external_node_events_default())();
 constructor(conn, peerInfo) {
 this.conn = conn;
-quic_peer_logger.log
+quic_peer_logger.log(`👥 [${peerInfo.id}] peer connected ${conn.remoteHost}:${conn.remotePort}`);
 this.connectionId = conn.connectionIdShared.toString();
 this.address = {
 host: conn.remoteHost,
@@ -54675,11 +54636,11 @@ class QuicPeer {
 this.key = peerInfo.key;
 addEventListener(conn, EventQUICConnectionStream, (ev) => {
 const stream = ev.detail;
-quic_peer_logger.log
+quic_peer_logger.log(`🚰 [${this.id}] new stream: [${stream.streamId}]`);
 this.streamEvents.emit("stream", new QuicStream(stream));
 });
 addEventListener(conn, EventQUICConnectionError, (err) => {
-quic_peer_logger.error
+quic_peer_logger.error(`❌ [${this.id}] connection failed: ${err.detail}`);
 });
 }
 addOnIncomingStream(streamCallback) {
@@ -54687,11 +54648,11 @@ class QuicPeer {
 }
 openStream() {
 const stream = this.conn.newStream("bidi");
-quic_peer_logger.log
+quic_peer_logger.log(`🚰 [${this.id}] opening stream: [${stream.streamId}]`);
 return new QuicStream(stream);
 }
 async disconnect() {
-quic_peer_logger.log
+quic_peer_logger.log(`👋 [${this.id}] disconnecting`);
 await this.conn.stop({ isApp: true });
 }
 }
@@ -54712,7 +54673,7 @@ const setup_logger = Logger.new(import.meta.filename, "net");
 class Quic {
 /** Setup QUIC socket and start listening for connections. */
 static async setup({ host, port, protocols, key }) {
-const quicLoggerLvl =
+const quicLoggerLvl = Logger.getLevel("net") > Level.TRACE ? LogLevel.WARN : LogLevel.DEBUG;
 const quicLogger = new dist_Logger("quic", quicLoggerLvl, [
 new handlers_StreamHandler(format `${level}:${keys}:${msg}`),
 ]);
@@ -54735,7 +54696,7 @@ class Quic {
 verifyPeer: true,
 verifyCallback: lastConnectedPeer.verifyCallback,
 };
-setup_logger.info
+setup_logger.info(`🆔 Peer id: ** ${altNameRaw(key.pubKey)}@${host}:${port} ** (pubkey: ${key.pubKey})`);
 // Shared injected UDP socket
 const socket = new dist_QUICSocket({
 logger: quicLogger.getChild("socket"),
@@ -54750,8 +54711,8 @@ class Quic {
 // peer management
 const peers = new PeersManagement();
 // basic error handling
-addEventListener(server, EventQUICServerError, (error) => setup_logger.error
-addEventListener(server, EventQUICServerClose, (ev) => setup_logger.error
+addEventListener(server, EventQUICServerError, (error) => setup_logger.error(`🛜 Server error: ${error}`));
+addEventListener(server, EventQUICServerClose, (ev) => setup_logger.error(`🛜 Server stopped: ${ev}`));
 // handling incoming session
 addEventListener(server, EventQUICServerConnection, async (ev) => {
 const conn = ev.detail;
@@ -54760,16 +54721,16 @@ class Quic {
 return;
 }
 if (lastConnectedPeer.info.key.isEqualTo(key.pubKey)) {
-setup_logger.log
+setup_logger.log(`🛜 Rejecting connection from ourself from ${conn.remoteHost}:${conn.remotePort}`);
 await conn.stop();
 return;
 }
 if (peers.isConnected(lastConnectedPeer.info.id)) {
-setup_logger.log
+setup_logger.log(`🛜 Rejecting duplicate connection with peer ${lastConnectedPeer.info.id} from ${conn.remoteHost}:${conn.remotePort}`);
 await conn.stop();
 return;
 }
-setup_logger.log
+setup_logger.log(`🛜 Server handshake with ${conn.remoteHost}:${conn.remotePort}`);
 newPeer(conn, lastConnectedPeer.info);
 lastConnectedPeer.info = null;
 await conn.start();
@@ -54792,10 +54753,10 @@ class Quic {
 });
 const client = await clientLater;
 addEventListener(client, EventQUICClientClose, () => {
-setup_logger.log
+setup_logger.log("⚰️ Client connection closed.");
 });
 addEventListener(client, EventQUICClientError, (error) => {
-setup_logger.error
+setup_logger.error(`🔴 Client error: ${error.detail}`);
 });
 if (peerDetails.info === null) {
 throw new Error("Client connected, but there is no peer details!");
@@ -54803,7 +54764,7 @@ class Quic {
 if (options.verifyName !== undefined && options.verifyName !== peerDetails.info.id) {
 throw new Error(`Client connected, but the id didn't match. Expected: ${options.verifyName}, got: ${peerDetails.info.id}`);
 }
-setup_logger.log
+setup_logger.log(`🤝 Client handshake with: ${peer.host}:${peer.port}`);
 return newPeer(client.connection, peerDetails.info);
 }
 function newPeer(conn, peerInfo) {
@@ -54923,10 +54884,10 @@ class Connections {
 for (;;) {
 // increase the reconnection counter
 meta.currentRetry += 1;
-if (meta.currentRetry
+if (meta.currentRetry >= meta.maxRetries) {
 // reached max retries for a peer, remove it from tracking.
 this.peerInfo.delete(id);
-jamnp_s_peers_logger.log
+jamnp_s_peers_logger.log(`[${id}] max retries reached. Removing peer.`);
 return;
 }
 // else attempt to connect to a node a bit later.
@@ -54945,7 +54906,7 @@ class Connections {
 }
 // attempt to connect to the peer
 try {
-jamnp_s_peers_logger.trace
+jamnp_s_peers_logger.trace(`[${id}] Attempting to connect to peer at ${meta.address.host}:${meta.address.port}.`);
 await this.network.dial(meta.address, { signal, verifyName: meta.peerId });
 return;
 }
@@ -54954,7 +54915,7 @@ class Connections {
 return;
 }
 // failing to connect, will retry.
-jamnp_s_peers_logger.trace
+jamnp_s_peers_logger.trace(`[${id}] attempt failed. Will retry (${meta.currentRetry}/${meta.maxRetries})`);
 }
 }
 }
@@ -55067,7 +55028,7 @@ class StreamManager {
 // We expect a one-byte identifier first.
 const data = await reader.read();
 bytes = bytes_BytesBlob.blobFrom(data.value !== undefined ? data.value : new Uint8Array());
-stream_manager_logger.trace
+stream_manager_logger.trace(`🚰 --> [${peer.id}:${streamId}] Initial data: ${bytes}`);
 }
 finally {
 reader.releaseLock();
@@ -55081,7 +55042,7 @@ class StreamManager {
 if (handler === undefined) {
 throw new Error(`Unsupported stream kind: ${kind}`);
 }
-stream_manager_logger.log
+stream_manager_logger.log(`🚰 --> [${peer.id}:${stream.streamId}] Stream identified as: ${kind}`);
 this.registerStream(peer, handler, stream, bytes_BytesBlob.blobFrom(bytes.raw.subarray(1)));
 }
 registerStream(peer, handler, stream, initialData) {
@@ -55091,7 +55052,7 @@ class StreamManager {
 this.streams.delete(streamId);
 this.backgroundTasks.delete(streamId);
 if (kind === StreamErrorKind.Exception) {
-stream_manager_logger.error
+stream_manager_logger.error(`🚰 --- [${peer.id}:${streamId}] Stream error: ${e}. Disconnecting peer.`);
 }
 if (kind !== StreamErrorKind.LocalClose) {
 // whenever we have an error, we are going to inform the handler
@@ -55125,10 +55086,10 @@ async function readStreamForever(peer, handler, quicStream, initialData, reader)
 let isDone = false;
 const callback = handleMessageFragmentation((data) => {
 const bytes = bytes_BytesBlob.blobFrom(new Uint8Array(data));
-stream_manager_logger.trace
+stream_manager_logger.trace(`🚰 --> [${peer.id}:${quicStream.streamId}] ${bytes}`);
 handler.onStreamMessage(quicStream, bytes);
 }, () => {
-stream_manager_logger.error
+stream_manager_logger.error(`🚰 --> [${peer.id}:${quicStream.streamId}] got too much data. Disconnecting.`);
 peer.disconnect();
 });
 for (;;) {
@@ -55137,7 +55098,7 @@ async function readStreamForever(peer, handler, quicStream, initialData, reader)
 // be a promise, so that we can make back pressure here.
 callback(bytes.raw);
 if (isDone) {
-stream_manager_logger.log
+stream_manager_logger.log(`🚰 --> [${peer.id}:${quicStream.streamId}] remote finished.`);
 return;
 }
 // await for more data
@@ -55180,7 +55141,7 @@ class QuicStreamSender {
 return;
 }
 const { data, addPrefix } = chunk;
-stream_manager_logger.trace
+stream_manager_logger.trace(`🚰 <-- [${this.streamId}] write: ${data}`);
 if (addPrefix) {
 await writer.write(encodeMessageLength(data.raw));
 }
@@ -55197,7 +55158,7 @@ class QuicStreamSender {
 }
 close() {
 handleAsyncErrors(async () => {
-stream_manager_logger.trace
+stream_manager_logger.trace(`🚰 <-- [${this.streamId}] closing`);
 if (this.currentWriterPromise !== null) {
 await this.currentWriterPromise;
 }
@@ -55283,7 +55244,7 @@ class ServerHandler {
 }
 onStreamMessage(sender, message) {
 const request = decoder_Decoder.decodeObject(BlockRequest.Codec, message);
-ce_128_block_request_logger.log
+ce_128_block_request_logger.log(`[${sender.streamId}] Client has requested: ${request}`);
 const blocks = this.getBlockSequence(sender.streamId, request.headerHash, request.direction, request.maxBlocks);
 sender.bufferAndSend(encoder_Encoder.encodeObject(descriptors_codec.sequenceFixLen(Block.Codec.View, blocks.length), blocks, this.chainSpec));
 sender.close();
@@ -55303,7 +55264,7 @@ class ClientHandler {
 throw new Error("Received an unexpected message from the server.");
 }
 const blocks = decoder_Decoder.decodeSequence(Block.Codec.View, message, this.chainSpec);
-ce_128_block_request_logger.log
+ce_128_block_request_logger.log(`[${sender.streamId}] Server returned ${blocks.length} blocks in ${message.length} bytes of data.`);
 this.promiseResolvers.get(sender.streamId)?.(blocks);
 this.promiseResolvers.delete(sender.streamId);
 }
@@ -55474,13 +55435,13 @@ class ce_129_state_request_Handler {
 }
 onStreamMessage(sender, message) {
 if (this.isServer) {
-ce_129_state_request_logger.info
+ce_129_state_request_logger.info(`[${sender.streamId}][server]: Received request.`);
 if (this.getBoundaryNodes === undefined || this.getKeyValuePairs === undefined)
 return;
 const request = decoder_Decoder.decodeObject(StateRequest.Codec, message);
 const boundaryNodes = this.getBoundaryNodes(request.headerHash, request.startKey, request.endKey);
 const keyValuePairs = this.getKeyValuePairs(request.headerHash, request.startKey, request.endKey);
-ce_129_state_request_logger.info
+ce_129_state_request_logger.info(`[${sender.streamId}][server]: <-- responding with boundary nodes and key value pairs.`);
 sender.bufferAndSend(encoder_Encoder.encodeObject(descriptors_codec.sequenceVarLen(trieNodeCodec), boundaryNodes));
 sender.bufferAndSend(encoder_Encoder.encodeObject(StateResponse.Codec, StateResponse.create({ keyValuePairs })));
 sender.close();
@@ -55488,11 +55449,11 @@ class ce_129_state_request_Handler {
 }
 if (!this.boundaryNodes.has(sender.streamId)) {
 this.boundaryNodes.set(sender.streamId, decoder_Decoder.decodeObject(descriptors_codec.sequenceVarLen(trieNodeCodec), message));
-ce_129_state_request_logger.info
+ce_129_state_request_logger.info(`[${sender.streamId}][client]: Received boundary nodes.`);
 return;
 }
 this.onResponse.get(sender.streamId)?.(decoder_Decoder.decodeObject(StateResponse.Codec, message));
-ce_129_state_request_logger.info
+ce_129_state_request_logger.info(`[${sender.streamId}][client]: Received state values.`);
 }
 onClose(streamId) {
 this.boundaryNodes.delete(streamId);
@@ -55549,7 +55510,7 @@ class ce_131_ce_132_safrole_ticket_distribution_ServerHandler {
 }
 onStreamMessage(sender, message) {
 const ticketDistribution = Decoder.decodeObject(TicketDistributionRequest.Codec, message);
-ce_131_ce_132_safrole_ticket_distribution_logger.log
+ce_131_ce_132_safrole_ticket_distribution_logger.log(`[${sender.streamId}][ce-${this.kind}] Received ticket for epoch ${ticketDistribution.epochIndex}`);
 this.onTicketReceived(ticketDistribution.epochIndex, ticketDistribution.ticket);
 sender.close();
 }
@@ -55561,7 +55522,7 @@ class ce_131_ce_132_safrole_ticket_distribution_ClientHandler {
 this.kind = kind;
 }
 onStreamMessage(sender) {
-ce_131_ce_132_safrole_ticket_distribution_logger.warn
+ce_131_ce_132_safrole_ticket_distribution_logger.warn(`[${sender.streamId}][ce-${this.kind}] Unexpected message received. Closing.`);
 sender.close();
 }
 onClose() { }
@@ -55634,15 +55595,15 @@ class ce_133_work_package_submission_ServerHandler {
 class ce_133_work_package_submission_ClientHandler {
 kind = ce_133_work_package_submission_STREAM_KIND;
 onStreamMessage(sender) {
-ce_133_work_package_submission_logger.warn
+ce_133_work_package_submission_logger.warn(`[${sender.streamId}] Got unexpected message on CE-133 stream. Closing.`);
 sender.close();
 }
 onClose() { }
 sendWorkPackage(sender, coreIndex, workPackage, extrinsic) {
 const corePack = CoreWorkPackage.create({ coreIndex, workPackage });
-ce_133_work_package_submission_logger.trace
+ce_133_work_package_submission_logger.trace(`[${sender.streamId}] Sending work package: ${corePack}`);
 sender.bufferAndSend(Encoder.encodeObject(CoreWorkPackage.Codec, corePack));
-ce_133_work_package_submission_logger.trace
+ce_133_work_package_submission_logger.trace(`[${sender.streamId}] Sending extrinsics: ${workPackage.items}`);
 sender.bufferAndSend(Encoder.encodeObject(workItemExtrinsicsCodec(workPackage.items), extrinsic));
 // now close the connection
 sender.close();
@@ -55718,7 +55679,7 @@ class ce_134_work_package_sharing_ServerHandler {
 ce_134_work_package_sharing_ServerHandler.sendWorkReport(sender, workReportHash, signature);
 })
 .catch((error) => {
-ce_134_work_package_sharing_logger.error
+ce_134_work_package_sharing_logger.error(`[${streamId}] Error processing work package: ${error}`);
 this.onClose(streamId);
 });
 }
@@ -55735,7 +55696,7 @@ class ce_134_work_package_sharing_ClientHandler {
 throw new Error("Unexpected message received.");
 }
 const response = Decoder.decodeObject(WorkPackageSharingResponse.Codec, message);
-ce_134_work_package_sharing_logger.info
+ce_134_work_package_sharing_logger.info(`[${sender.streamId}] Received work report hash and signature.`);
 pendingRequest.resolve({ workReportHash: response.workReportHash, signature: response.signature });
 sender.close();
 }
@@ -55748,9 +55709,9 @@ class ce_134_work_package_sharing_ClientHandler {
 }
 async sendWorkPackage(sender, coreIndex, segmentsRootMappings, workPackageBundle) {
 const request = WorkPackageSharingRequest.create({ coreIndex, segmentsRootMappings });
-ce_134_work_package_sharing_logger.trace
+ce_134_work_package_sharing_logger.trace(`[${sender.streamId}] Sending core index and segments-root mappings.`);
 sender.bufferAndSend(Encoder.encodeObject(WorkPackageSharingRequest.Codec, request));
-ce_134_work_package_sharing_logger.trace
+ce_134_work_package_sharing_logger.trace(`[${sender.streamId}] Sending work package bundle.`);
 sender.bufferAndSend(Encoder.encodeObject(WorkPackageBundleCodec, workPackageBundle));
 return new Promise((resolve, reject) => {
 this.pendingRequests.set(sender.streamId, { resolve, reject });
@@ -55810,7 +55771,7 @@ class ce_135_work_report_distribution_ServerHandler {
 }
 onStreamMessage(sender, message) {
 const guaranteedWorkReport = Decoder.decodeObject(GuaranteedWorkReport.Codec, message, this.chainSpec);
-ce_135_work_report_distribution_logger.log
+ce_135_work_report_distribution_logger.log(`[${sender.streamId}] Received guaranteed work report.`);
 this.onWorkReport(guaranteedWorkReport);
 sender.close();
 }
@@ -55823,12 +55784,12 @@ class ce_135_work_report_distribution_ClientHandler {
 this.chainSpec = chainSpec;
 }
 onStreamMessage(sender) {
-ce_135_work_report_distribution_logger.warn
+ce_135_work_report_distribution_logger.warn(`[${sender.streamId}] Got unexpected message on CE-135 stream. Closing.`);
 sender.close();
 }
 onClose() { }
 sendWorkReport(sender, workReport) {
-ce_135_work_report_distribution_logger.trace
+ce_135_work_report_distribution_logger.trace(`[${sender.streamId}] Sending guaranteed work report.`);
 sender.bufferAndSend(Encoder.encodeObject(GuaranteedWorkReport.Codec, workReport, this.chainSpec));
 sender.close();
 }
@@ -55934,7 +55895,7 @@ class up_0_block_announcement_Handler {
 this.handshakes.set(streamId, handshake);
 // we didn't initiate this handshake, so let's respond
 if (!this.pendingHandshakes.delete(streamId)) {
-up_0_block_announcement_logger.log
+up_0_block_announcement_logger.log(`[${streamId}] <-- responding with a handshake.`);
 sender.bufferAndSend(encoder_Encoder.encodeObject(Handshake.Codec, this.getHandshake()));
 }
 this.onHandshake(streamId, handshake);
@@ -55942,7 +55903,7 @@ class up_0_block_announcement_Handler {
 }
 // it's just an announcement
 const annoucement = decoder_Decoder.decodeObject(Announcement.Codec, message, this.spec);
-up_0_block_announcement_logger.log
+up_0_block_announcement_logger.log(`[${streamId}] --> got blocks announcement: ${annoucement.final}`);
 this.onAnnouncement(streamId, annoucement);
 }
 onClose(streamId) {
@@ -55955,7 +55916,7 @@ class up_0_block_announcement_Handler {
 return;
 }
 const handshake = this.getHandshake();
-up_0_block_announcement_logger.trace
+up_0_block_announcement_logger.trace(`[${streamId}] <-- sending handshake`);
 this.pendingHandshakes.set(sender.streamId, true);
 sender.bufferAndSend(encoder_Encoder.encodeObject(Handshake.Codec, handshake));
 }
@@ -55963,11 +55924,11 @@ class up_0_block_announcement_Handler {
 const { streamId } = sender;
 // only send announcement if we've handshaken
 if (this.handshakes.has(streamId)) {
-up_0_block_announcement_logger.trace
+up_0_block_announcement_logger.trace(`[${streamId}] <-- sending block announcement: ${annoucement.final}`);
 sender.bufferAndSend(encoder_Encoder.encodeObject(Announcement.Codec, annoucement, this.spec));
 }
 else {
-up_0_block_announcement_logger.warn
+up_0_block_announcement_logger.warn(`[${streamId}] <-- no handshake yet, skipping announcement.`);
 }
 }
 }
@@ -56084,7 +56045,7 @@ class SyncTask {
 onUp0Annoucement(peer, announcement) {
 const { hash, slot } = announcement.final;
 const bestHeader = hashHeader(announcement.header, this.spec);
-sync_logger.info
+sync_logger.info(`[${peer.id}] --> Received new header #${announcement.header.timeSlotIndex}: ${bestHeader.hash}`);
 // NOTE [ToDr] Instead of having `Connections` store aux data perhaps
 // we should maintain that directly? However that would require
 // listening to peers connected/disconnected to perfrom some cleanups
@@ -56159,7 +56120,7 @@ class SyncTask {
 const peers = this.connections.getConnectedPeers();
 for (const peerInfo of peers) {
 this.streamManager.withStreamOfKind(peerInfo.peerId, up_0_block_announcement_STREAM_KIND, (handler, sender) => {
-sync_logger.log
+sync_logger.log(`[${peerInfo.peerId}] <-- Broadcasting new header #${slot}: ${header.hash}`);
 handler.sendAnnouncement(sender, annoucement);
 return result_OK;
 });
@@ -56173,13 +56134,13 @@ class SyncTask {
 }
 if (res.error === BlockSequenceError.BlockOnFork) {
 // seems that peer is requesting syncing a fork from us, let's bail.
-sync_logger.warn
+sync_logger.warn(`[${peer.id}] <-- Invalid block sequence request: ${startHash} is on a fork.`);
 return [];
 }
 if (res.error === BlockSequenceError.NoStartBlock) {
 // we don't know about that block at all, so let's just bail.
 // we should probably penalize the peer for sending BS?
-sync_logger.warn
+sync_logger.warn(`[${peer.id}] <-- Invalid block sequence request: ${startHash} missing header or extrinsic.`);
 return [];
 }
 debug_assertNever(res.error);
@@ -56199,10 +56160,10 @@ class SyncTask {
|
|
|
56199
56160
|
// figure out where others are at
|
|
56200
56161
|
const othersBest = this.othersBest;
|
|
56201
56162
|
const blocksToSync = othersBest.slot - ourBestSlot;
|
|
56202
|
-
sync_logger.trace
|
|
56163
|
+
sync_logger.trace(`Our best. ${ourBestSlot}. Best seen: ${othersBest.slot}`);
|
|
56203
56164
|
if (blocksToSync < 1) {
|
|
56204
56165
|
this.connections.getPeerCount();
|
|
56205
|
-
sync_logger.trace
|
|
56166
|
+
sync_logger.trace(`No new blocks. ${peerCount} peers.`);
|
|
56206
56167
|
return {
|
|
56207
56168
|
kind: SyncResult.NoNewBlocks,
|
|
56208
56169
|
ours: ourBestSlot,
|
|
@@ -56210,7 +56171,7 @@ class SyncTask {
|
|
|
56210
56171
|
};
|
|
56211
56172
|
}
|
|
56212
56173
|
const requested = [];
|
|
56213
|
-
sync_logger.log
|
|
56174
|
+
sync_logger.log(`Sync ${blocksToSync} blocks from ${peerCount} peers.`);
|
|
56214
56175
|
// NOTE [ToDr] We might be requesting the same blocks from many peers
|
|
56215
56176
|
// which isn't very optimal, but for now: 🤷
|
|
56216
56177
|
//
|
|
@@ -56236,12 +56197,12 @@ class SyncTask {
|
|
|
56236
56197
|
// request as much blocks from that peer as possible.
|
|
56237
56198
|
this.streamManager.withNewStream(peerInfo.peerRef, STREAM_KIND, (handler, sender) => {
|
|
56238
56199
|
handleAsyncErrors(async () => {
|
|
56239
|
-
sync_logger.log
|
|
56200
|
+
sync_logger.log(`Fetching blocks from ${peerInfo.peerId}.`);
|
|
56240
56201
|
const blocks = await handler.requestBlockSequence(sender, bestHash, Direction.DescIncl, numbers_tryAsU32(bestSlot - ourBestSlot));
|
|
56241
56202
|
blocks.reverse();
|
|
56242
56203
|
this.onNewBlocks(blocks, peerInfo.peerId);
|
|
56243
56204
|
}, (e) => {
|
|
56244
|
-
sync_logger.warn
|
|
56205
|
+
sync_logger.warn(`[${peerInfo.peerId}] --> requesting blocks to import: ${e}`);
|
|
56245
56206
|
});
|
|
56246
56207
|
return result_OK;
|
|
56247
56208
|
});
|
|
@@ -56315,7 +56276,7 @@ function setupPeerListeners(syncTask, network, streamManager) {
|
|
|
56315
56276
|
// whenever the peer wants to open a stream with us, let's handle that.
|
|
56316
56277
|
peer.addOnIncomingStream((stream) => {
|
|
56317
56278
|
handleAsyncErrors(() => streamManager.onIncomingStream(peer, stream), (e) => {
|
|
56318
|
-
network_logger.error
|
|
56279
|
+
network_logger.error(`[${peer.id}:${stream.streamId}]🚰 Stream error: ${e}. Disconnecting peer.`);
|
|
56319
56280
|
peer.disconnect();
|
|
56320
56281
|
});
|
|
56321
56282
|
return result_OK;
|
|
@@ -56565,10 +56526,10 @@ class FuzzTarget {
  async onSocketMessage(msg) {
  // attempt to decode the messsage
  const message = decoder_Decoder.decodeObject(messageCodec, msg, this.spec);
- handler_logger.log
+ handler_logger.log(`[${message.type}] incoming message`);
  await processAndRespond(this.spec, message, this.msgHandler, this.sender).catch((e) => {
- handler_logger.error
- handler_logger.error
+ handler_logger.error(`Error while processing fuzz v0 message: ${e}`);
+ handler_logger.error(e);
  this.sender.close();
  });
  return;
@@ -56609,17 +56570,17 @@ class FuzzTarget {
  break;
  }
  case MessageType.State: {
- handler_logger.log
+ handler_logger.log(`--> Received unexpected 'State' message from the fuzzer. Closing.`);
  sender.close();
  return;
  }
  case MessageType.StateRoot: {
- handler_logger.log
+ handler_logger.log(`--> Received unexpected 'StateRoot' message from the fuzzer. Closing.`);
  sender.close();
  return;
  }
  default: {
- handler_logger.log
+ handler_logger.log(`--> Received unexpected message type ${JSON.stringify(message)} from the fuzzer. Closing.`);
  sender.close();
  try {
  debug_assertNever(message);
@@ -56630,17 +56591,17 @@ class FuzzTarget {
  }
  }
  if (response !== null) {
- handler_logger.trace
+ handler_logger.trace(`<-- responding with: ${response.type}`);
  const encoded = encoder_Encoder.encodeObject(messageCodec, response, spec);
  sender.send(encoded);
  }
  else {
- handler_logger.warn
+ handler_logger.warn(`<-- no response generated for: ${message.type}`);
  }
  }
  }
  onClose({ error }) {
- handler_logger.log
+ handler_logger.log(`Closing the handler. Reason: ${error !== undefined ? error.message : "close"}.`);
  }
  }

@@ -56894,14 +56855,14 @@ class handler_FuzzTarget {
  // attempt to decode the messsage
  try {
  const message = decoder_Decoder.decodeObject(types_messageCodec, msg, this.spec);
- v1_handler_logger.log
+ v1_handler_logger.log(`[${message.type}] incoming message`);
  await this.processAndRespond(message);
  }
  catch (e) {
- v1_handler_logger.error
- v1_handler_logger.error
+ v1_handler_logger.error(`Error while processing fuzz v1 message: ${e}`);
+ v1_handler_logger.error(`${e}`);
  if (e instanceof Error) {
- v1_handler_logger.error
+ v1_handler_logger.error(e.stack ?? "");
  }
  this.sender.close();
  }
@@ -56912,7 +56873,7 @@ class handler_FuzzTarget {
  case types_MessageType.PeerInfo: {
  // only support V1
  if (message.value.fuzzVersion !== 1) {
- v1_handler_logger.warn
+ v1_handler_logger.warn(`Unsupported fuzzer protocol version: ${message.value.fuzzVersion}. Closing`);
  this.sender.close();
  return;
  }
@@ -56920,9 +56881,9 @@ class handler_FuzzTarget {
  const ourPeerInfo = await this.msgHandler.getPeerInfo(message.value);
  // Calculate session features (intersection of both peer features)
  this.sessionFeatures = message.value.features & ourPeerInfo.features;
- v1_handler_logger.info
- v1_handler_logger.log
- v1_handler_logger.log
+ v1_handler_logger.info(`Handshake completed. Shared features: 0b${this.sessionFeatures.toString(2)}`);
+ v1_handler_logger.log(`Feature ancestry: ${(this.sessionFeatures & Features.Ancestry) !== 0}`);
+ v1_handler_logger.log(`Feature fork: ${(this.sessionFeatures & Features.Fork) !== 0}`);
  response = {
  type: types_MessageType.PeerInfo,
  value: ourPeerInfo,
@@ -56962,22 +56923,22 @@ class handler_FuzzTarget {
  break;
  }
  case types_MessageType.StateRoot: {
- v1_handler_logger.log
+ v1_handler_logger.log(`--> Received unexpected 'StateRoot' message from the fuzzer. Closing.`);
  this.sender.close();
  return;
  }
  case types_MessageType.State: {
- v1_handler_logger.log
+ v1_handler_logger.log(`--> Received unexpected 'State' message from the fuzzer. Closing.`);
  this.sender.close();
  return;
  }
  case types_MessageType.Error: {
- v1_handler_logger.log
+ v1_handler_logger.log(`--> Received unexpected 'Error' message from the fuzzer. Closing.`);
  this.sender.close();
  return;
  }
  default: {
- v1_handler_logger.log
+ v1_handler_logger.log(`--> Received unexpected message type ${JSON.stringify(message)} from the fuzzer. Closing.`);
  this.sender.close();
  try {
  debug_assertNever(message);
@@ -56988,16 +56949,16 @@ class handler_FuzzTarget {
  }
  }
  if (response !== null) {
- v1_handler_logger.trace
+ v1_handler_logger.trace(`<-- responding with: ${response.type}`);
  const encoded = encoder_Encoder.encodeObject(types_messageCodec, response, this.spec);
  this.sender.send(encoded);
  }
  else {
- v1_handler_logger.warn
+ v1_handler_logger.warn(`<-- no response generated for: ${message.type}`);
  }
  }
  onClose({ error }) {
- v1_handler_logger.log
+ v1_handler_logger.log(`Closing the v1 handler. Reason: ${error !== undefined ? error.message : "close"}.`);
  }
  /** Check if a specific feature is enabled in the session */
  hasFeature(feature) {
@@ -57043,7 +57004,7 @@ function startIpcServer(name, newMessageHandler) {
  const logger = Logger.new(import.meta.filename, "ext-ipc");
  // Create the IPC server
  const server = (0,external_node_net_namespaceObject.createServer)((socket) => {
- logger.log
+ logger.log("Client connected");
  const messageHandler = newMessageHandler(new IpcSender(socket));
  // Handle incoming data from the client
  socket.on("data", handleMessageFragmentation(async (data) => {
@@ -57055,23 +57016,23 @@ function startIpcServer(name, newMessageHandler) {
  await messageHandler.onSocketMessage(data);
  }
  catch (e) {
- logger.error
+ logger.error(`Received invalid data on socket: ${e}. Closing connection.`);
  socket.end();
  }
  finally {
  socket.resume();
  }
  }, () => {
- logger.error
+ logger.error("Received too much data on socket. Closing connection.");
  socket.end();
  }));
  // Handle client disconnection
  socket.on("end", () => {
- logger.log
+ logger.log("Client disconnected");
  messageHandler.onClose({});
  });
  socket.on("error", (error) => {
- logger.error
+ logger.error(`Socket error: ${error}`);
  messageHandler.onClose({ error });
  socket.end();
  });
@@ -57086,14 +57047,14 @@ function startIpcServer(name, newMessageHandler) {
  path: socketPath,
  signal: controller.signal,
  }, () => {
- logger.log
+ logger.log(`IPC server is listening at ${socketPath}`);
  });
  // Handle server errors
  server.on("error", (err) => {
  throw err;
  });
  return () => {
- logger.info
+ logger.info("Closing IPC server.");
  // stop accepting new connections
  server.close();
  // abort the server
@@ -57238,7 +57199,7 @@ class JamnpIpcHandler {
  // decode the message as `StreamEnvelope`
  const envelope = decoder_Decoder.decodeObject(StreamEnvelope.Codec, msg);
  const streamId = envelope.streamId;
- jamnp_handler_logger.log
+ jamnp_handler_logger.log(`[${streamId}] incoming message: ${envelope.type} ${envelope.data}`);
  // check if this is a already known stream id
  const streamHandler = this.streams.get(streamId);
  const streamSender = new EnvelopeSender(streamId, this.sender);
@@ -57246,13 +57207,13 @@ class JamnpIpcHandler {
  if (streamHandler === undefined) {
  // closing or message of unknown stream - ignore.
  if (envelope.type !== StreamEnvelopeType.Open) {
- jamnp_handler_logger.warn
+ jamnp_handler_logger.warn(`[${streamId}] (unknown) got invalid type ${envelope.type}.`);
  return;
  }
  const newStream = decoder_Decoder.decodeObject(NewStream.Codec, envelope.data);
  const handler = this.streamHandlers.get(newStream.streamByte);
  if (handler !== undefined) {
- jamnp_handler_logger.log
+ jamnp_handler_logger.log(`[${streamId}] new stream for ${handler.kind}`);
  // insert the stream
  this.streams.set(streamId, handler);
  // Just send back the same stream byte.
@@ -57273,7 +57234,7 @@ class JamnpIpcHandler {
  if (envelope.type !== StreamEnvelopeType.Msg) {
  // display a warning but only if the stream was not pending for confirmation.
  if (!this.pendingStreams.delete(streamId)) {
- jamnp_handler_logger.warn
+ jamnp_handler_logger.warn(`[${streamId}] got invalid type ${envelope.type}.`);
  }
  return;
  }
@@ -57282,7 +57243,7 @@ class JamnpIpcHandler {
  }
  /** Notify about termination of the underlying socket. */
  onClose({ error }) {
- jamnp_handler_logger.log
+ jamnp_handler_logger.log(`Closing the handler. Reason: ${error !== undefined ? error.message : "close"}.`);
  // Socket closed - we should probably clear everything.
  for (const [streamId, handler] of this.streams.entries()) {
  handler.onClose(streamId, error === undefined);
@@ -57299,7 +57260,7 @@ class JamnpIpcHandler {
  }
  /** Wait for the handler to be finished either via close or error. */
  waitForEnd() {
- jamnp_handler_logger.log
+ jamnp_handler_logger.log("Waiting for the handler to be closed.");
  return this.onEnd.listen;
  }
  }
@@ -57428,7 +57389,7 @@ class FuzzHandler {
  async getSerializedState(value) {
  const state = await this.api.getPostSerializedState(value);
  if (state === null) {
- ipc_logger.warn
+ ipc_logger.warn(`Fuzzer requested non-existing state for: ${value}`);
  return [];
  }
  return Array.from(state).map(([key, value]) => {
@@ -57454,7 +57415,7 @@ class FuzzHandler {
  if (res.isOk) {
  return res;
  }
- ipc_logger.log
+ ipc_logger.log(`Rejecting block with error: ${res.error}. ${res.details}`);
  return result_Result.error(ErrorMessage.create({ message: res.error }));
  }
  async importBlockV0(value) {
@@ -57462,11 +57423,11 @@ class FuzzHandler {
  if (res.isOk) {
  return res.ok;
  }
- ipc_logger.warn
+ ipc_logger.warn(`Fuzzer sent incorrect block with error ${res.error}. ${res.details}`);
  return this.api.getBestStateRootHash();
  }
  async getPeerInfo(value) {
- ipc_logger.info
+ ipc_logger.info(`Fuzzer ${value} connected.`);
  return types_PeerInfo.create({
  name: this.api.nodeName,
  appVersion: this.api.nodeVersion,
@@ -57477,7 +57438,7 @@ class FuzzHandler {
  });
  }
  async getPeerInfoV0(value) {
- ipc_logger.info
+ ipc_logger.info(`Fuzzer ${value} connected.`);
  return PeerInfo.create({
  name: this.api.nodeName,
  appVersion: this.api.nodeVersion,
@@ -57510,7 +57471,7 @@ if (!external_node_worker_threads_namespaceObject.isMainThread) {
  Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
  const machine = networkStateMachine();
  const channel = MessageChannelStateMachine.receiveChannel(machine, external_node_worker_threads_namespaceObject.parentPort);
- channel.then((channel) => jam_network_main(channel)).catch((e) => jam_network_logger.error
+ channel.then((channel) => jam_network_main(channel)).catch((e) => jam_network_logger.error(e));
  }
  /**
  * JAM networking worker.
@@ -57521,7 +57482,7 @@ if (!external_node_worker_threads_namespaceObject.isMainThread) {
  */
  async function jam_network_main(channel) {
  await initAll();
- jam_network_logger.trace
+ jam_network_logger.trace(`🛜 Network starting ${channel.currentState()}`);
  // Await the configuration object
  // TODO [ToDr] The whole state machine needs to die.
  const ready = channel.currentState().stateName !== "ready(network)"
@@ -57532,7 +57493,7 @@ async function jam_network_main(channel) {
  const key = await ed25519_privateKey(config.key);
  const lmdb = new LmdbRoot(config.genericConfig.dbPath);
  const blocks = new LmdbBlocks(config.genericConfig.chainSpec, lmdb);
- jam_network_logger.info
+ jam_network_logger.info(`🛜 Listening at ${config.host}:${config.port}`);
  const network = await setup({
  host: config.host,
  port: config.port,
@@ -57545,7 +57506,7 @@ async function jam_network_main(channel) {
  ready.waitForState("finished").then(() => network.network.stop());
  await network.network.start();
  });
- jam_network_logger.info
+ jam_network_logger.info("🛜 Network worker finished. Closing channel.");
  // Close the comms to gracefuly close the app.
  finished.currentState().close(channel);
  }
@@ -57591,8 +57552,8 @@ async function main_main(config, withRelPath) {
  throw new Error("The main binary cannot be running as a Worker!");
  }
  await initAll();
- common_logger.info
- common_logger.info
+ common_logger.info(`🫐 Typeberry ${package_namespaceObject.rE}. GP: ${CURRENT_VERSION} (${CURRENT_SUITE})`);
+ common_logger.info(`🎸 Starting node: ${config.nodeName}.`);
  const chainSpec = getChainSpec(config.node.flavor);
  const { rootDb, dbPath, genesisHeaderHash } = openDatabase(config.nodeName, config.node.chainSpec.genesisHeader, withRelPath(config.node.databaseBasePath));
  // Initialize the database with genesis state and block if there isn't one.
@@ -57635,30 +57596,30 @@ async function main_main(config, withRelPath) {
  return importer.finish(port);
  });
  await importerFinished.currentState().waitForWorkerToFinish();
- common_logger.log
+ common_logger.log("[main] ☠️ Closing the extensions");
  closeExtensions();
- common_logger.log
+ common_logger.log("[main] ☠️ Closing the authorship module");
  closeAuthorship();
- common_logger.log
+ common_logger.log("[main] ☠️ Closing the networking module");
  closeNetwork();
- common_logger.log
+ common_logger.log("[main] 🛢️ Closing the database");
  await rootDb.close();
- common_logger.info
+ common_logger.info("[main] ✅ Done.");
  },
  };
  return api;
  }
  const initAuthorship = async (importerReady, isAuthoring, config) => {
  if (!isAuthoring) {
- common_logger.log
+ common_logger.log("✍️ Authorship off: disabled");
  return () => Promise.resolve();
  }
- common_logger.info
+ common_logger.info("✍️ Starting block generator.");
  const { generator, finish } = await startBlockGenerator(config);
  // relay blocks from generator to importer
  importerReady.doUntil("finished", async (importer, port) => {
  generator.currentState().onBlock.on((b) => {
- common_logger.log
+ common_logger.log(`✍️ Produced block. Size: [${b.length}]`);
  importer.sendBlock(port, b);
  });
  });
@@ -57666,7 +57627,7 @@ const initAuthorship = async (importerReady, isAuthoring, config) => {
  };
  const initNetwork = async (importerReady, workerConfig, genesisHeaderHash, networkConfig, bestHeader) => {
  if (networkConfig === null) {
- common_logger.log
+ common_logger.log("🛜 Networking off: no config");
  return () => Promise.resolve();
  }
  const { key, host, port, bootnodes } = networkConfig;
@@ -59616,7 +59577,7 @@ class AccumulateExternalities {
  /** https://graypaper.fluffylabs.dev/#/7e6ff6a/362802362d02?v=0.6.7 */
  const validatorsManager = this.updatedState.getPrivilegedServices().validatorsManager;
  if (validatorsManager !== this.currentServiceId) {
- accumulate_externalities_logger.trace
+ accumulate_externalities_logger.trace(`Current service id (${this.currentServiceId}) is not a validators manager. (expected: ${validatorsManager}) and cannot update validators data. Ignoring`);
  return result_Result.error(UnprivilegedError);
  }
  this.updatedState.stateUpdate.validatorsData = validatorsData;
@@ -59631,11 +59592,11 @@ class AccumulateExternalities {
  // NOTE `coreIndex` is already verified in the HC, so this is infallible.
  const currentAuthManager = this.updatedState.getPrivilegedServices().authManager[coreIndex];
  if (currentAuthManager !== this.currentServiceId) {
- accumulate_externalities_logger.trace
+ accumulate_externalities_logger.trace(`Current service id (${this.currentServiceId}) is not an auth manager of core ${coreIndex} (expected: ${currentAuthManager}) and cannot update authorization queue. Ignoring`);
  return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
  }
  if (authManager === null && Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)) {
- accumulate_externalities_logger.trace
+ accumulate_externalities_logger.trace("The new auth manager is not a valid service id. Ignoring");
  return result_Result.error(UpdatePrivilegesError.InvalidServiceId);
  }
  this.updatedState.stateUpdate.authorizationQueues.set(coreIndex, authQueue);
@@ -60516,7 +60477,7 @@ class Assign {
  const memoryReadResult = memory.loadInto(res, authorizationQueueStart);
  // error while reading the memory.
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace("ASSIGN() <- PANIC");
  return PvmExecution.Panic;
  }
  if (maybeCoreIndex >= this.chainSpec.coresCount) {
@@ -60531,18 +60492,18 @@ class Assign {
  const result = this.partialState.updateAuthorizationQueue(coreIndex, fixedSizeAuthQueue, authManager);
  if (result.isOk) {
  regs.set(IN_OUT_REG, HostCallResult.OK);
- logger_logger.trace
+ logger_logger.trace(`ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`);
  return;
  }
  const e = result.error;
  if (e === UpdatePrivilegesError.UnprivilegedService) {
  regs.set(IN_OUT_REG, HostCallResult.HUH);
- logger_logger.trace
+ logger_logger.trace(`ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`);
  return;
  }
  if (e === UpdatePrivilegesError.InvalidServiceId) {
  regs.set(IN_OUT_REG, HostCallResult.WHO);
- logger_logger.trace
+ logger_logger.trace(`ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`);
  return;
  }
  debug_assertNever(e);
@@ -60608,7 +60569,7 @@ class Bless {
  decoder.resetTo(0);
  const memoryReadResult = memory.loadInto(result, memIndex);
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`BLESS(${manager}, ${validator}) <- PANIC`);
  return PvmExecution.Panic;
  }
  const { serviceId, gas } = decoder.object(serviceIdAndGasCodec);
@@ -60621,24 +60582,24 @@ class Bless {
  const authorizersDecoder = decoder_Decoder.fromBlob(res);
  const memoryReadResult = memory.loadInto(res, authorization);
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`BLESS(${manager}, ${validator}, ${autoAccumulateEntries}) <- PANIC`);
  return PvmExecution.Panic;
  }
  const authorizers = tryAsPerCore(authorizersDecoder.sequenceFixLen(descriptors_codec.u32.asOpaque(), this.chainSpec.coresCount), this.chainSpec);
  const updateResult = this.partialState.updatePrivilegedServices(manager, authorizers, validator, autoAccumulateEntries);
  if (updateResult.isOk) {
- logger_logger.trace
+ logger_logger.trace(`BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- OK`);
  regs.set(bless_IN_OUT_REG, HostCallResult.OK);
  return;
  }
  const e = updateResult.error;
  if (e === UpdatePrivilegesError.UnprivilegedService) {
- logger_logger.trace
+ logger_logger.trace(`BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- HUH`);
  regs.set(bless_IN_OUT_REG, HostCallResult.HUH);
  return;
  }
  if (e === UpdatePrivilegesError.InvalidServiceId) {
- logger_logger.trace
+ logger_logger.trace(`BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- WHO`);
  regs.set(bless_IN_OUT_REG, HostCallResult.WHO);
  return;
  }
@@ -60668,7 +60629,7 @@ class GasHostCall {
  }
  execute(gas, regs) {
  const gasValue = gas.get();
- logger_logger.trace
+ logger_logger.trace(`GAS <- ${gasValue}`);
  regs.set(7, numbers_tryAsU64(gasValue));
  return Promise.resolve(undefined);
  }
@@ -60700,7 +60661,7 @@ class Checkpoint {
  async execute(gas, regs) {
  await this.gasHostCall.execute(gas, regs);
  this.partialState.checkpoint();
- logger_logger.trace
+ logger_logger.trace("CHECKPOINT()");
  return;
  }
  }
@@ -60739,18 +60700,18 @@ class Designate {
  const memoryReadResult = memory.loadInto(res, validatorsStart);
  // error while reading the memory.
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace("DESIGNATE() <- PANIC");
  return PvmExecution.Panic;
  }
  const decoder = decoder_Decoder.fromBlob(res);
  const validatorsData = decoder.sequenceFixLen(ValidatorData.Codec, this.chainSpec.validatorsCount);
  const result = this.partialState.updateValidatorsData(tryAsPerValidator(validatorsData, this.chainSpec));
  if (result.isError) {
- logger_logger.trace
+ logger_logger.trace(`DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`);
  regs.set(designate_IN_OUT_REG, HostCallResult.HUH);
  }
  else {
- logger_logger.trace
+ logger_logger.trace(`DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`);
  regs.set(designate_IN_OUT_REG, HostCallResult.OK);
  }
  }
@@ -60791,17 +60752,17 @@ class Eject {
  const previousCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
  const memoryReadResult = memory.loadInto(previousCodeHash.raw, preimageHashStart);
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`EJECT(${serviceId}) <- PANIC`);
  return PvmExecution.Panic;
  }
  // cannot eject self
  if (serviceId === this.currentServiceId) {
  regs.set(eject_IN_OUT_REG, HostCallResult.WHO);
- logger_logger.trace
+ logger_logger.trace(`EJECT(${serviceId}, ${previousCodeHash}) <- WHO`);
  return;
  }
  const result = this.partialState.eject(serviceId, previousCodeHash);
- logger_logger.trace
+ logger_logger.trace(`EJECT(${serviceId}, ${previousCodeHash}) <- ${resultToString(result)}`);
  // All good!
  if (result.isOk) {
  regs.set(eject_IN_OUT_REG, HostCallResult.OK);
@@ -60853,11 +60814,11 @@ class Forget {
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
  // error while reading the memory.
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`FORGET(${hash}, ${length}) <- PANIC`);
  return PvmExecution.Panic;
  }
  const result = this.partialState.forgetPreimage(hash.asOpaque(), length);
- logger_logger.trace
+ logger_logger.trace(`FORGET(${hash}, ${length}) <- ${resultToString(result)}`);
  if (result.isOk) {
  regs.set(forget_IN_OUT_REG, HostCallResult.OK);
  }
@@ -60910,11 +60871,11 @@ class New {
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
  // error while reading the memory.
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}) <- PANIC`);
  return PvmExecution.Panic;
  }
  const assignedId = this.partialState.newService(codeHash.asOpaque(), codeLength, gas, allowance, gratisStorage);
- logger_logger.trace
+ logger_logger.trace(`NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}) <- ${resultToString(assignedId)}`);
  if (assignedId.isOk) {
  regs.set(new_IN_OUT_REG, numbers_tryAsU64(assignedId.ok));
  return;
@@ -60969,11 +60930,11 @@ class Provide {
  const preimage = bytes_BytesBlob.blobFrom(new Uint8Array(length));
  const memoryReadResult = memory.loadInto(preimage.raw, preimageStart);
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`);
  return PvmExecution.Panic;
  }
  const result = this.partialState.providePreimage(serviceId, preimage);
- logger_logger.trace
+ logger_logger.trace(`PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${resultToString(result)}`);
  if (result.isOk) {
  regs.set(provide_IN_OUT_REG, HostCallResult.OK);
  return;
@@ -61027,11 +60988,11 @@ class Query {
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
  // error while reading the memory.
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`QUERY(${hash}, ${length}) <- PANIC`);
  return PvmExecution.Panic;
  }
  const result = this.partialState.checkPreimageStatus(hash.asOpaque(), length);
- logger_logger.trace
+ logger_logger.trace(`QUERY(${hash}, ${length}) <- ${result}`);
  const zero = numbers_tryAsU64(0n);
  if (result === null) {
  regs.set(IN_OUT_REG_1, HostCallResult.NONE);
@@ -61092,11 +61053,11 @@ class Solicit {
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`SOLICIT(${hash}, ${length}) <- PANIC`);
  return PvmExecution.Panic;
  }
  const result = this.partialState.requestPreimage(hash.asOpaque(), length);
- logger_logger.trace
+ logger_logger.trace(`SOLICIT(${hash}, ${length}) <- ${resultToString(result)}`);
  if (result.isOk) {
  regs.set(solicit_IN_OUT_REG, HostCallResult.OK);
  return;
@@ -61168,11 +61129,11 @@ class Transfer {
  const memoryReadResult = memory.loadInto(memo.raw, memoStart);
  // page fault while reading the memory.
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`TRANSFER(${destination}, ${amount}, ${onTransferGas}, ${memo}) <- PANIC`);
  return PvmExecution.Panic;
  }
  const transferResult = this.partialState.transfer(destination, amount, onTransferGas, memo);
- logger_logger.trace
+ logger_logger.trace(`TRANSFER(${destination}, ${amount}, ${onTransferGas}, ${memo}) <- ${resultToString(transferResult)}`);
  // All good!
  if (transferResult.isOk) {
  regs.set(transfer_IN_OUT_REG, HostCallResult.OK);
@@ -61231,11 +61192,11 @@ class Upgrade {
  const codeHash = bytes_Bytes.zero(hash_HASH_SIZE);
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`);
  return PvmExecution.Panic;
  }
  this.partialState.upgradeService(codeHash.asOpaque(), gas, allowance);
- logger_logger.trace
+ logger_logger.trace(`UPGRADE(${codeHash}, ${gas}, ${allowance})`);
  regs.set(upgrade_IN_OUT_REG, HostCallResult.OK);
  }
  }
@@ -61269,11 +61230,11 @@ class Yield {
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace("YIELD() <- PANIC");
  return PvmExecution.Panic;
  }
  this.partialState.yield(hash);
- logger_logger.trace
+ logger_logger.trace(`YIELD(${hash})`);
  regs.set(yield_IN_OUT_REG, HostCallResult.OK);
  }
  }
@@ -61315,10 +61276,10 @@ class Fetch {
  const chunk = value === null ? new Uint8Array() : value.raw.subarray(Number(offset), Number(offset + length));
  const storeResult = memory.storeFrom(output, chunk);
  if (storeResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`FETCH(${kind}) <- PANIC`);
  return PvmExecution.Panic;
  }
- logger_logger.trace
+ logger_logger.trace(`FETCH(${kind}) <- ${value?.toStringTruncated()}`);
  // write result
  regs.set(fetch_IN_OUT_REG, value === null ? HostCallResult.NONE : valueLength);
  }
@@ -61477,10 +61438,10 @@ class Info {
  const chunk = encodedInfo.raw.subarray(Number(offset), Number(offset + length));
  const writeResult = memory.storeFrom(outputStart, chunk);
  if (writeResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`INFO(${serviceId}) <- PANIC`);
  return PvmExecution.Panic;
  }
- logger_logger.trace
+ logger_logger.trace(`INFO(${serviceId}) <- ${bytes_BytesBlob.blobFrom(chunk)}`);
  if (accountInfo === null) {
  regs.set(info_IN_OUT_REG, HostCallResult.NONE);
  return;
@@ -61541,7 +61502,7 @@ class LogHostCall {
  memory.loadInto(target, targetStart);
  }
  memory.loadInto(message, msgStart);
- logger_logger.trace
+ logger_logger.trace(`SERVICE [${this.currentServiceId}] [${lvl}] ${decoder.decode(target)} ${decoder.decode(message)}`);
  return Promise.resolve(undefined);
  }
  }
@@ -61581,12 +61542,12 @@ class Lookup {
  const preImageHash = bytes_Bytes.zero(hash_HASH_SIZE);
  const memoryReadResult = memory.loadInto(preImageHash.raw, hashAddress);
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`);
  return PvmExecution.Panic;
  }
  // v
  const preImage = this.account.lookup(serviceId, preImageHash);
- logger_logger.trace
+ logger_logger.trace(`LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated()}...`);
  const preImageLength = preImage === null ? numbers_tryAsU64(0) : numbers_tryAsU64(preImage.raw.length);
  const preimageBlobOffset = regs.get(10);
  const lengthToWrite = regs.get(11);
@@ -61648,7 +61609,7 @@ class Read {
  const rawKey = bytes_BytesBlob.blobFrom(new Uint8Array(storageKeyLengthClamped));
  const memoryReadResult = memory.loadInto(rawKey.raw, storageKeyStartAddress);
  if (memoryReadResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- PANIC`);
  return PvmExecution.Panic;
  }
  // v
@@ -61666,15 +61627,15 @@ class Read {
  const chunk = value === null ? new Uint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
  const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
  if (memoryWriteResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- PANIC`);
  return PvmExecution.Panic;
  }
  if (value === null) {
- logger_logger.trace
+ logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- NONE`);
  regs.set(read_IN_OUT_REG, HostCallResult.NONE);
  return;
  }
- logger_logger.trace
+ logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`);
  regs.set(read_IN_OUT_REG, valueLength);
  }
  }
@@ -61717,7 +61678,7 @@ class Write {
  const rawStorageKey = new Uint8Array(storageKeyLengthClamped);
  const keyLoadingResult = memory.loadInto(rawStorageKey, storageKeyStartAddress);
  if (keyLoadingResult.isError) {
- logger_logger.trace
+ logger_logger.trace("WRITE() <- PANIC");
  return PvmExecution.Panic;
  }
  // k
@@ -61727,14 +61688,14 @@ class Write {
  const valueLoadingResult = memory.loadInto(value, valueStart);
  // Note [MaSo] this is ok to return bcs if valueLength is 0, then this panic won't happen
  if (valueLoadingResult.isError) {
- logger_logger.trace
+ logger_logger.trace(`WRITE(${storageKey})}) <- PANIC`);
  return PvmExecution.Panic;
  }
  /** https://graypaper.fluffylabs.dev/#/9a08063/33af0133b201?v=0.6.6 */
  const maybeValue = valueLength === 0n ? null : bytes_BytesBlob.blobFrom(value);
  // a
  const result = this.account.write(storageKey, maybeValue);
- logger_logger.trace
+ logger_logger.trace(`WRITE(${storageKey}, ${maybeValue?.toStringTruncated()}) <- ${resultToString(result)}`);
  if (result.isError) {
  regs.set(write_IN_OUT_REG, HostCallResult.FULL);
  return;
@@ -61926,18 +61887,18 @@ class Accumulate {
  async pvmAccumulateInvocation(slot, serviceId, operands, gas, entropy, inputStateUpdate) {
  const service = this.state.getService(serviceId);
  if (service === null) {
- accumulate_logger.log
+ accumulate_logger.log(`Service with id ${serviceId} not found.`);
  return result_Result.error(PvmInvocationError.NoService);
  }
  const codeHash = service.getInfo().codeHash;
  // TODO [ToDr] Should we check that the preimage is still available?
  const code = service.getPreimage(codeHash.asOpaque());
  if (code === null) {
- accumulate_logger.log
+ accumulate_logger.log(`Code with hash ${codeHash} not found for service ${serviceId}.`);
  return result_Result.error(PvmInvocationError.NoPreimage);
  }
  if (code.length > W_C) {
- accumulate_logger.log
+ accumulate_logger.log(`Code with hash ${codeHash} is too long for service ${serviceId}.`);
  return result_Result.error(PvmInvocationError.PreimageTooLong);
  }
  const nextServiceId = generateNextServiceId({ serviceId, entropy, timeslot: slot }, this.chainSpec);
@@ -61959,10 +61920,10 @@ class Accumulate {
  if (result.hasStatus()) {
  const status = result.status;
  if (status === status_Status.OOG || status === status_Status.PANIC) {
- accumulate_logger.trace
+ accumulate_logger.trace(`[${serviceId}] accumulate finished with ${status_Status[status]} reverting to checkpoint.`);
  return result_Result.ok({ stateUpdate: checkpoint, consumedGas: common_tryAsServiceGas(result.consumedGas) });
  }
- accumulate_logger.trace
+ accumulate_logger.trace(`[${serviceId}] accumulate finished with ${status_Status[status]}`);
  }
  /**
  * PVM invocation returned a hash so we override whatever `yield` host call
@@ -61987,14 +61948,14 @@ class Accumulate {
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/18d70118d701?v=0.6.7
  */
  async accumulateSingleService(serviceId, operands, gasCost, slot, entropy, inputStateUpdate) {
- accumulate_logger.log
+ accumulate_logger.log(`Accumulating service ${serviceId}, items: ${operands.length} at slot: ${slot}.`);
  const result = await this.pvmAccumulateInvocation(slot, serviceId, operands, gasCost, entropy, inputStateUpdate);
  if (result.isError) {
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/2fb6012fb601?v=0.6.7
- accumulate_logger.log
+ accumulate_logger.log(`Accumulation failed for ${serviceId}.`);
  return { stateUpdate: null, consumedGas: 0n };
  }
- accumulate_logger.log
+ accumulate_logger.log(`Accumulation successful for ${serviceId}. Consumed: ${result.ok.consumedGas}`);
  return result.ok;
  }
  /**
@@ -62043,7 +62004,6 @@ class Accumulate {
  const serviceIds = accumulateData.getServiceIds();
  let gasCost = common_tryAsServiceGas(0);
  let currentState = inputStateUpdate;
- const currentManager = (inputStateUpdate.privilegedServices ?? this.state.privilegedServices).manager;
  for (const serviceId of serviceIds) {
  const checkpoint = AccumulationStateUpdate.copyFrom(currentState);
  const { consumedGas, stateUpdate } = await this.accumulateSingleService(serviceId, accumulateData.getOperands(serviceId), accumulateData.getGasCost(serviceId), slot, entropy, currentState);
@@ -62053,21 +62013,6 @@ class Accumulate {
  serviceStatistics.gasUsed = common_tryAsServiceGas(serviceStatistics.gasUsed + consumedGas);
  statistics.set(serviceId, serviceStatistics);
  currentState = stateUpdate === null ? checkpoint : stateUpdate;
- if (Compatibility.is(GpVersion.V0_7_0) && serviceId === currentManager) {
- const newV = currentState.privilegedServices?.validatorsManager;
- if (currentState.privilegedServices !== null && newV !== undefined && serviceIds.includes(newV)) {
- accumulate_logger.info `Entering completely incorrect code that probably reverts validatorsManager change. This is valid in 0.7.0 only and incorrect in 0.7.1+`;
- // Since serviceIds already contains newV, this service gets accumulated twice.
- // To avoid double-counting, we skip stats and gas cost tracking here.
- // We need this accumulation to get the correct `validatorsManager`
- const { stateUpdate } = await this.accumulateSingleService(newV, accumulateData.getOperands(newV), accumulateData.getGasCost(newV), slot, entropy, checkpoint);
- const correctV = stateUpdate?.privilegedServices?.validatorsManager ?? this.state.privilegedServices.validatorsManager;
- currentState.privilegedServices = PrivilegedServices.create({
- ...currentState.privilegedServices,
- validatorsManager: correctV,
- });
- }
- }
  }
  return {
  state: currentState,
@@ -62212,14 +62157,11 @@ class DeferredTransfers {
  async transition({ pendingTransfers, timeslot, servicesUpdate: inputServicesUpdate, entropy, }) {
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/187a03187a03?v=0.6.7
  const transferStatistics = new Map();
- const services = uniquePreserveOrder(pendingTransfers.
+ const services = uniquePreserveOrder(pendingTransfers.flatMap((x) => [x.source, x.destination]));
  let currentStateUpdate = AccumulationStateUpdate.new(inputServicesUpdate);
  for (const serviceId of services) {
  const partiallyUpdatedState = new PartiallyUpdatedState(this.state, currentStateUpdate);
-
- const transfers = pendingTransfers
- .filter((pendingTransfer) => pendingTransfer.destination === serviceId)
- .toSorted((a, b) => a.source - b.source);
+ const transfers = pendingTransfers.filter((pendingTransfer) => pendingTransfer.destination === serviceId);
  const info = partiallyUpdatedState.getServiceInfo(serviceId);
  if (info === null) {
  return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist);
@@ -62239,13 +62181,13 @@ class DeferredTransfers {
  const isCodeCorrect = code !== null && code.length <= W_C;
  if (!hasTransfers || !isCodeCorrect) {
  if (code === null) {
- deferred_transfers_logger.trace
+ deferred_transfers_logger.trace(`Skipping ON_TRANSFER execution for service ${serviceId} because code is null`);
  }
  else if (!hasTransfers) {
- deferred_transfers_logger.trace
+ deferred_transfers_logger.trace(`Skipping ON_TRANSFER execution for service ${serviceId} because there are no transfers`);
  }
  else {
- deferred_transfers_logger.trace
+ deferred_transfers_logger.trace(`Skipping ON_TRANSFER execution for service ${serviceId} because code is too long`);
  }
  }
  else {
@@ -62873,7 +62815,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
|
|
|
62873
62815
|
headerChain.isAncestor(context.lookupAnchorSlot, context.lookupAnchor, context.anchor);
|
|
62874
62816
|
if (!isInChain) {
|
|
62875
62817
|
if (process.env.SKIP_LOOKUP_ANCHOR_CHECK !== undefined) {
|
|
62876
|
-
verify_contextual_logger.warn
|
|
62818
|
+
verify_contextual_logger.warn(`Lookup anchor check for ${context.lookupAnchor} would fail, but override is active.`);
|
|
62877
62819
|
}
|
|
62878
62820
|
else {
|
|
62879
62821
|
return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
|
|
@@ -63760,7 +63702,7 @@ class OnChain {
|
|
|
63760
63702
|
reports: availableReports,
|
|
63761
63703
|
entropy: entropy[0],
|
|
63762
63704
|
});
|
|
63763
|
-
chain_stf_logger.log
|
|
63705
|
+
chain_stf_logger.log(timerAccumulate());
|
|
63764
63706
|
if (accumulateResult.isError) {
|
|
63765
63707
|
return stfError(StfErrorKind.Accumulate, accumulateResult);
|
|
63766
63708
|
}
|
|
@@ -63891,7 +63833,7 @@ class Importer {
|
|
|
63891
63833
|
this.stf = new OnChain(spec, state, blocks, hasher);
|
|
63892
63834
|
this.state = state;
|
|
63893
63835
|
this.currentHash = currentBestHeaderHash;
|
|
63894
|
-
logger.info
|
|
63836
|
+
logger.info(`😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`);
|
|
63895
63837
|
}
|
|
63896
63838
|
async importBlock(block, omitSealVerification) {
|
|
63897
63839
|
const timer = measure("importBlock");
|
|
@@ -63899,20 +63841,20 @@ class Importer {
|
|
|
63899
63841
|
const maybeBestHeader = await this.importBlockInternal(block, omitSealVerification);
|
|
63900
63842
|
if (maybeBestHeader.isOk) {
|
|
63901
63843
|
const bestHeader = maybeBestHeader.ok;
|
|
63902
|
-
this.logger.info
|
|
63903
|
-
this.logger.log
|
|
63844
|
+
this.logger.info(`🧊 Best block: #${timeSlot} (${bestHeader.hash})`);
|
|
63845
|
+
this.logger.log(timer());
|
|
63904
63846
|
return maybeBestHeader;
|
|
63905
63847
|
}
|
|
63906
|
-
this.logger.log
|
|
63907
|
-
this.logger.log
|
|
63848
|
+
this.logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
|
|
63849
|
+
this.logger.log(timer());
|
|
63908
63850
|
return maybeBestHeader;
|
|
63909
63851
|
}
|
|
63910
63852
|
async importBlockInternal(block, omitSealVerification = false) {
|
|
63911
63853
|
const logger = this.logger;
|
|
63912
|
-
logger.log
|
|
63854
|
+
logger.log("🧱 Attempting to import a new block");
|
|
63913
63855
|
const timerVerify = measure("import:verify");
|
|
63914
63856
|
const hash = await this.verifier.verifyBlock(block);
|
|
63915
|
-
logger.log
|
|
63857
|
+
logger.log(timerVerify());
|
|
63916
63858
|
if (hash.isError) {
|
|
63917
63859
|
return importerError(ImporterErrorKind.Verifier, hash);
|
|
63918
63860
|
}
|
|
@@ -63932,10 +63874,10 @@ class Importer {
 }
 const timeSlot = block.header.view().timeSlotIndex.materialize();
 const headerHash = hash.ok;
-logger.log
+logger.log(`🧱 Verified block: Got hash ${headerHash} for block at slot ${timeSlot}.`);
 const timerStf = measure("import:stf");
 const res = await this.stf.transition(block, headerHash, omitSealVerification);
-logger.log
+logger.log(timerStf());
 if (res.isError) {
 return importerError(ImporterErrorKind.Stf, res);
 }
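
The two hunks above thread outcomes through `isOk` / `isError` checks rather than exceptions. The Result type itself is outside this diff; a minimal hypothetical shape that would satisfy the accesses visible here (the real typeberry type carries more, e.g. what `resultToString` prints) is:

    // Hypothetical minimal shape — just enough for the .isOk / .isError / .ok
    // accesses used in the hunks above.
    type Result<T, E> =
      | { isOk: true; isError: false; ok: T }
      | { isOk: false; isError: true; error: E; details?: string };

    const ok = <T, E>(value: T): Result<T, E> =>
      ({ isOk: true, isError: false, ok: value });

    const err = <T, E>(error: E, details?: string): Result<T, E> =>
      ({ isOk: false, isError: true, error, details });
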
@@ -63944,7 +63886,7 @@ class Importer {
 const timerState = measure("import:state");
 const updateResult = await this.states.updateAndSetState(headerHash, this.state, update);
 if (updateResult.isError) {
-logger.error
+logger.error(`🧱 Unable to update state: ${resultToString(updateResult)}`);
 return importerError(ImporterErrorKind.Update, updateResult);
 }
 const newState = this.states.getState(headerHash);
@@ -63955,17 +63897,17 @@ class Importer {
 // the state of a parent block to support forks and create a fresh STF.
 this.state.updateBackend(newState.backend);
 this.currentHash = headerHash;
-logger.log
+logger.log(timerState());
 // insert new state and the block to DB.
 const timerDb = measure("import:db");
 const writeBlocks = this.blocks.insertBlock(new WithHash(headerHash, block));
 // Computation of the state root may happen asynchronously,
 // but we still need to wait for it before next block can be imported
 const stateRoot = await this.states.getStateRoot(newState);
-logger.log
+logger.log(`🧱 Storing post-state-root for ${headerHash}: ${stateRoot}.`);
 const writeStateRoot = this.blocks.setPostStateRoot(headerHash, stateRoot);
 await Promise.all([writeBlocks, writeStateRoot]);
-logger.log
+logger.log(timerDb());
 // finally update the best block
 await this.blocks.setBestHeaderHash(headerHash);
 return result_Result.ok(new WithHash(headerHash, block.header.view()));
@@ -64013,7 +63955,7 @@ if (!external_node_worker_threads_namespaceObject.isMainThread) {
 Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
 const machine = importerStateMachine();
 const channel = MessageChannelStateMachine.receiveChannel(machine, external_node_worker_threads_namespaceObject.parentPort);
-channel.then((channel) => importer_main(channel)).catch((e) => importer_logger.error
+channel.then((channel) => importer_main(channel)).catch((e) => importer_logger.error(e));
 }
 const keccakHasher = KeccakHasher.create();
 async function createImporter(config) {
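
The hunk above runs when the bundle is loaded as a worker thread: it wires the importer state machine to `parentPort` and now logs any bootstrap failure. Stripped of the typeberry-specific `MessageChannelStateMachine` and `importer_main`, the underlying node:worker_threads pattern looks roughly like this (`handleMessages` is a hypothetical stand-in):

    // Generic worker-thread bootstrap pattern using only node:worker_threads;
    // no typeberry internals are reproduced here.
    import { isMainThread, parentPort } from "node:worker_threads";

    async function handleMessages(port: NonNullable<typeof parentPort>): Promise<void> {
      port.on("message", (msg) => {
        // react to configuration / block messages sent from the main thread
      });
    }

    if (!isMainThread && parentPort !== null) {
      handleMessages(parentPort).catch((e) => console.error(e));
    }
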
@@ -64035,7 +63977,7 @@ async function createImporter(config) {
 */
 async function importer_main(channel) {
 const wasmPromise = initAll();
-importer_logger.info
+importer_logger.info(`📥 Importer starting ${channel.currentState()}`);
 // Await the configuration object
 const ready = await channel.waitForState("ready(importer)");
 let closeDb = async () => { };
@@ -64047,7 +63989,7 @@ async function importer_main(channel) {
 };
 // TODO [ToDr] this is shit, since we have circular dependency.
 worker.setImporter(importer);
-importer_logger.info
+importer_logger.info("📥 Importer waiting for blocks.");
 worker.onBlock.on(async (block) => {
 const res = await importer.importBlock(block, config.omitSealVerification);
 if (res.isOk) {
@@ -64056,7 +63998,7 @@ async function importer_main(channel) {
 });
 await wasmPromise;
 });
-importer_logger.info
+importer_logger.info("📥 Importer finished. Closing channel.");
 // close the database
 await closeDb();
 // Close the comms to gracefuly close the app.
@@ -64077,8 +64019,8 @@ async function importer_main(channel) {
 const zeroHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
 async function mainImporter(config, withRelPath) {
 await initAll();
-common_logger.info
-common_logger.info
+common_logger.info(`🫐 Typeberry ${package_namespaceObject.rE}. GP: ${CURRENT_VERSION} (${CURRENT_SUITE})`);
+common_logger.info(`🎸 Starting importer: ${config.nodeName}.`);
 const chainSpec = getChainSpec(config.node.flavor);
 const { rootDb, dbPath, genesisHeaderHash } = openDatabase(config.nodeName, config.node.chainSpec.genesisHeader, withRelPath(config.node.databaseBasePath));
 // Initialize the database with genesis state and block if there isn't one.
@@ -64105,9 +64047,9 @@ async function mainImporter(config, withRelPath) {
 return importer.getBestStateRootHash() ?? zeroHash;
 },
 async close() {
-common_logger.log
+common_logger.log("[main] 🛢️ Closing the database");
 await lmdb.close();
-common_logger.info
+common_logger.info("[main] ✅ Done.");
 },
 };
 return api;
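
For orientation, the object assembled in this hunk is what `mainImporter` hands back to its caller; a hypothetical consumer (argument values are whatever the caller already prepared) would drive it roughly like this:

    // Hypothetical usage of the api returned by mainImporter above.
    const api = await mainImporter(config, withRelPath);
    // ...feed blocks / query the best state root...
    await api.close(); // emits the "[main]" shutdown messages restored in this hunk
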
@@ -64134,7 +64076,7 @@ function getFuzzDetails() {
 };
 }
 async function mainFuzz(fuzzConfig, withRelPath) {
-main_fuzz_logger.info
+main_fuzz_logger.info(`💨 Fuzzer V${fuzzConfig.version} starting up.`);
 const { jamNodeConfig: config } = fuzzConfig;
 let runningNode = null;
 const fuzzSeed = BigInt(Date.now());
@@ -64204,20 +64146,20 @@ async function mainFuzz(fuzzConfig, withRelPath) {
 
 const importBlocks = async (node, blocksToImport) => {
 const logger = Logger.new(import.meta.filename, "jam");
-logger.info
+logger.info(`📖 Reading ${blocksToImport.length} blocks`);
 const reader = startBlocksReader({
 files: blocksToImport,
 chainSpec: node.chainSpec,
 });
 for (const block of reader) {
-logger.log
+logger.log(`📖 Importing block: #${block.header.view().timeSlotIndex.materialize()}`);
 const res = await node.importBlock(block);
 if (res.isError) {
-logger.error
+logger.error(`📖 ${resultToString(res)}`);
 }
 }
 // close the importer.
-logger.info
+logger.info("All blocks scheduled to be imported.");
 return await node.close();
 };
 function* startBlocksReader(options) {
@@ -64414,7 +64356,6 @@ function parseFuzzVersion(v) {
 
 
 
-
 const prepareConfigFile = (args) => {
 const nodeConfig = loadConfig(args.args.configPath);
 const nodeName = args.command === Command.Dev ? `${args.args.nodeName}-${args.args.index}` : args.args.nodeName;
@@ -64441,8 +64382,14 @@ const prepareConfigFile = (args) => {
 };
 if (import.meta.url === (0,external_node_url_namespaceObject.pathToFileURL)(process.argv[1]).href) {
 Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
+const relPath = `${import.meta.dirname}/../..`;
+const withRelPath = (p) => {
+if (p.startsWith("/")) {
+return p;
+}
+return `${relPath}/${p}`;
+};
 let args;
-const withRelPath = workspacePathFix(`${import.meta.dirname}/../..`);
 try {
 const parsed = parseArgs(process.argv.slice(2), withRelPath);
 if (parsed === null) {