@typeberry/jam 0.1.1 → 0.1.2-3178190

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/importer/index.js CHANGED
@@ -4245,6 +4245,7 @@ var GpVersion;
4245
4245
  GpVersion["V0_6_7"] = "0.6.7";
4246
4246
  GpVersion["V0_7_0"] = "0.7.0";
4247
4247
  GpVersion["V0_7_1"] = "0.7.1-preview";
4248
+ GpVersion["V0_7_2"] = "0.7.2-preview";
4248
4249
  })(GpVersion || (GpVersion = {}));
4249
4250
  var TestSuite;
4250
4251
  (function (TestSuite) {
@@ -4252,7 +4253,7 @@ var TestSuite;
4252
4253
  TestSuite["JAMDUNA"] = "jamduna";
4253
4254
  })(TestSuite || (TestSuite = {}));
4254
4255
  const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
4255
- const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1];
4256
+ const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
4256
4257
  const env = typeof process === "undefined" ? {} : process.env;
4257
4258
  const DEFAULT_VERSION = GpVersion.V0_7_0;
4258
4259
  let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
@@ -4261,20 +4262,26 @@ function parseCurrentVersion(env) {
4261
4262
  if (env === undefined) {
4262
4263
  return undefined;
4263
4264
  }
4264
- const version = env;
4265
- if (!Object.values(GpVersion).includes(version)) {
4266
- throw new Error(`Configured environment variable GP_VERSION is unknown: '${env}'. Use one of: ${ALL_VERSIONS_IN_ORDER}`);
4265
+ switch (env) {
4266
+ case GpVersion.V0_6_7:
4267
+ case GpVersion.V0_7_0:
4268
+ case GpVersion.V0_7_1:
4269
+ case GpVersion.V0_7_2:
4270
+ return env;
4271
+ default:
4272
+ throw new Error(`Configured environment variable GP_VERSION is unknown: '${env}'. Use one of: ${ALL_VERSIONS_IN_ORDER}`);
4267
4273
  }
4268
- return version;
4269
4274
  }
4270
4275
  function parseCurrentSuite(env) {
4271
4276
  if (env === undefined)
4272
4277
  return undefined;
4273
- const val = env;
4274
- if (!Object.values(TestSuite).includes(val)) {
4275
- throw new Error(`Configured environment variable TEST_SUITE is unknown: '${env}'. Use one of: ${Object.values(TestSuite)}`);
4278
+ switch (env) {
4279
+ case TestSuite.W3F_DAVXY:
4280
+ case TestSuite.JAMDUNA:
4281
+ return env;
4282
+ default:
4283
+ throw new Error(`Configured environment variable TEST_SUITE is unknown: '${env}'. Use one of: ${Object.values(TestSuite)}`);
4276
4284
  }
4277
- return val;
4278
4285
  }
4279
4286
  class Compatibility {
4280
4287
  static override(version) {
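Note (editorial, not part of the published package): the GP_VERSION validation above moves from an `Object.values(GpVersion).includes(version)` check to an explicit `switch`. A minimal TypeScript sketch of the equivalent source-level logic, assuming the original sources roughly mirror this compiled output:

  function parseCurrentVersion(raw: string | undefined): GpVersion | undefined {
    if (raw === undefined) return undefined;
    switch (raw) {
      case GpVersion.V0_6_7:
      case GpVersion.V0_7_0:
      case GpVersion.V0_7_1:
      case GpVersion.V0_7_2:
        return raw; // each case is a string-enum member, so no cast is needed here
      default:
        throw new Error(`Configured environment variable GP_VERSION is unknown: '${raw}'. Use one of: ${ALL_VERSIONS_IN_ORDER}`);
    }
  }

  // e.g. GP_VERSION=0.7.2-preview selects the newly added entry; an unset
  // GP_VERSION still falls back to DEFAULT_VERSION (0.7.0).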
@@ -4436,6 +4443,34 @@ class WithDebug {
4436
4443
  }
4437
4444
  }
4438
4445
 
4446
+ ;// CONCATENATED MODULE: ./packages/core/utils/dev.ts
4447
+ const dev_env = typeof process === "undefined" ? {} : process.env;
4448
+ /**
4449
+ * The function will produce a relative path resolver that is adjusted

4450
+ * for package location within the workspace.
4451
+ *
4452
+ * Example:
4453
+ * $ npm start -w @typeberry/jam
4454
+ *
4455
+ * The above command will run `./bin/jam/index.js`, however we would
4456
+ * still want relative paths to be resolved according to top-level workspace
4457
+ * directory.
4458
+ *
4459
+ * So the caller passes the absolute workspace path as an argument and gets
4460
+ * a function that can properly resolve relative paths.
4461
+ *
4462
+ * NOTE: the translation happens only for development builds! When
4463
+ * we build a single library from our project, we no longer mangle the paths.
4464
+ */
4465
+ const workspacePathFix = dev_env.NODE_ENV === "development"
4466
+ ? (workspacePath) => (p) => {
4467
+ if (p.startsWith("/")) {
4468
+ return p;
4469
+ }
4470
+ return `${workspacePath}/${p}`;
4471
+ }
4472
+ : () => (p) => p;
4473
+
4439
4474
  ;// CONCATENATED MODULE: ./packages/core/utils/opaque.ts
4440
4475
  /**
4441
4476
  * @fileoverview `Opaque<Type, Token>` constructs a unique type which is a subset of Type with a
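Note (editorial, not part of the published package): a usage sketch for the `workspacePathFix` helper added above; the workspace root and call sites are assumptions for illustration only:

  const resolvePath = workspacePathFix("/home/user/typeberry"); // assumed workspace root
  resolvePath("configs/dev.json");    // development build -> "/home/user/typeberry/configs/dev.json"
  resolvePath("/etc/jam/spec.json");  // absolute paths are returned unchanged
  // In a non-development build, workspacePathFix returns an identity resolver,
  // so relative paths are left as-is.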
@@ -4779,6 +4814,7 @@ function isResult(x) {
4779
4814
 
4780
4815
 
4781
4816
 
4817
+
4782
4818
  ;// CONCATENATED MODULE: ./packages/core/bytes/bitvec.ts
4783
4819
 
4784
4820
  /**
@@ -13372,6 +13408,23 @@ function parseLevel(lvl) {
13372
13408
  ;// CONCATENATED MODULE: ./packages/core/logger/console.ts
13373
13409
  // biome-ignore-all lint/suspicious/noConsole: logger
13374
13410
 
13411
+ function print(level, levelAndName, strings, data) {
13412
+ if (level < levelAndName[0]) {
13413
+ return;
13414
+ }
13415
+ const lvlText = Level[level].padEnd(5);
13416
+ const val = strings.map((v, idx) => `${v}${idx < data.length ? data[idx] : ""}`);
13417
+ const msg = `${lvlText} [${levelAndName[1]}] ${val.join("")}`;
13418
+ if (level === Level.WARN) {
13419
+ console.warn(msg);
13420
+ }
13421
+ else if (level === Level.ERROR) {
13422
+ console.error(msg);
13423
+ }
13424
+ else {
13425
+ console.info(msg);
13426
+ }
13427
+ }
13375
13428
  /** An optimized logger that ignores `TRACE`, `DEBUG` and `LOG` messages.
13376
13429
  *
13377
13430
  * Use the `create` method to instantiate the right instance of a more specialized logger.
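Note (editorial, not part of the published package): the new `print` helper expects tagged-template arguments, which is why logger call sites later in this diff change from `logger.trace(`...`)` to `logger.trace`...``. A small sketch of how the pieces line up (values are illustrative):

  // logger.info`Best block: #${slot} (${hash})` reaches the transport with
  //   strings = ["Best block: #", " (", ")"]
  //   data    = [slot, hash]
  // and print() interleaves them back together:
  const strings = ["Best block: #", " (", ")"];
  const data: unknown[] = [42, "0xabc…"];
  const msg = strings.map((s, i) => `${s}${i < data.length ? data[i] : ""}`).join("");
  // msg === "Best block: #42 (0xabc…)"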
@@ -13402,109 +13455,91 @@ class ConsoleTransport {
13402
13455
  constructor(options) {
13403
13456
  this.options = options;
13404
13457
  }
13405
- insane(_moduleName, _val) {
13458
+ insane(_levelAndName, _strings, _data) {
13406
13459
  /* no-op */
13407
13460
  }
13408
- trace(_moduleName, _val) {
13461
+ trace(_levelAndName, _strings, _data) {
13409
13462
  /* no-op */
13410
13463
  }
13411
- log(_moduleName, _val) {
13464
+ log(_levelAndName, _strings, _data) {
13412
13465
  /* no-op */
13413
13466
  }
13414
- info(_moduleName, _val) {
13467
+ info(_levelAndName, _strings, _data) {
13415
13468
  /* no-op */
13416
13469
  }
13417
- warn(moduleName, val) {
13418
- this.push(Level.WARN, moduleName, val);
13470
+ warn(levelAndName, strings, data) {
13471
+ print(Level.WARN, levelAndName, strings, data);
13419
13472
  }
13420
- error(moduleName, val) {
13421
- this.push(Level.ERROR, moduleName, val);
13422
- }
13423
- push(level, moduleName, val) {
13424
- const shortModule = moduleName.replace(this.options.workingDir, "");
13425
- const configuredLevel = findLevel(this.options, moduleName);
13426
- const lvlText = Level[level].padEnd(5);
13427
- if (level < configuredLevel) {
13428
- return;
13429
- }
13430
- const msg = `${lvlText} [${shortModule}] ${val}`;
13431
- if (level === Level.WARN) {
13432
- console.warn(msg);
13433
- }
13434
- else if (level === Level.ERROR) {
13435
- console.error(msg);
13436
- }
13437
- else {
13438
- console.info(msg);
13439
- }
13473
+ error(levelAndName, strings, data) {
13474
+ print(Level.ERROR, levelAndName, strings, data);
13440
13475
  }
13441
13476
  }
13442
13477
  /**
13443
13478
  * Insane version of console logger - supports insane level.
13444
13479
  */
13445
13480
  class InsaneConsoleLogger extends ConsoleTransport {
13446
- insane(moduleName, val) {
13447
- this.push(Level.INSANE, moduleName, val);
13481
+ insane(levelAndName, strings, data) {
13482
+ print(Level.INSANE, levelAndName, strings, data);
13448
13483
  }
13449
- trace(moduleName, val) {
13450
- this.push(Level.TRACE, moduleName, val);
13484
+ trace(levelAndName, strings, data) {
13485
+ print(Level.TRACE, levelAndName, strings, data);
13451
13486
  }
13452
- log(moduleName, val) {
13453
- this.push(Level.LOG, moduleName, val);
13487
+ log(levelAndName, strings, data) {
13488
+ print(Level.LOG, levelAndName, strings, data);
13454
13489
  }
13455
- info(moduleName, val) {
13456
- this.push(Level.INFO, moduleName, val);
13490
+ info(levelAndName, strings, data) {
13491
+ print(Level.INFO, levelAndName, strings, data);
13457
13492
  }
13458
13493
  }
13459
13494
  /**
13460
13495
  * A basic version of console logger - printing everything.
13461
13496
  */
13462
13497
  class TraceConsoleTransport extends ConsoleTransport {
13463
- insane(_moduleName, _val) {
13498
+ insane(_levelAndName, _strings, _data) {
13464
13499
  /* no-op */
13465
13500
  }
13466
- trace(moduleName, val) {
13467
- this.push(Level.TRACE, moduleName, val);
13501
+ trace(levelAndName, strings, data) {
13502
+ print(Level.TRACE, levelAndName, strings, data);
13468
13503
  }
13469
- log(moduleName, val) {
13470
- this.push(Level.LOG, moduleName, val);
13504
+ log(levelAndName, strings, data) {
13505
+ print(Level.LOG, levelAndName, strings, data);
13471
13506
  }
13472
- info(moduleName, val) {
13473
- this.push(Level.INFO, moduleName, val);
13507
+ info(levelAndName, strings, data) {
13508
+ print(Level.INFO, levelAndName, strings, data);
13474
13509
  }
13475
13510
  }
13476
13511
  /**
13477
13512
  * An optimized version of the logger - completely ignores `TRACE` level calls.
13478
13513
  */
13479
13514
  class LogConsoleTransport extends ConsoleTransport {
13480
- insane(_moduleName, _val) {
13515
+ insane(_levelAndName, _strings, _data) {
13481
13516
  /* no-op */
13482
13517
  }
13483
- trace(_moduleName, _val) {
13518
+ trace(_levelAndName, _strings, _data) {
13484
13519
  /* no-op */
13485
13520
  }
13486
- log(moduleName, val) {
13487
- this.push(Level.LOG, moduleName, val);
13521
+ log(levelAndName, strings, data) {
13522
+ print(Level.LOG, levelAndName, strings, data);
13488
13523
  }
13489
- info(moduleName, val) {
13490
- this.push(Level.INFO, moduleName, val);
13524
+ info(levelAndName, strings, data) {
13525
+ print(Level.INFO, levelAndName, strings, data);
13491
13526
  }
13492
13527
  }
13493
13528
  /**
13494
13529
  * An optimized version of the logger - completely ignores `TRACE` & `DEBUG` level calls.
13495
13530
  */
13496
13531
  class InfoConsoleTransport extends ConsoleTransport {
13497
- insane(_moduleName, _val) {
13532
+ insane(_levelAndName, _strings, _data) {
13498
13533
  /* no-op */
13499
13534
  }
13500
- trace(_moduleName, _val) {
13535
+ trace(_levelAndName, _strings, _data) {
13501
13536
  /* no-op */
13502
13537
  }
13503
- log(_moduleName, _val) {
13538
+ log(_levelAndName, _strings, _data) {
13504
13539
  /* no-op */
13505
13540
  }
13506
- info(moduleName, val) {
13507
- this.push(Level.INFO, moduleName, val);
13541
+ info(levelAndName, strings, data) {
13542
+ print(Level.INFO, levelAndName, strings, data);
13508
13543
  }
13509
13544
  }
13510
13545
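Note (editorial, not part of the published package): one consequence of the tagged-template transports above is that filtered-out levels never format their arguments — the no-op methods simply drop the strings/data arrays. A sketch of the difference, using the memory logger that appears later in this diff:

  logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`); // old style: the message string is
                                                                    // built even when INSANE is filtered out
  logger.insane`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`;   // new style: blobFrom(bytes) still runs,
                                                                    // but stringification/concatenation is
                                                                    // deferred to print(), which no-op
                                                                    // transports never call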
 
@@ -13541,11 +13576,6 @@ class Logger {
13541
13576
  const module = moduleName ?? fName;
13542
13577
  return new Logger(module.padStart(8, " "), GLOBAL_CONFIG);
13543
13578
  }
13544
- /**
13545
- * Return currently configured level for given module. */
13546
- static getLevel(moduleName) {
13547
- return findLevel(GLOBAL_CONFIG.options, moduleName);
13548
- }
13549
13579
  /**
13550
13580
  * Global configuration of all loggers.
13551
13581
  *
@@ -13576,33 +13606,46 @@ class Logger {
13576
13606
  const options = parseLoggerOptions(input, defaultLevel, workingDir);
13577
13607
  Logger.configureAllFromOptions(options);
13578
13608
  }
13609
+ cachedLevelAndName;
13579
13610
  constructor(moduleName, config) {
13580
13611
  this.moduleName = moduleName;
13581
13612
  this.config = config;
13582
13613
  }
13614
+ /** Return currently configured level for given module. */
13615
+ getLevel() {
13616
+ return this.getLevelAndName()[0];
13617
+ }
13618
+ getLevelAndName() {
13619
+ if (this.cachedLevelAndName === undefined) {
13620
+ const level = findLevel(this.config.options, this.moduleName);
13621
+ const shortName = this.moduleName.replace(this.config.options.workingDir, "");
13622
+ this.cachedLevelAndName = [level, shortName];
13623
+ }
13624
+ return this.cachedLevelAndName;
13625
+ }
13583
13626
  /** Log a message with `INSANE` level. */
13584
- insane(val) {
13585
- this.config.transport.insane(this.moduleName, val);
13627
+ insane(strings, ...data) {
13628
+ this.config.transport.insane(this.getLevelAndName(), strings, data);
13586
13629
  }
13587
13630
  /** Log a message with `TRACE` level. */
13588
- trace(val) {
13589
- this.config.transport.trace(this.moduleName, val);
13631
+ trace(strings, ...data) {
13632
+ this.config.transport.trace(this.getLevelAndName(), strings, data);
13590
13633
  }
13591
13634
  /** Log a message with `DEBUG`/`LOG` level. */
13592
- log(val) {
13593
- this.config.transport.log(this.moduleName, val);
13635
+ log(strings, ...data) {
13636
+ this.config.transport.log(this.getLevelAndName(), strings, data);
13594
13637
  }
13595
13638
  /** Log a message with `INFO` level. */
13596
- info(val) {
13597
- this.config.transport.info(this.moduleName, val);
13639
+ info(strings, ...data) {
13640
+ this.config.transport.info(this.getLevelAndName(), strings, data);
13598
13641
  }
13599
13642
  /** Log a message with `WARN` level. */
13600
- warn(val) {
13601
- this.config.transport.warn(this.moduleName, val);
13643
+ warn(strings, ...data) {
13644
+ this.config.transport.warn(this.getLevelAndName(), strings, data);
13602
13645
  }
13603
13646
  /** Log a message with `ERROR` level. */
13604
- error(val) {
13605
- this.config.transport.error(this.moduleName, val);
13647
+ error(strings, ...data) {
13648
+ this.config.transport.error(this.getLevelAndName(), strings, data);
13606
13649
  }
13607
13650
  }
13608
13651
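Note (editorial, not part of the published package): with the static `Logger.getLevel` removed, the level and shortened module name are now resolved lazily per logger instance and memoised in `cachedLevelAndName`. A usage sketch consistent with the call sites further down in this diff (the interpolated values are assumed to be in scope):

  const logger = Logger.new(import.meta.filename, "importer");
  logger.info`😎 Best time slot: ${timeslot} (header hash: ${headerHash})`;
  // The first call runs findLevel() and strips options.workingDir from the module
  // name; every later call on this instance reuses the cached [level, shortName] pair.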
 
@@ -13717,7 +13760,7 @@ class LmdbStates {
13717
13760
  await Promise.all([valuesWrite, statesWrite]);
13718
13761
  }
13719
13762
  catch (e) {
13720
- logger.error(`${e}`);
13763
+ logger.error `${e}`;
13721
13764
  return result_Result.error(StateUpdateError.Commit);
13722
13765
  }
13723
13766
  return result_Result.ok(result_OK);
@@ -13825,7 +13868,7 @@ class TypedPort {
13825
13868
  this.dispatchPortMessage(msg);
13826
13869
  }
13827
13870
  catch (e) {
13828
- port_logger.error(`[${this.constructor.name}] Failed to dispatch a message: ${e}: ${JSON.stringify(msg)}`);
13871
+ port_logger.error `[${this.constructor.name}] Failed to dispatch a message: ${e}: ${msg}`;
13829
13872
  throw e;
13830
13873
  }
13831
13874
  });
@@ -13899,7 +13942,7 @@ class TypedPort {
13899
13942
  this.port.postMessage(msg, transferList);
13900
13943
  }
13901
13944
  catch (e) {
13902
- port_logger.error(`[${this.constructor.name}] Failed to post a message: ${e}: ${JSON.stringify(msg)}`);
13945
+ port_logger.error `[${this.constructor.name}] Failed to post a message: ${e}: ${msg}`;
13903
13946
  throw e;
13904
13947
  }
13905
13948
  }
@@ -13930,7 +13973,7 @@ class TypedPort {
13930
13973
  cleanup(reason) {
13931
13974
  // resolve all pending requests with an error.
13932
13975
  const responseListeners = this.responseListeners.eventNames();
13933
- for (const ev in responseListeners) {
13976
+ for (const ev of responseListeners) {
13934
13977
  this.responseListeners.emit(ev, new Error(`port is ${reason}`));
13935
13978
  }
13936
13979
  }
@@ -13979,7 +14022,7 @@ class channel_MessageChannelStateMachine {
13979
14022
  this.dispatchSignal(name, data);
13980
14023
  }
13981
14024
  catch (e) {
13982
- channel_logger.error(`[${this.constructor.name}] Unable to dispatch signal (${name}): ${e}. ${this.stateInfo(remoteState)}`);
14025
+ channel_logger.error `[${this.constructor.name}] Unable to dispatch signal (${name}): ${e}. ${this.stateInfo(remoteState)}`;
13983
14026
  throw e;
13984
14027
  }
13985
14028
  });
@@ -13988,7 +14031,7 @@ class channel_MessageChannelStateMachine {
13988
14031
  await this.dispatchRequest(name, data, msg);
13989
14032
  }
13990
14033
  catch (e) {
13991
- channel_logger.error(`[${this.constructor.name}] Unable to dispatch request (${name}): ${e}. ${this.stateInfo(remoteState)}`);
14034
+ channel_logger.error `[${this.constructor.name}] Unable to dispatch request (${name}): ${e}. ${this.stateInfo(remoteState)}`;
13992
14035
  throw e;
13993
14036
  }
13994
14037
  });
@@ -14074,7 +14117,7 @@ class channel_MessageChannelStateMachine {
14074
14117
  this.machine.transition(res.transitionTo.state, res.transitionTo.data);
14075
14118
  }
14076
14119
  if (didStateChangeInMeantime) {
14077
- channel_logger.warn(`Ignoring obsolete response for an old request: "${name}"`);
14120
+ channel_logger.warn `Ignoring obsolete response for an old request: "${name}"`;
14078
14121
  return;
14079
14122
  }
14080
14123
  return this.port.respond(prevState.stateName, msg, res.response);
@@ -14090,7 +14133,7 @@ class channel_MessageChannelStateMachine {
14090
14133
  }
14091
14134
  }
14092
14135
  transitionTo() {
14093
- channel_logger.trace(`[${this.machine.name}] transitioned to ${this.currentState()}`);
14136
+ channel_logger.trace `[${this.machine.name}] transitioned to ${this.currentState()}`;
14094
14137
  return this;
14095
14138
  }
14096
14139
  /**
@@ -14110,7 +14153,7 @@ class channel_MessageChannelStateMachine {
14110
14153
  await promise;
14111
14154
  }
14112
14155
  catch (e) {
14113
- channel_logger.error(JSON.stringify(e));
14156
+ channel_logger.error `${e}`;
14114
14157
  }
14115
14158
  return new channel_MessageChannelStateMachine(machine, port);
14116
14159
  }
@@ -14313,7 +14356,7 @@ class State {
14313
14356
  * actions.
14314
14357
  */
14315
14358
  onActivation(data) {
14316
- state_logger.trace(`[${this.constructor.name}] Changing state to: ${this}`);
14359
+ state_logger.trace `[${this.constructor.name}] Changing state to: ${this}`;
14317
14360
  this.data = data;
14318
14361
  }
14319
14362
  /**
@@ -15896,6 +15939,17 @@ class PageRange {
15896
15939
  }
15897
15940
  return new PageRange(start, length);
15898
15941
  }
15942
+ /** Returns true if the page range is wrapped (`start` >= `end`) and is not empty */
15943
+ isWrapped() {
15944
+ return this.start >= this.end && !this.isEmpty();
15945
+ }
15946
+ /** Checks if a given page number is within the range */
15947
+ isInRange(page) {
15948
+ if (this.isWrapped()) {
15949
+ return page >= this.start || page < this.end;
15950
+ }
15951
+ return page >= this.start && page < this.end;
15952
+ }
15899
15953
  /** Checks if a range is empty (`length === 0`) */
15900
15954
  isEmpty() {
15901
15955
  return this.length === 0;
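Note (editorial, not part of the published package): a worked example of the new wrapped-range helpers, with made-up page numbers:

  // A wrapped range has start >= end because the end index wraps modulo the
  // page-index space, e.g. start = 0xffff0, end = 0x10:
  //   isWrapped()          === true
  //   isInRange(0xffff5)   === true    // tail: page >= start
  //   isInRange(0x00008)   === true    // wrapped head: page < end
  //   isInRange(0x01000)   === false
  // A non-wrapped range with start = 5, end = 9:
  //   isInRange(5) === true, isInRange(9) === false    // end-exclusive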
@@ -16024,12 +16078,14 @@ class WriteablePage extends MemoryPage {
16024
16078
 
16025
16079
 
16026
16080
 
16081
+
16082
+
16027
16083
  var AccessType;
16028
16084
  (function (AccessType) {
16029
16085
  AccessType[AccessType["READ"] = 0] = "READ";
16030
16086
  AccessType[AccessType["WRITE"] = 1] = "WRITE";
16031
16087
  })(AccessType || (AccessType = {}));
16032
- // const logger = Logger.new(import.meta.filename, "pvm:mem");
16088
+ const memory_logger = Logger.new(import.meta.filename, "pvm:mem");
16033
16089
  class Memory {
16034
16090
  sbrkIndex;
16035
16091
  virtualSbrkIndex;
@@ -16060,7 +16116,7 @@ class Memory {
16060
16116
  if (bytes.length === 0) {
16061
16117
  return result_Result.ok(result_OK);
16062
16118
  }
16063
- // logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`);
16119
+ memory_logger.insane `MEM[${address}] <- ${bytes_BytesBlob.blobFrom(bytes)}`;
16064
16120
  const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
16065
16121
  if (pagesResult.isError) {
16066
16122
  return result_Result.error(pagesResult.error);
@@ -16127,7 +16183,7 @@ class Memory {
16127
16183
  currentPosition += bytesToRead;
16128
16184
  bytesLeft -= bytesToRead;
16129
16185
  }
16130
- // logger.insane(`MEM[${startAddress}] => ${BytesBlob.blobFrom(result)}`);
16186
+ memory_logger.insane `MEM[${startAddress}] => ${bytes_BytesBlob.blobFrom(result)}`;
16131
16187
  return result_Result.ok(result_OK);
16132
16188
  }
16133
16189
  sbrk(length) {
@@ -16273,10 +16329,11 @@ class MemoryBuilder {
16273
16329
  startHeapIndex (${startHeapIndex}) has to be less than or equal to endHeapIndex (${endHeapIndex})
16274
16330
  `;
16275
16331
  this.ensureNotFinalized();
16276
- const range = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
16277
- const pages = PageRange.fromMemoryRange(range);
16278
- for (const pageNumber of pages) {
16279
- if (this.initialMemory.has(pageNumber)) {
16332
+ const heapRange = MemoryRange.fromStartAndLength(startHeapIndex, endHeapIndex - startHeapIndex);
16333
+ const heapPagesRange = PageRange.fromMemoryRange(heapRange);
16334
+ const initializedPageNumbers = Array.from(this.initialMemory.keys());
16335
+ for (const pageNumber of initializedPageNumbers) {
16336
+ if (heapPagesRange.isInRange(pageNumber)) {
16280
16337
  throw new IncorrectSbrkIndex();
16281
16338
  }
16282
16339
  }
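Note (editorial, not part of the published package): the heap check above is inverted — instead of walking every page number inside the heap range, the builder now walks the (typically much smaller) set of explicitly initialised pages and asks whether any of them falls inside the heap range. A sketch of the two shapes, using the names from the diff:

  // before (removed lines): iterate the whole heap page range
  for (const pageNumber of PageRange.fromMemoryRange(heapRange)) {
    if (this.initialMemory.has(pageNumber)) throw new IncorrectSbrkIndex();
  }
  // after: iterate only initialised pages; isInRange() also handles wrapped ranges
  for (const pageNumber of this.initialMemory.keys()) {
    if (heapPagesRange.isInRange(pageNumber)) throw new IncorrectSbrkIndex();
  }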
@@ -18057,7 +18114,7 @@ class ProgramDecoder {
18057
18114
  return result_Result.ok(new ProgramDecoder(program));
18058
18115
  }
18059
18116
  catch (e) {
18060
- program_decoder_logger.error(`Invalid program: ${e}`);
18117
+ program_decoder_logger.error `Invalid program: ${e}`;
18061
18118
  return result_Result.error(ProgramDecoderError.InvalidProgramError);
18062
18119
  }
18063
18120
  }
@@ -18223,7 +18280,7 @@ class Interpreter {
18223
18280
  const argsType = instructionArgumentTypeMap[currentInstruction] ?? ArgumentType.NO_ARGUMENTS;
18224
18281
  const argsResult = this.argsDecodingResults[argsType];
18225
18282
  this.argsDecoder.fillArgs(this.pc, argsResult);
18226
- interpreter_logger.insane(`[PC: ${this.pc}] ${Instruction[currentInstruction]}`);
18283
+ interpreter_logger.insane `[PC: ${this.pc}] ${Instruction[currentInstruction]}`;
18227
18284
  if (!isValidInstruction) {
18228
18285
  this.instructionResult.status = pvm_interpreter_result_Result.PANIC;
18229
18286
  }
@@ -18295,7 +18352,7 @@ class Interpreter {
18295
18352
  this.status = status_Status.HOST;
18296
18353
  break;
18297
18354
  }
18298
- interpreter_logger.insane(`[PC: ${this.pc}] Status: ${pvm_interpreter_result_Result[this.instructionResult.status]}`);
18355
+ interpreter_logger.insane `[PC: ${this.pc}] Status: ${pvm_interpreter_result_Result[this.instructionResult.status]}`;
18299
18356
  return this.status;
18300
18357
  }
18301
18358
  this.pc = this.instructionResult.nextPc;
@@ -18542,7 +18599,7 @@ class host_calls_manager_HostCallsManager {
18542
18599
  return `r${idx}=${value} (0x${value.toString(16)})`;
18543
18600
  })
18544
18601
  .join(", ");
18545
- host_calls_manager_logger.insane(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
18602
+ host_calls_manager_logger.insane `[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`;
18546
18603
  }
18547
18604
  }
18548
18605
  class NoopMissing {
@@ -20181,16 +20238,12 @@ class PartiallyUpdatedState {
20181
20238
  *
20182
20239
  * NOTE the info may be updated compared to what is in the state.
20183
20240
  *
20184
- * Takes into account newly created services as well.
20241
+ * Takes into account ejected and newly created services as well.
20185
20242
  */
20186
20243
  getServiceInfo(destination) {
20187
20244
  if (destination === null) {
20188
20245
  return null;
20189
20246
  }
20190
- const isEjected = this.stateUpdate.services.servicesRemoved.some((x) => x === destination);
20191
- if (isEjected) {
20192
- return null;
20193
- }
20194
20247
  const maybeNewService = this.stateUpdate.services.servicesUpdates.find((update) => update.serviceId === destination);
20195
20248
  if (maybeNewService !== undefined) {
20196
20249
  return maybeNewService.action.account;
@@ -20925,7 +20978,7 @@ class AccumulateExternalities {
20925
20978
  /** https://graypaper.fluffylabs.dev/#/7e6ff6a/362802362d02?v=0.6.7 */
20926
20979
  const validatorsManager = this.updatedState.getPrivilegedServices().validatorsManager;
20927
20980
  if (validatorsManager !== this.currentServiceId) {
20928
- accumulate_externalities_logger.trace(`Current service id (${this.currentServiceId}) is not a validators manager. (expected: ${validatorsManager}) and cannot update validators data. Ignoring`);
20981
+ accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not a validators manager. (expected: ${validatorsManager}) and cannot update validators data. Ignoring`;
20929
20982
  return result_Result.error(UnprivilegedError);
20930
20983
  }
20931
20984
  this.updatedState.stateUpdate.validatorsData = validatorsData;
@@ -20940,11 +20993,11 @@ class AccumulateExternalities {
20940
20993
  // NOTE `coreIndex` is already verified in the HC, so this is infallible.
20941
20994
  const currentAuthManager = this.updatedState.getPrivilegedServices().authManager[coreIndex];
20942
20995
  if (currentAuthManager !== this.currentServiceId) {
20943
- accumulate_externalities_logger.trace(`Current service id (${this.currentServiceId}) is not an auth manager of core ${coreIndex} (expected: ${currentAuthManager}) and cannot update authorization queue. Ignoring`);
20996
+ accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not an auth manager of core ${coreIndex} (expected: ${currentAuthManager}) and cannot update authorization queue. Ignoring`;
20944
20997
  return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
20945
20998
  }
20946
20999
  if (authManager === null && Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)) {
20947
- accumulate_externalities_logger.trace("The new auth manager is not a valid service id. Ignoring");
21000
+ accumulate_externalities_logger.trace `The new auth manager is not a valid service id. Ignoring`;
20948
21001
  return result_Result.error(UpdatePrivilegesError.InvalidServiceId);
20949
21002
  }
20950
21003
  this.updatedState.stateUpdate.authorizationQueues.set(coreIndex, authQueue);
@@ -21825,7 +21878,7 @@ class Assign {
21825
21878
  const memoryReadResult = memory.loadInto(res, authorizationQueueStart);
21826
21879
  // error while reading the memory.
21827
21880
  if (memoryReadResult.isError) {
21828
- logger_logger.trace("ASSIGN() <- PANIC");
21881
+ logger_logger.trace `ASSIGN() <- PANIC`;
21829
21882
  return PvmExecution.Panic;
21830
21883
  }
21831
21884
  if (maybeCoreIndex >= this.chainSpec.coresCount) {
@@ -21840,18 +21893,18 @@ class Assign {
21840
21893
  const result = this.partialState.updateAuthorizationQueue(coreIndex, fixedSizeAuthQueue, authManager);
21841
21894
  if (result.isOk) {
21842
21895
  regs.set(IN_OUT_REG, HostCallResult.OK);
21843
- logger_logger.trace(`ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`);
21896
+ logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- OK`;
21844
21897
  return;
21845
21898
  }
21846
21899
  const e = result.error;
21847
21900
  if (e === UpdatePrivilegesError.UnprivilegedService) {
21848
21901
  regs.set(IN_OUT_REG, HostCallResult.HUH);
21849
- logger_logger.trace(`ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`);
21902
+ logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`;
21850
21903
  return;
21851
21904
  }
21852
21905
  if (e === UpdatePrivilegesError.InvalidServiceId) {
21853
21906
  regs.set(IN_OUT_REG, HostCallResult.WHO);
21854
- logger_logger.trace(`ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- HUH`);
21907
+ logger_logger.trace `ASSIGN(${coreIndex}, ${fixedSizeAuthQueue}) <- WHO`;
21855
21908
  return;
21856
21909
  }
21857
21910
  debug_assertNever(e);
@@ -21917,7 +21970,7 @@ class Bless {
21917
21970
  decoder.resetTo(0);
21918
21971
  const memoryReadResult = memory.loadInto(result, memIndex);
21919
21972
  if (memoryReadResult.isError) {
21920
- logger_logger.trace(`BLESS(${manager}, ${validator}) <- PANIC`);
21973
+ logger_logger.trace `BLESS(${manager}, ${validator}) <- PANIC`;
21921
21974
  return PvmExecution.Panic;
21922
21975
  }
21923
21976
  const { serviceId, gas } = decoder.object(serviceIdAndGasCodec);
@@ -21930,24 +21983,24 @@ class Bless {
21930
21983
  const authorizersDecoder = decoder_Decoder.fromBlob(res);
21931
21984
  const memoryReadResult = memory.loadInto(res, authorization);
21932
21985
  if (memoryReadResult.isError) {
21933
- logger_logger.trace(`BLESS(${manager}, ${validator}, ${autoAccumulateEntries}) <- PANIC`);
21986
+ logger_logger.trace `BLESS(${manager}, ${validator}, ${autoAccumulateEntries}) <- PANIC`;
21934
21987
  return PvmExecution.Panic;
21935
21988
  }
21936
21989
  const authorizers = tryAsPerCore(authorizersDecoder.sequenceFixLen(descriptors_codec.u32.asOpaque(), this.chainSpec.coresCount), this.chainSpec);
21937
21990
  const updateResult = this.partialState.updatePrivilegedServices(manager, authorizers, validator, autoAccumulateEntries);
21938
21991
  if (updateResult.isOk) {
21939
- logger_logger.trace(`BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- OK`);
21992
+ logger_logger.trace `BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- OK`;
21940
21993
  regs.set(bless_IN_OUT_REG, HostCallResult.OK);
21941
21994
  return;
21942
21995
  }
21943
21996
  const e = updateResult.error;
21944
21997
  if (e === UpdatePrivilegesError.UnprivilegedService) {
21945
- logger_logger.trace(`BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- HUH`);
21998
+ logger_logger.trace `BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- HUH`;
21946
21999
  regs.set(bless_IN_OUT_REG, HostCallResult.HUH);
21947
22000
  return;
21948
22001
  }
21949
22002
  if (e === UpdatePrivilegesError.InvalidServiceId) {
21950
- logger_logger.trace(`BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- WHO`);
22003
+ logger_logger.trace `BLESS(${manager}, ${authorizers}, ${validator}, ${autoAccumulateEntries}) <- WHO`;
21951
22004
  regs.set(bless_IN_OUT_REG, HostCallResult.WHO);
21952
22005
  return;
21953
22006
  }
@@ -21977,7 +22030,7 @@ class GasHostCall {
21977
22030
  }
21978
22031
  execute(gas, regs) {
21979
22032
  const gasValue = gas.get();
21980
- logger_logger.trace(`GAS <- ${gasValue}`);
22033
+ logger_logger.trace `GAS <- ${gasValue}`;
21981
22034
  regs.set(7, numbers_tryAsU64(gasValue));
21982
22035
  return Promise.resolve(undefined);
21983
22036
  }
@@ -22009,7 +22062,7 @@ class Checkpoint {
22009
22062
  async execute(gas, regs) {
22010
22063
  await this.gasHostCall.execute(gas, regs);
22011
22064
  this.partialState.checkpoint();
22012
- logger_logger.trace("CHECKPOINT()");
22065
+ logger_logger.trace `CHECKPOINT()`;
22013
22066
  return;
22014
22067
  }
22015
22068
  }
@@ -22048,18 +22101,18 @@ class Designate {
22048
22101
  const memoryReadResult = memory.loadInto(res, validatorsStart);
22049
22102
  // error while reading the memory.
22050
22103
  if (memoryReadResult.isError) {
22051
- logger_logger.trace("DESIGNATE() <- PANIC");
22104
+ logger_logger.trace `DESIGNATE() <- PANIC`;
22052
22105
  return PvmExecution.Panic;
22053
22106
  }
22054
22107
  const decoder = decoder_Decoder.fromBlob(res);
22055
22108
  const validatorsData = decoder.sequenceFixLen(ValidatorData.Codec, this.chainSpec.validatorsCount);
22056
22109
  const result = this.partialState.updateValidatorsData(tryAsPerValidator(validatorsData, this.chainSpec));
22057
22110
  if (result.isError) {
22058
- logger_logger.trace(`DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`);
22111
+ logger_logger.trace `DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- HUH`;
22059
22112
  regs.set(designate_IN_OUT_REG, HostCallResult.HUH);
22060
22113
  }
22061
22114
  else {
22062
- logger_logger.trace(`DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`);
22115
+ logger_logger.trace `DESIGNATE([${validatorsData[0]}, ${validatorsData[1]}, ...]) <- OK`;
22063
22116
  regs.set(designate_IN_OUT_REG, HostCallResult.OK);
22064
22117
  }
22065
22118
  }
@@ -22100,17 +22153,17 @@ class Eject {
22100
22153
  const previousCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
22101
22154
  const memoryReadResult = memory.loadInto(previousCodeHash.raw, preimageHashStart);
22102
22155
  if (memoryReadResult.isError) {
22103
- logger_logger.trace(`EJECT(${serviceId}) <- PANIC`);
22156
+ logger_logger.trace `EJECT(${serviceId}) <- PANIC`;
22104
22157
  return PvmExecution.Panic;
22105
22158
  }
22106
22159
  // cannot eject self
22107
22160
  if (serviceId === this.currentServiceId) {
22108
22161
  regs.set(eject_IN_OUT_REG, HostCallResult.WHO);
22109
- logger_logger.trace(`EJECT(${serviceId}, ${previousCodeHash}) <- WHO`);
22162
+ logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- WHO`;
22110
22163
  return;
22111
22164
  }
22112
22165
  const result = this.partialState.eject(serviceId, previousCodeHash);
22113
- logger_logger.trace(`EJECT(${serviceId}, ${previousCodeHash}) <- ${resultToString(result)}`);
22166
+ logger_logger.trace `EJECT(${serviceId}, ${previousCodeHash}) <- ${resultToString(result)}`;
22114
22167
  // All good!
22115
22168
  if (result.isOk) {
22116
22169
  regs.set(eject_IN_OUT_REG, HostCallResult.OK);
@@ -22162,11 +22215,11 @@ class Forget {
22162
22215
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
22163
22216
  // error while reading the memory.
22164
22217
  if (memoryReadResult.isError) {
22165
- logger_logger.trace(`FORGET(${hash}, ${length}) <- PANIC`);
22218
+ logger_logger.trace `FORGET(${hash}, ${length}) <- PANIC`;
22166
22219
  return PvmExecution.Panic;
22167
22220
  }
22168
22221
  const result = this.partialState.forgetPreimage(hash.asOpaque(), length);
22169
- logger_logger.trace(`FORGET(${hash}, ${length}) <- ${resultToString(result)}`);
22222
+ logger_logger.trace `FORGET(${hash}, ${length}) <- ${resultToString(result)}`;
22170
22223
  if (result.isOk) {
22171
22224
  regs.set(forget_IN_OUT_REG, HostCallResult.OK);
22172
22225
  }
@@ -22219,11 +22272,11 @@ class New {
22219
22272
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
22220
22273
  // error while reading the memory.
22221
22274
  if (memoryReadResult.isError) {
22222
- logger_logger.trace(`NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}) <- PANIC`);
22275
+ logger_logger.trace `NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}) <- PANIC`;
22223
22276
  return PvmExecution.Panic;
22224
22277
  }
22225
22278
  const assignedId = this.partialState.newService(codeHash.asOpaque(), codeLength, gas, allowance, gratisStorage);
22226
- logger_logger.trace(`NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}) <- ${resultToString(assignedId)}`);
22279
+ logger_logger.trace `NEW(${codeHash}, ${codeLength}, ${gas}, ${allowance}, ${gratisStorage}) <- ${resultToString(assignedId)}`;
22227
22280
  if (assignedId.isOk) {
22228
22281
  regs.set(new_IN_OUT_REG, numbers_tryAsU64(assignedId.ok));
22229
22282
  return;
@@ -22278,11 +22331,11 @@ class Provide {
22278
22331
  const preimage = bytes_BytesBlob.blobFrom(new Uint8Array(length));
22279
22332
  const memoryReadResult = memory.loadInto(preimage.raw, preimageStart);
22280
22333
  if (memoryReadResult.isError) {
22281
- logger_logger.trace(`PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`);
22334
+ logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- PANIC`;
22282
22335
  return PvmExecution.Panic;
22283
22336
  }
22284
22337
  const result = this.partialState.providePreimage(serviceId, preimage);
22285
- logger_logger.trace(`PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${resultToString(result)}`);
22338
+ logger_logger.trace `PROVIDE(${serviceId}, ${preimage.toStringTruncated()}) <- ${resultToString(result)}`;
22286
22339
  if (result.isOk) {
22287
22340
  regs.set(provide_IN_OUT_REG, HostCallResult.OK);
22288
22341
  return;
@@ -22336,11 +22389,11 @@ class Query {
22336
22389
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
22337
22390
  // error while reading the memory.
22338
22391
  if (memoryReadResult.isError) {
22339
- logger_logger.trace(`QUERY(${hash}, ${length}) <- PANIC`);
22392
+ logger_logger.trace `QUERY(${hash}, ${length}) <- PANIC`;
22340
22393
  return PvmExecution.Panic;
22341
22394
  }
22342
22395
  const result = this.partialState.checkPreimageStatus(hash.asOpaque(), length);
22343
- logger_logger.trace(`QUERY(${hash}, ${length}) <- ${result}`);
22396
+ logger_logger.trace `QUERY(${hash}, ${length}) <- ${result}`;
22344
22397
  const zero = numbers_tryAsU64(0n);
22345
22398
  if (result === null) {
22346
22399
  regs.set(IN_OUT_REG_1, HostCallResult.NONE);
@@ -22401,11 +22454,11 @@ class Solicit {
22401
22454
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
22402
22455
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
22403
22456
  if (memoryReadResult.isError) {
22404
- logger_logger.trace(`SOLICIT(${hash}, ${length}) <- PANIC`);
22457
+ logger_logger.trace `SOLICIT(${hash}, ${length}) <- PANIC`;
22405
22458
  return PvmExecution.Panic;
22406
22459
  }
22407
22460
  const result = this.partialState.requestPreimage(hash.asOpaque(), length);
22408
- logger_logger.trace(`SOLICIT(${hash}, ${length}) <- ${resultToString(result)}`);
22461
+ logger_logger.trace `SOLICIT(${hash}, ${length}) <- ${resultToString(result)}`;
22409
22462
  if (result.isOk) {
22410
22463
  regs.set(solicit_IN_OUT_REG, HostCallResult.OK);
22411
22464
  return;
@@ -22477,11 +22530,11 @@ class Transfer {
22477
22530
  const memoryReadResult = memory.loadInto(memo.raw, memoStart);
22478
22531
  // page fault while reading the memory.
22479
22532
  if (memoryReadResult.isError) {
22480
- logger_logger.trace(`TRANSFER(${destination}, ${amount}, ${onTransferGas}, ${memo}) <- PANIC`);
22533
+ logger_logger.trace `TRANSFER(${destination}, ${amount}, ${onTransferGas}, ${memo}) <- PANIC`;
22481
22534
  return PvmExecution.Panic;
22482
22535
  }
22483
22536
  const transferResult = this.partialState.transfer(destination, amount, onTransferGas, memo);
22484
- logger_logger.trace(`TRANSFER(${destination}, ${amount}, ${onTransferGas}, ${memo}) <- ${resultToString(transferResult)}`);
22537
+ logger_logger.trace `TRANSFER(${destination}, ${amount}, ${onTransferGas}, ${memo}) <- ${resultToString(transferResult)}`;
22485
22538
  // All good!
22486
22539
  if (transferResult.isOk) {
22487
22540
  regs.set(transfer_IN_OUT_REG, HostCallResult.OK);
@@ -22540,11 +22593,11 @@ class Upgrade {
22540
22593
  const codeHash = bytes_Bytes.zero(hash_HASH_SIZE);
22541
22594
  const memoryReadResult = memory.loadInto(codeHash.raw, codeHashStart);
22542
22595
  if (memoryReadResult.isError) {
22543
- logger_logger.trace(`UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`);
22596
+ logger_logger.trace `UPGRADE(${codeHash}, ${gas}, ${allowance}) <- PANIC`;
22544
22597
  return PvmExecution.Panic;
22545
22598
  }
22546
22599
  this.partialState.upgradeService(codeHash.asOpaque(), gas, allowance);
22547
- logger_logger.trace(`UPGRADE(${codeHash}, ${gas}, ${allowance})`);
22600
+ logger_logger.trace `UPGRADE(${codeHash}, ${gas}, ${allowance})`;
22548
22601
  regs.set(upgrade_IN_OUT_REG, HostCallResult.OK);
22549
22602
  }
22550
22603
  }
@@ -22578,11 +22631,11 @@ class Yield {
22578
22631
  const hash = bytes_Bytes.zero(hash_HASH_SIZE);
22579
22632
  const memoryReadResult = memory.loadInto(hash.raw, hashStart);
22580
22633
  if (memoryReadResult.isError) {
22581
- logger_logger.trace("YIELD() <- PANIC");
22634
+ logger_logger.trace `YIELD() <- PANIC`;
22582
22635
  return PvmExecution.Panic;
22583
22636
  }
22584
22637
  this.partialState.yield(hash);
22585
- logger_logger.trace(`YIELD(${hash})`);
22638
+ logger_logger.trace `YIELD(${hash})`;
22586
22639
  regs.set(yield_IN_OUT_REG, HostCallResult.OK);
22587
22640
  }
22588
22641
  }
@@ -22624,10 +22677,10 @@ class Fetch {
22624
22677
  const chunk = value === null ? new Uint8Array() : value.raw.subarray(Number(offset), Number(offset + length));
22625
22678
  const storeResult = memory.storeFrom(output, chunk);
22626
22679
  if (storeResult.isError) {
22627
- logger_logger.trace(`FETCH(${kind}) <- PANIC`);
22680
+ logger_logger.trace `FETCH(${kind}) <- PANIC`;
22628
22681
  return PvmExecution.Panic;
22629
22682
  }
22630
- logger_logger.trace(`FETCH(${kind}) <- ${value?.toStringTruncated()}`);
22683
+ logger_logger.trace `FETCH(${kind}) <- ${value?.toStringTruncated()}`;
22631
22684
  // write result
22632
22685
  regs.set(fetch_IN_OUT_REG, value === null ? HostCallResult.NONE : valueLength);
22633
22686
  }
@@ -22735,8 +22788,8 @@ var FetchKind;
22735
22788
 
22736
22789
 
22737
22790
  const info_IN_OUT_REG = 7;
22738
- const OFFSET_REG = Compatibility.isSuite(TestSuite.W3F_DAVXY) ? 9 : 11;
22739
- const LEN_REG = Compatibility.isSuite(TestSuite.W3F_DAVXY) ? 10 : 12;
22791
+ const OFFSET_REG = Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isGreaterOrEqual(GpVersion.V0_7_2) ? 9 : 11;
22792
+ const LEN_REG = Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isGreaterOrEqual(GpVersion.V0_7_2) ? 10 : 12;
22740
22793
  /**
22741
22794
  * Return info about some account.
22742
22795
  *
@@ -22753,7 +22806,7 @@ const LEN_REG = Compatibility.isSuite(TestSuite.W3F_DAVXY) ? 10 : 12;
22753
22806
  * a = last accumulation timeslot
22754
22807
  * p = parent service
22755
22808
  *
22756
- * https://graypaper.fluffylabs.dev/#/38c4e62/338302338302?v=0.7.0
22809
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/333b00333b00?v=0.7.2
22757
22810
  */
22758
22811
  class Info {
22759
22812
  currentServiceId;
@@ -22783,13 +22836,15 @@ class Info {
22783
22836
  const offset = minU64(regs.get(OFFSET_REG), valueLength);
22784
22837
  // l
22785
22838
  const length = minU64(regs.get(LEN_REG), numbers_tryAsU64(valueLength - offset));
22839
+ // NOTE: casting to `Number` is safe in both places, since we are always bounded
22840
+ // by the actual `encodedInfo.length`, which is equal to `96`.
22786
22841
  const chunk = encodedInfo.raw.subarray(Number(offset), Number(offset + length));
22787
22842
  const writeResult = memory.storeFrom(outputStart, chunk);
22788
22843
  if (writeResult.isError) {
22789
- logger_logger.trace(`INFO(${serviceId}) <- PANIC`);
22844
+ logger_logger.trace `INFO(${serviceId}) <- PANIC`;
22790
22845
  return PvmExecution.Panic;
22791
22846
  }
22792
- logger_logger.trace(`INFO(${serviceId}) <- ${bytes_BytesBlob.blobFrom(chunk)}`);
22847
+ logger_logger.trace `INFO(${serviceId}) <- ${bytes_BytesBlob.blobFrom(chunk)}`;
22793
22848
  if (accountInfo === null) {
22794
22849
  regs.set(info_IN_OUT_REG, HostCallResult.NONE);
22795
22850
  return;
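Note (editorial, not part of the published package): a worked example of the offset/length clamping in Info.execute above, with illustrative register values:

  // encodedInfo.length === 96, so valueLength === 96n
  // offset = min(regs[OFFSET_REG], 96n)          e.g. min(120n, 96n) = 96n
  // length = min(regs[LEN_REG], 96n - offset)    e.g. min(10n, 0n)   = 0n
  // chunk  = encodedInfo.raw.subarray(96, 96)    // empty, so the write cannot overrun
  // Because offset and offset + length are always <= 96, converting them with
  // Number() stays far below Number.MAX_SAFE_INTEGER — hence the NOTE in the code.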
@@ -22802,7 +22857,7 @@ class Info {
22802
22857
  *
22803
22858
  * Used exclusively by `info` host call.
22804
22859
  *
22805
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/337602337602?v=0.6.7
22860
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/33920033b500?v=0.7.2
22806
22861
  */
22807
22862
  const codecServiceAccountInfoWithThresholdBalance = descriptors_codec.object({
22808
22863
  codeHash: descriptors_codec.bytes(hash_HASH_SIZE),
@@ -22850,7 +22905,7 @@ class LogHostCall {
22850
22905
  memory.loadInto(target, targetStart);
22851
22906
  }
22852
22907
  memory.loadInto(message, msgStart);
22853
- logger_logger.trace(`SERVICE [${this.currentServiceId}] [${lvl}] ${decoder.decode(target)} ${decoder.decode(message)}`);
22908
+ logger_logger.trace `SERVICE [${this.currentServiceId}] [${lvl}] ${decoder.decode(target)} ${decoder.decode(message)}`;
22854
22909
  return Promise.resolve(undefined);
22855
22910
  }
22856
22911
  }
@@ -22890,12 +22945,12 @@ class Lookup {
22890
22945
  const preImageHash = bytes_Bytes.zero(hash_HASH_SIZE);
22891
22946
  const memoryReadResult = memory.loadInto(preImageHash.raw, hashAddress);
22892
22947
  if (memoryReadResult.isError) {
22893
- logger_logger.trace(`LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`);
22948
+ logger_logger.trace `LOOKUP(${serviceId}, ${preImageHash}) <- PANIC`;
22894
22949
  return PvmExecution.Panic;
22895
22950
  }
22896
22951
  // v
22897
22952
  const preImage = this.account.lookup(serviceId, preImageHash);
22898
- logger_logger.trace(`LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated()}...`);
22953
+ logger_logger.trace `LOOKUP(${serviceId}, ${preImageHash}) <- ${preImage?.toStringTruncated()}...`;
22899
22954
  const preImageLength = preImage === null ? numbers_tryAsU64(0) : numbers_tryAsU64(preImage.raw.length);
22900
22955
  const preimageBlobOffset = regs.get(10);
22901
22956
  const lengthToWrite = regs.get(11);
@@ -22957,7 +23012,7 @@ class Read {
22957
23012
  const rawKey = bytes_BytesBlob.blobFrom(new Uint8Array(storageKeyLengthClamped));
22958
23013
  const memoryReadResult = memory.loadInto(rawKey.raw, storageKeyStartAddress);
22959
23014
  if (memoryReadResult.isError) {
22960
- logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- PANIC`);
23015
+ logger_logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
22961
23016
  return PvmExecution.Panic;
22962
23017
  }
22963
23018
  // v
@@ -22975,15 +23030,15 @@ class Read {
22975
23030
  const chunk = value === null ? new Uint8Array(0) : value.raw.subarray(Number(offset), Number(offset + blobLength));
22976
23031
  const memoryWriteResult = memory.storeFrom(destinationAddress, chunk);
22977
23032
  if (memoryWriteResult.isError) {
22978
- logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- PANIC`);
23033
+ logger_logger.trace `READ(${serviceId}, ${rawKey}) <- PANIC`;
22979
23034
  return PvmExecution.Panic;
22980
23035
  }
22981
23036
  if (value === null) {
22982
- logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- NONE`);
23037
+ logger_logger.trace `READ(${serviceId}, ${rawKey}) <- NONE`;
22983
23038
  regs.set(read_IN_OUT_REG, HostCallResult.NONE);
22984
23039
  return;
22985
23040
  }
22986
- logger_logger.trace(`READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`);
23041
+ logger_logger.trace `READ(${serviceId}, ${rawKey}) <- ${bytes_BytesBlob.blobFrom(chunk).toStringTruncated()}`;
22987
23042
  regs.set(read_IN_OUT_REG, valueLength);
22988
23043
  }
22989
23044
  }
@@ -23026,7 +23081,7 @@ class Write {
23026
23081
  const rawStorageKey = new Uint8Array(storageKeyLengthClamped);
23027
23082
  const keyLoadingResult = memory.loadInto(rawStorageKey, storageKeyStartAddress);
23028
23083
  if (keyLoadingResult.isError) {
23029
- logger_logger.trace("WRITE() <- PANIC");
23084
+ logger_logger.trace `WRITE() <- PANIC`;
23030
23085
  return PvmExecution.Panic;
23031
23086
  }
23032
23087
  // k
@@ -23036,14 +23091,14 @@ class Write {
23036
23091
  const valueLoadingResult = memory.loadInto(value, valueStart);
23037
23092
  // Note [MaSo] this is ok to return bcs if valueLength is 0, then this panic won't happen
23038
23093
  if (valueLoadingResult.isError) {
23039
- logger_logger.trace(`WRITE(${storageKey})}) <- PANIC`);
23094
+ logger_logger.trace `WRITE(${storageKey}) <- PANIC`;
23040
23095
  return PvmExecution.Panic;
23041
23096
  }
23042
23097
  /** https://graypaper.fluffylabs.dev/#/9a08063/33af0133b201?v=0.6.6 */
23043
23098
  const maybeValue = valueLength === 0n ? null : bytes_BytesBlob.blobFrom(value);
23044
23099
  // a
23045
23100
  const result = this.account.write(storageKey, maybeValue);
23046
- logger_logger.trace(`WRITE(${storageKey}, ${maybeValue?.toStringTruncated()}) <- ${resultToString(result)}`);
23101
+ logger_logger.trace `WRITE(${storageKey}, ${maybeValue?.toStringTruncated()}) <- ${resultToString(result)}`;
23047
23102
  if (result.isError) {
23048
23103
  regs.set(write_IN_OUT_REG, HostCallResult.FULL);
23049
23104
  return;
@@ -23235,18 +23290,18 @@ class Accumulate {
23235
23290
  async pvmAccumulateInvocation(slot, serviceId, operands, gas, entropy, inputStateUpdate) {
23236
23291
  const service = this.state.getService(serviceId);
23237
23292
  if (service === null) {
23238
- accumulate_logger.log(`Service with id ${serviceId} not found.`);
23293
+ accumulate_logger.log `Service with id ${serviceId} not found.`;
23239
23294
  return result_Result.error(PvmInvocationError.NoService);
23240
23295
  }
23241
23296
  const codeHash = service.getInfo().codeHash;
23242
23297
  // TODO [ToDr] Should we check that the preimage is still available?
23243
23298
  const code = service.getPreimage(codeHash.asOpaque());
23244
23299
  if (code === null) {
23245
- accumulate_logger.log(`Code with hash ${codeHash} not found for service ${serviceId}.`);
23300
+ accumulate_logger.log `Code with hash ${codeHash} not found for service ${serviceId}.`;
23246
23301
  return result_Result.error(PvmInvocationError.NoPreimage);
23247
23302
  }
23248
23303
  if (code.length > W_C) {
23249
- accumulate_logger.log(`Code with hash ${codeHash} is too long for service ${serviceId}.`);
23304
+ accumulate_logger.log `Code with hash ${codeHash} is too long for service ${serviceId}.`;
23250
23305
  return result_Result.error(PvmInvocationError.PreimageTooLong);
23251
23306
  }
23252
23307
  const nextServiceId = generateNextServiceId({ serviceId, entropy, timeslot: slot }, this.chainSpec);
@@ -23268,10 +23323,10 @@ class Accumulate {
23268
23323
  if (result.hasStatus()) {
23269
23324
  const status = result.status;
23270
23325
  if (status === status_Status.OOG || status === status_Status.PANIC) {
23271
- accumulate_logger.trace(`[${serviceId}] accumulate finished with ${status_Status[status]} reverting to checkpoint.`);
23326
+ accumulate_logger.trace `[${serviceId}] accumulate finished with ${status_Status[status]} reverting to checkpoint.`;
23272
23327
  return result_Result.ok({ stateUpdate: checkpoint, consumedGas: common_tryAsServiceGas(result.consumedGas) });
23273
23328
  }
23274
- accumulate_logger.trace(`[${serviceId}] accumulate finished with ${status_Status[status]}`);
23329
+ accumulate_logger.trace `[${serviceId}] accumulate finished with ${status_Status[status]}`;
23275
23330
  }
23276
23331
  /**
23277
23332
  * PVM invocation returned a hash so we override whatever `yield` host call
@@ -23296,14 +23351,14 @@ class Accumulate {
23296
23351
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/18d70118d701?v=0.6.7
23297
23352
  */
23298
23353
  async accumulateSingleService(serviceId, operands, gasCost, slot, entropy, inputStateUpdate) {
23299
- accumulate_logger.log(`Accumulating service ${serviceId}, items: ${operands.length} at slot: ${slot}.`);
23354
+ accumulate_logger.log `Accumulating service ${serviceId}, items: ${operands.length} at slot: ${slot}.`;
23300
23355
  const result = await this.pvmAccumulateInvocation(slot, serviceId, operands, gasCost, entropy, inputStateUpdate);
23301
23356
  if (result.isError) {
23302
23357
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/2fb6012fb601?v=0.6.7
23303
- accumulate_logger.log(`Accumulation failed for ${serviceId}.`);
23358
+ accumulate_logger.log `Accumulation failed for ${serviceId}.`;
23304
23359
  return { stateUpdate: null, consumedGas: 0n };
23305
23360
  }
23306
- accumulate_logger.log(`Accumulation successful for ${serviceId}. Consumed: ${result.ok.consumedGas}`);
23361
+ accumulate_logger.log `Accumulation successful for ${serviceId}. Consumed: ${result.ok.consumedGas}`;
23307
23362
  return result.ok;
23308
23363
  }
23309
23364
  /**
@@ -23352,6 +23407,7 @@ class Accumulate {
23352
23407
  const serviceIds = accumulateData.getServiceIds();
23353
23408
  let gasCost = common_tryAsServiceGas(0);
23354
23409
  let currentState = inputStateUpdate;
23410
+ const currentManager = (inputStateUpdate.privilegedServices ?? this.state.privilegedServices).manager;
23355
23411
  for (const serviceId of serviceIds) {
23356
23412
  const checkpoint = AccumulationStateUpdate.copyFrom(currentState);
23357
23413
  const { consumedGas, stateUpdate } = await this.accumulateSingleService(serviceId, accumulateData.getOperands(serviceId), accumulateData.getGasCost(serviceId), slot, entropy, currentState);
@@ -23361,6 +23417,21 @@ class Accumulate {
23361
23417
  serviceStatistics.gasUsed = common_tryAsServiceGas(serviceStatistics.gasUsed + consumedGas);
23362
23418
  statistics.set(serviceId, serviceStatistics);
23363
23419
  currentState = stateUpdate === null ? checkpoint : stateUpdate;
23420
+ if (Compatibility.is(GpVersion.V0_7_0) && serviceId === currentManager) {
23421
+ const newV = currentState.privilegedServices?.validatorsManager;
23422
+ if (currentState.privilegedServices !== null && newV !== undefined && serviceIds.includes(newV)) {
23423
+ accumulate_logger.info `Entering completely incorrect code that probably reverts validatorsManager change. This is valid in 0.7.0 only and incorrect in 0.7.1+`;
23424
+ // Since serviceIds already contains newV, this service gets accumulated twice.
23425
+ // To avoid double-counting, we skip stats and gas cost tracking here.
23426
+ // We need this accumulation to get the correct `validatorsManager`
23427
+ const { stateUpdate } = await this.accumulateSingleService(newV, accumulateData.getOperands(newV), accumulateData.getGasCost(newV), slot, entropy, checkpoint);
23428
+ const correctV = stateUpdate?.privilegedServices?.validatorsManager ?? this.state.privilegedServices.validatorsManager;
23429
+ currentState.privilegedServices = PrivilegedServices.create({
23430
+ ...currentState.privilegedServices,
23431
+ validatorsManager: correctV,
23432
+ });
23433
+ }
23434
+ }
23364
23435
  }
23365
23436
  return {
23366
23437
  state: currentState,
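Note (editorial, not part of the published package): a concrete walk-through of the 0.7.0-only branch above, with illustrative service ids:

  // Say currentManager === 1, and accumulating service 1 sets
  // currentState.privilegedServices.validatorsManager to 5, where 5 ∈ serviceIds.
  // The branch then:
  //   1. re-runs accumulation for service 5 against `checkpoint` (the state from
  //      before service 1's changes), deliberately without recording stats or gas;
  //   2. takes the validatorsManager that run produces, falling back to the
  //      pre-block state's value when it yields no privilegedServices update;
  //   3. writes that value back into currentState.privilegedServices.
  // The main loop still accumulates service 5 normally when its turn comes.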
@@ -23505,11 +23576,14 @@ class DeferredTransfers {
23505
23576
  async transition({ pendingTransfers, timeslot, servicesUpdate: inputServicesUpdate, entropy, }) {
23506
23577
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/187a03187a03?v=0.6.7
23507
23578
  const transferStatistics = new Map();
23508
- const services = uniquePreserveOrder(pendingTransfers.flatMap((x) => [x.source, x.destination]));
23579
+ const services = uniquePreserveOrder(pendingTransfers.map((x) => x.destination));
23509
23580
  let currentStateUpdate = AccumulationStateUpdate.new(inputServicesUpdate);
23510
23581
  for (const serviceId of services) {
23511
23582
  const partiallyUpdatedState = new PartiallyUpdatedState(this.state, currentStateUpdate);
23512
- const transfers = pendingTransfers.filter((pendingTransfer) => pendingTransfer.destination === serviceId);
23583
+ // https://graypaper.fluffylabs.dev/#/38c4e62/18750318ae03?v=0.7.0
23584
+ const transfers = pendingTransfers
23585
+ .filter((pendingTransfer) => pendingTransfer.destination === serviceId)
23586
+ .toSorted((a, b) => a.source - b.source);
23513
23587
  const info = partiallyUpdatedState.getServiceInfo(serviceId);
23514
23588
  if (info === null) {
23515
23589
  return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist);
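Note (editorial, not part of the published package): a small example of the new transfer grouping, with illustrative source -> destination pairs:

  // pendingTransfers: [ 7->3, 2->3, 9->4, 2->4 ]
  // services = unique destinations in order of first appearance: [3, 4]
  //   (previously sources were included too, so 7, 2 and 9 would also be visited)
  // destination 3: filtered and sorted by source -> [ 2->3, 7->3 ]
  // destination 4:                                -> [ 2->4, 9->4 ]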
@@ -23529,13 +23603,13 @@ class DeferredTransfers {
23529
23603
  const isCodeCorrect = code !== null && code.length <= W_C;
23530
23604
  if (!hasTransfers || !isCodeCorrect) {
23531
23605
  if (code === null) {
23532
- deferred_transfers_logger.trace(`Skipping ON_TRANSFER execution for service ${serviceId} because code is null`);
23606
+ deferred_transfers_logger.trace `Skipping ON_TRANSFER execution for service ${serviceId} because code is null`;
23533
23607
  }
23534
23608
  else if (!hasTransfers) {
23535
- deferred_transfers_logger.trace(`Skipping ON_TRANSFER execution for service ${serviceId} because there are no transfers`);
23609
+ deferred_transfers_logger.trace `Skipping ON_TRANSFER execution for service ${serviceId} because there are no transfers`;
23536
23610
  }
23537
23611
  else {
23538
- deferred_transfers_logger.trace(`Skipping ON_TRANSFER execution for service ${serviceId} because code is too long`);
23612
+ deferred_transfers_logger.trace `Skipping ON_TRANSFER execution for service ${serviceId} because code is too long`;
23539
23613
  }
23540
23614
  }
23541
23615
  else {
@@ -24163,7 +24237,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
24163
24237
  headerChain.isAncestor(context.lookupAnchorSlot, context.lookupAnchor, context.anchor);
24164
24238
  if (!isInChain) {
24165
24239
  if (process.env.SKIP_LOOKUP_ANCHOR_CHECK !== undefined) {
24166
- verify_contextual_logger.warn(`Lookup anchor check for ${context.lookupAnchor} would fail, but override is active.`);
24240
+ verify_contextual_logger.warn `Lookup anchor check for ${context.lookupAnchor} would fail, but override is active.`;
24167
24241
  }
24168
24242
  else {
24169
24243
  return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
@@ -25050,7 +25124,7 @@ class OnChain {
25050
25124
  reports: availableReports,
25051
25125
  entropy: entropy[0],
25052
25126
  });
25053
- chain_stf_logger.log(timerAccumulate());
25127
+ chain_stf_logger.log `${timerAccumulate()}`;
25054
25128
  if (accumulateResult.isError) {
25055
25129
  return stfError(StfErrorKind.Accumulate, accumulateResult);
25056
25130
  }
@@ -25181,7 +25255,7 @@ class Importer {
25181
25255
  this.stf = new OnChain(spec, state, blocks, hasher);
25182
25256
  this.state = state;
25183
25257
  this.currentHash = currentBestHeaderHash;
25184
- logger.info(`😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`);
25258
+ logger.info `😎 Best time slot: ${state.timeslot} (header hash: ${currentBestHeaderHash})`;
25185
25259
  }
25186
25260
  async importBlock(block, omitSealVerification) {
25187
25261
  const timer = measure("importBlock");
@@ -25189,20 +25263,20 @@ class Importer {
25189
25263
  const maybeBestHeader = await this.importBlockInternal(block, omitSealVerification);
25190
25264
  if (maybeBestHeader.isOk) {
25191
25265
  const bestHeader = maybeBestHeader.ok;
25192
- this.logger.info(`🧊 Best block: #${timeSlot} (${bestHeader.hash})`);
25193
- this.logger.log(timer());
25266
+ this.logger.info `🧊 Best block: #${timeSlot} (${bestHeader.hash})`;
25267
+ this.logger.log `${timer()}`;
25194
25268
  return maybeBestHeader;
25195
25269
  }
25196
- this.logger.log(`❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`);
25197
- this.logger.log(timer());
25270
+ this.logger.log `❌ Rejected block #${timeSlot}: ${resultToString(maybeBestHeader)}`;
25271
+ this.logger.log `${timer()}`;
25198
25272
  return maybeBestHeader;
25199
25273
  }
25200
25274
  async importBlockInternal(block, omitSealVerification = false) {
25201
25275
  const logger = this.logger;
25202
- logger.log("🧱 Attempting to import a new block");
25276
+ logger.log `🧱 Attempting to import a new block`;
25203
25277
  const timerVerify = measure("import:verify");
25204
25278
  const hash = await this.verifier.verifyBlock(block);
25205
- logger.log(timerVerify());
25279
+ logger.log `${timerVerify()}`;
25206
25280
  if (hash.isError) {
25207
25281
  return importerError(ImporterErrorKind.Verifier, hash);
25208
25282
  }
@@ -25222,10 +25296,10 @@ class Importer {
25222
25296
  }
25223
25297
  const timeSlot = block.header.view().timeSlotIndex.materialize();
25224
25298
  const headerHash = hash.ok;
25225
- logger.log(`🧱 Verified block: Got hash ${headerHash} for block at slot ${timeSlot}.`);
25299
+ logger.log `🧱 Verified block: Got hash ${headerHash} for block at slot ${timeSlot}.`;
25226
25300
  const timerStf = measure("import:stf");
25227
25301
  const res = await this.stf.transition(block, headerHash, omitSealVerification);
25228
- logger.log(timerStf());
25302
+ logger.log `${timerStf()}`;
25229
25303
  if (res.isError) {
25230
25304
  return importerError(ImporterErrorKind.Stf, res);
25231
25305
  }
@@ -25234,7 +25308,7 @@ class Importer {
25234
25308
  const timerState = measure("import:state");
25235
25309
  const updateResult = await this.states.updateAndSetState(headerHash, this.state, update);
25236
25310
  if (updateResult.isError) {
25237
- logger.error(`🧱 Unable to update state: ${resultToString(updateResult)}`);
25311
+ logger.error `🧱 Unable to update state: ${resultToString(updateResult)}`;
25238
25312
  return importerError(ImporterErrorKind.Update, updateResult);
25239
25313
  }
25240
25314
  const newState = this.states.getState(headerHash);
@@ -25245,17 +25319,17 @@ class Importer {
25245
25319
  // the state of a parent block to support forks and create a fresh STF.
25246
25320
  this.state.updateBackend(newState.backend);
25247
25321
  this.currentHash = headerHash;
25248
- logger.log(timerState());
25322
+ logger.log `${timerState()}`;
25249
25323
  // insert new state and the block to DB.
25250
25324
  const timerDb = measure("import:db");
25251
25325
  const writeBlocks = this.blocks.insertBlock(new WithHash(headerHash, block));
25252
25326
  // Computation of the state root may happen asynchronously,
25253
25327
  // but we still need to wait for it before next block can be imported
25254
25328
  const stateRoot = await this.states.getStateRoot(newState);
25255
- logger.log(`🧱 Storing post-state-root for ${headerHash}: ${stateRoot}.`);
25329
+ logger.log `🧱 Storing post-state-root for ${headerHash}: ${stateRoot}.`;
25256
25330
  const writeStateRoot = this.blocks.setPostStateRoot(headerHash, stateRoot);
25257
25331
  await Promise.all([writeBlocks, writeStateRoot]);
25258
- logger.log(timerDb());
25332
+ logger.log `${timerDb()}`;
25259
25333
  // finally update the best block
25260
25334
  await this.blocks.setBestHeaderHash(headerHash);
25261
25335
  return result_Result.ok(new WithHash(headerHash, block.header.view()));
@@ -25357,7 +25431,7 @@ async function spawnWorkerGeneric(bootstrapPath, logger, mainReadyName, mainRead
25357
25431
  const worker = new Worker(bootstrapPath);
25358
25432
  const machine = stateMachineMain(`main->${mainReadyName}`, mainReadyName, mainReadyState);
25359
25433
  const channel = await MessageChannelStateMachine.createAndTransferChannel(machine, worker);
25360
- logger.trace(`[${machine.name}] Worker spawned ${channel.currentState()}`);
25434
+ logger.trace `[${machine.name}] Worker spawned ${channel.currentState()}`;
25361
25435
  return channel;
25362
25436
  }
25363
25437
 
@@ -25470,7 +25544,7 @@ class MainReady extends State {
25470
25544
  if (res instanceof Uint8Array) {
25471
25545
  return bytes_Bytes.fromBlob(res, hash_HASH_SIZE).asOpaque();
25472
25546
  }
25473
- state_machine_logger.error(`Invalid response for getBestStateRootHash. Expected Uint8Array, got: ${res}`);
25547
+ state_machine_logger.error `Invalid response for getBestStateRootHash. Expected Uint8Array, got: ${res}`;
25474
25548
  return bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
25475
25549
  }
25476
25550
  finish(channel) {
@@ -25518,7 +25592,7 @@ class ImporterReady extends State {
25518
25592
  }
25519
25593
  async getStateEntries(hash) {
25520
25594
  if (this.importer === null) {
25521
- state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
25595
+ state_machine_logger.error `${this.constructor.name} importer not initialized yet!`;
25522
25596
  await new Promise((resolve) => {
25523
25597
  this.onImporter.once(resolve);
25524
25598
  });
@@ -25532,7 +25606,7 @@ class ImporterReady extends State {
25532
25606
  response: encoded.raw,
25533
25607
  };
25534
25608
  }
25535
- state_machine_logger.error(`${this.constructor.name} got invalid request type: ${JSON.stringify(hash)}.`);
25609
+ state_machine_logger.error `${this.constructor.name} got invalid request type: ${JSON.stringify(hash)}.`;
25536
25610
  return {
25537
25611
  response: null,
25538
25612
  };
@@ -25552,7 +25626,7 @@ class ImporterReady extends State {
25552
25626
  }
25553
25627
  async importBlock(block) {
25554
25628
  if (this.importer === null) {
25555
- state_machine_logger.error(`${this.constructor.name} importer not initialized yet!`);
25629
+ state_machine_logger.error `${this.constructor.name} importer not initialized yet!`;
25556
25630
  await new Promise((resolve) => {
25557
25631
  this.onImporter.once(resolve);
25558
25632
  });
@@ -25572,8 +25646,8 @@ class ImporterReady extends State {
25572
25646
  }
25573
25647
  }
25574
25648
  catch (e) {
25575
- state_machine_logger.error(`Failed to import block: ${e}`);
25576
- state_machine_logger.error(`${e instanceof Error ? e.stack : ""}`);
25649
+ state_machine_logger.error `Failed to import block: ${e}`;
25650
+ state_machine_logger.error `${e instanceof Error ? e.stack : ""}`;
25577
25651
  response = result_Result.error(`${e}`);
25578
25652
  }
25579
25653
  const encoded = encoder_Encoder.encodeObject(importBlockResultCodec, response);
@@ -25581,7 +25655,7 @@ class ImporterReady extends State {
25581
25655
  response: encoded.raw,
25582
25656
  };
25583
25657
  }
25584
- state_machine_logger.error(`${this.constructor.name} got invalid request type: ${JSON.stringify(block)}.`);
25658
+ state_machine_logger.error `${this.constructor.name} got invalid request type: ${JSON.stringify(block)}.`;
25585
25659
  return {
25586
25660
  response: null,
25587
25661
  };
@@ -25593,7 +25667,7 @@ class ImporterReady extends State {
25593
25667
  this.onBlock.emit(blockView);
25594
25668
  }
25595
25669
  else {
25596
- state_machine_logger.error(`${this.constructor.name} got invalid signal type: ${JSON.stringify(block)}.`);
25670
+ state_machine_logger.error `${this.constructor.name} got invalid signal type: ${JSON.stringify(block)}.`;
25597
25671
  }
25598
25672
  }
25599
25673
  async endWork() {
@@ -25620,7 +25694,7 @@ if (!external_node_worker_threads_namespaceObject.isMainThread) {
25620
25694
  Logger.configureAll(process.env.JAM_LOG ?? "", Level.LOG);
25621
25695
  const machine = importerStateMachine();
25622
25696
  const channel = channel_MessageChannelStateMachine.receiveChannel(machine, external_node_worker_threads_namespaceObject.parentPort);
25623
- channel.then((channel) => main(channel)).catch((e) => importer_logger.error(e));
25697
+ channel.then((channel) => main(channel)).catch((e) => importer_logger.error `${e}`);
25624
25698
  }
25625
25699
  const keccakHasher = KeccakHasher.create();
25626
25700
  async function createImporter(config) {
@@ -25642,7 +25716,7 @@ async function createImporter(config) {
25642
25716
  */
25643
25717
  async function main(channel) {
25644
25718
  const wasmPromise = initAll();
25645
- importer_logger.info(`📥 Importer starting ${channel.currentState()}`);
25719
+ importer_logger.info `📥 Importer starting ${channel.currentState()}`;
25646
25720
  // Await the configuration object
25647
25721
  const ready = await channel.waitForState("ready(importer)");
25648
25722
  let closeDb = async () => { };
@@ -25654,7 +25728,7 @@ async function main(channel) {
25654
25728
  };
25655
25729
  // TODO [ToDr] this is shit, since we have circular dependency.
25656
25730
  worker.setImporter(importer);
25657
- importer_logger.info("📥 Importer waiting for blocks.");
25731
+ importer_logger.info `📥 Importer waiting for blocks.`;
25658
25732
  worker.onBlock.on(async (block) => {
25659
25733
  const res = await importer.importBlock(block, config.omitSealVerification);
25660
25734
  if (res.isOk) {
@@ -25663,7 +25737,7 @@ async function main(channel) {
25663
25737
  });
25664
25738
  await wasmPromise;
25665
25739
  });
25666
- importer_logger.info("📥 Importer finished. Closing channel.");
25740
+ importer_logger.info `📥 Importer finished. Closing channel.`;
25667
25741
  // close the database
25668
25742
  await closeDb();
25669
25743
  // Close the comms to gracefully close the app.