@typeberry/jam 0.2.0-e767e74 → 0.2.0-f506473

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3840,7 +3840,7 @@ function resultToString(res) {
  if (res.isOk) {
  return `OK: ${typeof res.ok === "symbol" ? res.ok.toString() : res.ok}`;
  }
- return `${res.details}\nError: ${maybeTaggedErrorToString(res.error)}`;
+ return `${res.details()}\nError: ${maybeTaggedErrorToString(res.error)}`;
  }
  /** An indication of two possible outcomes returned from a function. */
  const result_Result = {
@@ -3854,7 +3854,7 @@ const result_Result = {
  };
  },
  /** Create new [`Result`] with `Error` status. */
- error: (error, details = "") => {
+ error: (error, details) => {
  debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
  return {
  isOk: false,
@@ -3973,7 +3973,7 @@ function deepEqual(actual, expected, { context = [], errorsCollector, ignore = [
  }
  if (actual.isError && expected.isError) {
  deepEqual(actual.error, expected.error, { context: ctx.concat(["error"]), errorsCollector: errors, ignore });
- deepEqual(actual.details, expected.details, {
+ deepEqual(actual.details(), expected.details(), {
  context: ctx.concat(["details"]),
  errorsCollector: errors,
  // display details when error does not match
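
Note: the three hunks above change how a `Result` carries its error details. `result_Result.error` now takes the details as a callback rather than an eagerly built string (the empty-string default is gone), and consumers such as `resultToString` and `deepEqual` call `res.details()` to materialize the message. A minimal usage sketch, assuming only the bundle-internal helpers visible in this diff (`result_Result`, `resultToString`); the error code and message values are made up for illustration:

    // sketch only – mirrors the new lazy-details API from the hunks above
    const res = result_Result.error("bad-input", () => `expected a hash, got ${someValue}`);
    if (res.isError) {
      // the message string is only built when details() is actually invoked
      console.log(resultToString(res));
    }
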
@@ -5214,8 +5214,8 @@ class decoder_Decoder {
  /**
  * Create a new [`Decoder`] instance given a raw array of bytes as a source.
  */
- static fromBlob(source) {
- return new decoder_Decoder(source);
+ static fromBlob(source, context) {
+ return new decoder_Decoder(source, undefined, context);
  }
  /**
  * Decode a single object from all of the source bytes.
@@ -5510,7 +5510,7 @@ class decoder_Decoder {
  ensureHasBytes(bytes) {
  debug_check `${bytes >= 0} Negative number of bytes given.`;
  if (this.offset + bytes > this.source.length) {
- throw new Error(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
+ throw new EndOfDataError(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
  }
  }
  }
@@ -5524,6 +5524,8 @@ function decodeVariableLengthExtraBytes(firstByte) {
  }
  return 0;
  }
+ class EndOfDataError extends Error {
+ }

  ;// CONCATENATED MODULE: ./packages/core/codec/skip.ts

@@ -6315,6 +6317,7 @@ class SequenceView {



+
  /**
  * For sequences with unknown length we need to give some size hint.
  * TODO [ToDr] [opti] This value should be updated when we run some real-data bechmarks.
@@ -6564,6 +6567,9 @@ function forEachDescriptor(descriptors, f) {
  f(k, descriptors[k]);
  }
  catch (e) {
+ if (e instanceof EndOfDataError) {
+ throw new EndOfDataError(`${key}: ${e}`);
+ }
  throw new Error(`${key}: ${e}`);
  }
  }
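
Note: these codec hunks add a dedicated `EndOfDataError` (a plain `Error` subclass) that `ensureHasBytes` throws when the source runs out of bytes, and that `forEachDescriptor` re-throws with the offending field name prepended, so callers can tell a truncated input apart from other decoding failures. A caller-side sketch, assuming only the bundle-internal `decoder_Decoder` and `EndOfDataError` shown above; `bytes` and `decodeHeader` are hypothetical placeholders:

    const decoder = decoder_Decoder.fromBlob(bytes, "header"); // `context` is the new optional second argument
    try {
      decodeHeader(decoder);
    } catch (e) {
      if (e instanceof EndOfDataError) {
        // input was shorter than the codec expected – truncated rather than malformed
      } else {
        throw e;
      }
    }
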
@@ -9470,6 +9476,7 @@ function accumulationOutputComparator(a, b) {

  ;// CONCATENATED MODULE: ./packages/jam/block/gp-constants.ts

+
  /**
  * This file lists all of the constants defined in the GrayPaper appendix.
  *
@@ -9480,7 +9487,7 @@ function accumulationOutputComparator(a, b) {
  * here are only temporarily for convenience. When we figure out better names
  * and places for these this file will be eradicated.
  *
- * https://graypaper.fluffylabs.dev/#/579bd12/413000413000
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/442300442300?v=0.7.2
  */
  /** `G_I`: The gas allocated to invoke a work-package’s Is-Authorized logic. */
  const G_I = 50_000_000;
@@ -9496,8 +9503,8 @@ const S = 1024;
  const T = 128;
  /** `W_A`: The maximum size of is-authorized code in octets. */
  const W_A = 64_000;
- /** `W_B`: The maximum size of an encoded work-package with extrinsic data and imports. */
- const W_B = 13_794_305;
+ /** `W_B`: The maximum size of the concatenated variable-size blobs, extrinsics and imported segments of a work-package, in octets */
+ const W_B = Compatibility.isGreaterOrEqual(GpVersion.V0_7_2) ? 13_791_360 : 13_794_305;
  /** `W_C`: The maximum size of service code in octets. */
  const W_C = 4_000_000;
  /** `W_M`: The maximum number of imports in a work-package. */
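
Note: the new `W_B` definition shows the pattern this release uses for Gray Paper 0.7.2 compatibility — the constant is selected at module load time based on the configured protocol version. A sketch of the same pattern with a made-up constant, assuming `Compatibility.isGreaterOrEqual` and `GpVersion` behave as their names suggest:

    // hypothetical constant, following the W_B pattern above
    const W_EXAMPLE = Compatibility.isGreaterOrEqual(GpVersion.V0_7_2)
      ? 1_000 // value mandated from GP 0.7.2 onwards
      : 2_000; // legacy value for earlier versions
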
@@ -10596,31 +10603,29 @@ var UpdatePreimageKind;
  * 3. Update `LookupHistory` with given value.
  */
  class UpdatePreimage {
- serviceId;
  action;
- constructor(serviceId, action) {
- this.serviceId = serviceId;
+ constructor(action) {
  this.action = action;
  }
  /** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
- static provide({ serviceId, preimage, slot, }) {
- return new UpdatePreimage(serviceId, {
+ static provide({ preimage, slot }) {
+ return new UpdatePreimage({
  kind: UpdatePreimageKind.Provide,
  preimage,
  slot,
  });
  }
  /** The preimage should be removed completely from the database. */
- static remove({ serviceId, hash, length }) {
- return new UpdatePreimage(serviceId, {
+ static remove({ hash, length }) {
+ return new UpdatePreimage({
  kind: UpdatePreimageKind.Remove,
  hash,
  length,
  });
  }
  /** Update the lookup history of some preimage or add a new one (request). */
- static updateOrAdd({ serviceId, lookupHistory }) {
- return new UpdatePreimage(serviceId, {
+ static updateOrAdd({ lookupHistory }) {
+ return new UpdatePreimage({
  kind: UpdatePreimageKind.UpdateOrAdd,
  item: lookupHistory,
  });
@@ -10657,23 +10662,21 @@ var UpdateServiceKind;
  UpdateServiceKind[UpdateServiceKind["Create"] = 1] = "Create";
  })(UpdateServiceKind || (UpdateServiceKind = {}));
  /**
- * Update service info of a particular `ServiceId` or create a new one.
+ * Update service info or create a new one.
  */
  class UpdateService {
- serviceId;
  action;
- constructor(serviceId, action) {
- this.serviceId = serviceId;
+ constructor(action) {
  this.action = action;
  }
- static update({ serviceId, serviceInfo }) {
- return new UpdateService(serviceId, {
+ static update({ serviceInfo }) {
+ return new UpdateService({
  kind: UpdateServiceKind.Update,
  account: serviceInfo,
  });
  }
- static create({ serviceId, serviceInfo, lookupHistory, }) {
- return new UpdateService(serviceId, {
+ static create({ serviceInfo, lookupHistory, }) {
+ return new UpdateService({
  kind: UpdateServiceKind.Create,
  account: serviceInfo,
  lookupHistory,
@@ -10694,17 +10697,15 @@ var UpdateStorageKind;
  * Can either create/modify an entry or remove it.
  */
  class UpdateStorage {
- serviceId;
  action;
- constructor(serviceId, action) {
- this.serviceId = serviceId;
+ constructor(action) {
  this.action = action;
  }
- static set({ serviceId, storage }) {
- return new UpdateStorage(serviceId, { kind: UpdateStorageKind.Set, storage });
+ static set({ storage }) {
+ return new UpdateStorage({ kind: UpdateStorageKind.Set, storage });
  }
- static remove({ serviceId, key }) {
- return new UpdateStorage(serviceId, { kind: UpdateStorageKind.Remove, key });
+ static remove({ key }) {
+ return new UpdateStorage({ kind: UpdateStorageKind.Remove, key });
  }
  get key() {
  if (this.action.kind === UpdateStorageKind.Remove) {
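
Note: `UpdatePreimage`, `UpdateService` and `UpdateStorage` no longer carry a `serviceId`; as the state-transition hunks further below show, update collections are now keyed by service id in a `Map`, with one list of update objects per service. A sketch of building such a collection, using only constructors that appear in this diff; `serviceId`, `key` and `value` are placeholders:

    // sketch only – per-service grouping replaces the old flat { serviceId, action } arrays
    const storageUpdates = new Map();
    const updates = storageUpdates.get(serviceId) ?? [];
    updates.push(UpdateStorage.set({ storage: StorageItem.create({ key, value }) }));
    updates.push(UpdateStorage.remove({ key }));
    storageUpdates.set(serviceId, updates);
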
@@ -10913,12 +10914,12 @@ class in_memory_state_InMemoryState extends WithDebug {
  * Modify the state and apply a single state update.
  */
  applyUpdate(update) {
- const { servicesRemoved, servicesUpdates, preimages, storage, ...rest } = update;
+ const { removed, created: _, updated, preimages, storage, ...rest } = update;
  // just assign all other variables
  Object.assign(this, rest);
  // and update the services state
  let result;
- result = this.updateServices(servicesUpdates);
+ result = this.updateServices(updated);
  if (result.isError) {
  return result;
  }
@@ -10930,7 +10931,7 @@ class in_memory_state_InMemoryState extends WithDebug {
  if (result.isError) {
  return result;
  }
- this.removeServices(servicesRemoved);
+ this.removeServices(removed);
  return result_Result.ok(result_OK);
  }
  removeServices(servicesRemoved) {
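
Note: `applyUpdate` now destructures the renamed update fields. Based on the destructuring above and on `AccumulationStateUpdate.empty()` further below, the update object appears to have roughly the following shape (inferred from this diff only, not from documentation):

    // inferred shape of a services state update in this version
    const update = {
      created: [],          // newly created services
      updated: new Map(),   // serviceId -> UpdateService
      removed: [],          // serviceIds to delete
      preimages: new Map(), // serviceId -> UpdatePreimage[]
      storage: new Map(),   // serviceId -> UpdateStorage[]
      // ...plus plain state fields copied via Object.assign
    };
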
@@ -10939,89 +10940,102 @@ class in_memory_state_InMemoryState extends WithDebug {
10939
10940
  this.services.delete(serviceId);
10940
10941
  }
10941
10942
  }
10942
- updateStorage(storage) {
10943
- for (const { serviceId, action } of storage ?? []) {
10944
- const { kind } = action;
10945
- const service = this.services.get(serviceId);
10946
- if (service === undefined) {
10947
- return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update storage of non-existing service: ${serviceId}`);
10948
- }
10949
- if (kind === UpdateStorageKind.Set) {
10950
- const { key, value } = action.storage;
10951
- service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
10952
- }
10953
- else if (kind === UpdateStorageKind.Remove) {
10954
- const { key } = action;
10955
- debug_check `
10943
+ updateStorage(storageUpdates) {
10944
+ if (storageUpdates === undefined) {
10945
+ return result_Result.ok(result_OK);
10946
+ }
10947
+ for (const [serviceId, updates] of storageUpdates.entries()) {
10948
+ for (const update of updates) {
10949
+ const { kind } = update.action;
10950
+ const service = this.services.get(serviceId);
10951
+ if (service === undefined) {
10952
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update storage of non-existing service: ${serviceId}`);
10953
+ }
10954
+ if (kind === UpdateStorageKind.Set) {
10955
+ const { key, value } = update.action.storage;
10956
+ service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
10957
+ }
10958
+ else if (kind === UpdateStorageKind.Remove) {
10959
+ const { key } = update.action;
10960
+ debug_check `
10956
10961
  ${service.data.storage.has(key.toString())}
10957
- Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
10962
+ Attempting to remove non-existing storage item at ${serviceId}: ${update.action.key}
10958
10963
  `;
10959
- service.data.storage.delete(key.toString());
10960
- }
10961
- else {
10962
- debug_assertNever(kind);
10964
+ service.data.storage.delete(key.toString());
10965
+ }
10966
+ else {
10967
+ debug_assertNever(kind);
10968
+ }
10963
10969
  }
10964
10970
  }
10965
10971
  return result_Result.ok(result_OK);
10966
10972
  }
10967
- updatePreimages(preimages) {
10968
- for (const { serviceId, action } of preimages ?? []) {
10973
+ updatePreimages(preimagesUpdates) {
10974
+ if (preimagesUpdates === undefined) {
10975
+ return result_Result.ok(result_OK);
10976
+ }
10977
+ for (const [serviceId, updates] of preimagesUpdates.entries()) {
10969
10978
  const service = this.services.get(serviceId);
10970
10979
  if (service === undefined) {
10971
- return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update preimage of non-existing service: ${serviceId}`);
10972
- }
10973
- const { kind } = action;
10974
- if (kind === UpdatePreimageKind.Provide) {
10975
- const { preimage, slot } = action;
10976
- if (service.data.preimages.has(preimage.hash)) {
10977
- return result_Result.error(in_memory_state_UpdateError.PreimageExists, `Overwriting existing preimage at ${serviceId}: ${preimage}`);
10978
- }
10979
- service.data.preimages.set(preimage.hash, preimage);
10980
- if (slot !== null) {
10981
- const lookupHistory = service.data.lookupHistory.get(preimage.hash);
10982
- const length = numbers_tryAsU32(preimage.blob.length);
10983
- const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
10984
- if (lookupHistory === undefined) {
10985
- // no lookup history for that preimage at all (edge case, should be requested)
10986
- service.data.lookupHistory.set(preimage.hash, [lookup]);
10980
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update preimage of non-existing service: ${serviceId}`);
10981
+ }
10982
+ for (const update of updates) {
10983
+ const { kind } = update.action;
10984
+ if (kind === UpdatePreimageKind.Provide) {
10985
+ const { preimage, slot } = update.action;
10986
+ if (service.data.preimages.has(preimage.hash)) {
10987
+ return result_Result.error(in_memory_state_UpdateError.PreimageExists, () => `Overwriting existing preimage at ${serviceId}: ${preimage}`);
10987
10988
  }
10988
- else {
10989
- // insert or replace exiting entry
10990
- const index = lookupHistory.map((x) => x.length).indexOf(length);
10991
- lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
10989
+ service.data.preimages.set(preimage.hash, preimage);
10990
+ if (slot !== null) {
10991
+ const lookupHistory = service.data.lookupHistory.get(preimage.hash);
10992
+ const length = numbers_tryAsU32(preimage.blob.length);
10993
+ const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
10994
+ if (lookupHistory === undefined) {
10995
+ // no lookup history for that preimage at all (edge case, should be requested)
10996
+ service.data.lookupHistory.set(preimage.hash, [lookup]);
10997
+ }
10998
+ else {
10999
+ // insert or replace exiting entry
11000
+ const index = lookupHistory.map((x) => x.length).indexOf(length);
11001
+ lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
11002
+ }
10992
11003
  }
10993
11004
  }
10994
- }
10995
- else if (kind === UpdatePreimageKind.Remove) {
10996
- const { hash, length } = action;
10997
- service.data.preimages.delete(hash);
10998
- const history = service.data.lookupHistory.get(hash) ?? [];
10999
- const idx = history.map((x) => x.length).indexOf(length);
11000
- if (idx !== -1) {
11001
- history.splice(idx, 1);
11005
+ else if (kind === UpdatePreimageKind.Remove) {
11006
+ const { hash, length } = update.action;
11007
+ service.data.preimages.delete(hash);
11008
+ const history = service.data.lookupHistory.get(hash) ?? [];
11009
+ const idx = history.map((x) => x.length).indexOf(length);
11010
+ if (idx !== -1) {
11011
+ history.splice(idx, 1);
11012
+ }
11013
+ }
11014
+ else if (kind === UpdatePreimageKind.UpdateOrAdd) {
11015
+ const { item } = update.action;
11016
+ const history = service.data.lookupHistory.get(item.hash) ?? [];
11017
+ const existingIdx = history.map((x) => x.length).indexOf(item.length);
11018
+ const removeCount = existingIdx === -1 ? 0 : 1;
11019
+ history.splice(existingIdx, removeCount, item);
11020
+ service.data.lookupHistory.set(item.hash, history);
11021
+ }
11022
+ else {
11023
+ debug_assertNever(kind);
11002
11024
  }
11003
- }
11004
- else if (kind === UpdatePreimageKind.UpdateOrAdd) {
11005
- const { item } = action;
11006
- const history = service.data.lookupHistory.get(item.hash) ?? [];
11007
- const existingIdx = history.map((x) => x.length).indexOf(item.length);
11008
- const removeCount = existingIdx === -1 ? 0 : 1;
11009
- history.splice(existingIdx, removeCount, item);
11010
- service.data.lookupHistory.set(item.hash, history);
11011
- }
11012
- else {
11013
- debug_assertNever(kind);
11014
11025
  }
11015
11026
  }
11016
11027
  return result_Result.ok(result_OK);
11017
11028
  }
11018
11029
  updateServices(servicesUpdates) {
11019
- for (const { serviceId, action } of servicesUpdates ?? []) {
11020
- const { kind, account } = action;
11030
+ if (servicesUpdates === undefined) {
11031
+ return result_Result.ok(result_OK);
11032
+ }
11033
+ for (const [serviceId, update] of servicesUpdates.entries()) {
11034
+ const { kind, account } = update.action;
11021
11035
  if (kind === UpdateServiceKind.Create) {
11022
- const { lookupHistory } = action;
11036
+ const { lookupHistory } = update.action;
11023
11037
  if (this.services.has(serviceId)) {
11024
- return result_Result.error(in_memory_state_UpdateError.DuplicateService, `${serviceId} already exists!`);
11038
+ return result_Result.error(in_memory_state_UpdateError.DuplicateService, () => `${serviceId} already exists!`);
11025
11039
  }
11026
11040
  this.services.set(serviceId, new InMemoryService(serviceId, {
11027
11041
  info: account,
@@ -11033,7 +11047,7 @@ class in_memory_state_InMemoryState extends WithDebug {
11033
11047
  else if (kind === UpdateServiceKind.Update) {
11034
11048
  const existingService = this.services.get(serviceId);
11035
11049
  if (existingService === undefined) {
11036
- return result_Result.error(in_memory_state_UpdateError.NoService, `Cannot update ${serviceId} because it does not exist.`);
11050
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Cannot update ${serviceId} because it does not exist.`);
11037
11051
  }
11038
11052
  existingService.data.info = account;
11039
11053
  }
@@ -12280,7 +12294,6 @@ function getKeccakTrieHasher(hasher) {
12280
12294
 
12281
12295
 
12282
12296
 
12283
-
12284
12297
  /** What should be done with that key? */
12285
12298
  var StateEntryUpdateAction;
12286
12299
  (function (StateEntryUpdateAction) {
@@ -12296,76 +12309,88 @@ function* serializeStateUpdate(spec, blake2b, update) {
12296
12309
  yield* serializeBasicKeys(spec, update);
12297
12310
  const encode = (codec, val) => encoder_Encoder.encodeObject(codec, val, spec);
12298
12311
  // then let's proceed with service updates
12299
- yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
12312
+ yield* serializeServiceUpdates(update.updated, encode, blake2b);
12300
12313
  yield* serializePreimages(update.preimages, encode, blake2b);
12301
12314
  yield* serializeStorage(update.storage, blake2b);
12302
- yield* serializeRemovedServices(update.servicesRemoved);
12315
+ yield* serializeRemovedServices(update.removed);
12303
12316
  }
12304
12317
  function* serializeRemovedServices(servicesRemoved) {
12305
- for (const serviceId of servicesRemoved ?? []) {
12318
+ if (servicesRemoved === undefined) {
12319
+ return;
12320
+ }
12321
+ for (const serviceId of servicesRemoved) {
12306
12322
  // TODO [ToDr] what about all data associated with a service?
12307
12323
  const codec = serialize_serialize.serviceData(serviceId);
12308
12324
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
12309
12325
  }
12310
12326
  }
12311
- function* serializeStorage(storage, blake2b) {
12312
- for (const { action, serviceId } of storage ?? []) {
12313
- switch (action.kind) {
12314
- case UpdateStorageKind.Set: {
12315
- const key = action.storage.key;
12316
- const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
12317
- yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
12318
- break;
12319
- }
12320
- case UpdateStorageKind.Remove: {
12321
- const key = action.key;
12322
- const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
12323
- yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
12324
- break;
12327
+ function* serializeStorage(storageUpdates, blake2b) {
12328
+ if (storageUpdates === undefined) {
12329
+ return;
12330
+ }
12331
+ for (const [serviceId, updates] of storageUpdates.entries()) {
12332
+ for (const { action } of updates) {
12333
+ switch (action.kind) {
12334
+ case UpdateStorageKind.Set: {
12335
+ const key = action.storage.key;
12336
+ const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
12337
+ yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
12338
+ break;
12339
+ }
12340
+ case UpdateStorageKind.Remove: {
12341
+ const key = action.key;
12342
+ const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
12343
+ yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
12344
+ break;
12345
+ }
12325
12346
  }
12326
- default:
12327
- debug_assertNever(action);
12328
12347
  }
12329
12348
  }
12330
12349
  }
12331
- function* serializePreimages(preimages, encode, blake2b) {
12332
- for (const { action, serviceId } of preimages ?? []) {
12333
- switch (action.kind) {
12334
- case UpdatePreimageKind.Provide: {
12335
- const { hash, blob } = action.preimage;
12336
- const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
12337
- yield [StateEntryUpdateAction.Insert, codec.key, blob];
12338
- if (action.slot !== null) {
12339
- const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, numbers_tryAsU32(blob.length));
12340
- yield [
12341
- StateEntryUpdateAction.Insert,
12342
- codec2.key,
12343
- encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
12344
- ];
12350
+ function* serializePreimages(preimagesUpdates, encode, blake2b) {
12351
+ if (preimagesUpdates === undefined) {
12352
+ return;
12353
+ }
12354
+ for (const [serviceId, updates] of preimagesUpdates.entries()) {
12355
+ for (const { action } of updates) {
12356
+ switch (action.kind) {
12357
+ case UpdatePreimageKind.Provide: {
12358
+ const { hash, blob } = action.preimage;
12359
+ const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
12360
+ yield [StateEntryUpdateAction.Insert, codec.key, blob];
12361
+ if (action.slot !== null) {
12362
+ const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, numbers_tryAsU32(blob.length));
12363
+ yield [
12364
+ StateEntryUpdateAction.Insert,
12365
+ codec2.key,
12366
+ encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
12367
+ ];
12368
+ }
12369
+ break;
12370
+ }
12371
+ case UpdatePreimageKind.UpdateOrAdd: {
12372
+ const { hash, length, slots } = action.item;
12373
+ const codec = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
12374
+ yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
12375
+ break;
12376
+ }
12377
+ case UpdatePreimageKind.Remove: {
12378
+ const { hash, length } = action;
12379
+ const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
12380
+ yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
12381
+ const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
12382
+ yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
12383
+ break;
12345
12384
  }
12346
- break;
12347
- }
12348
- case UpdatePreimageKind.UpdateOrAdd: {
12349
- const { hash, length, slots } = action.item;
12350
- const codec = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
12351
- yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
12352
- break;
12353
- }
12354
- case UpdatePreimageKind.Remove: {
12355
- const { hash, length } = action;
12356
- const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
12357
- yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
12358
- const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
12359
- yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
12360
- break;
12361
12385
  }
12362
- default:
12363
- debug_assertNever(action);
12364
12386
  }
12365
12387
  }
12366
12388
  }
12367
12389
  function* serializeServiceUpdates(servicesUpdates, encode, blake2b) {
12368
- for (const { action, serviceId } of servicesUpdates ?? []) {
12390
+ if (servicesUpdates === undefined) {
12391
+ return;
12392
+ }
12393
+ for (const [serviceId, { action }] of servicesUpdates.entries()) {
12369
12394
  // new service being created or updated
12370
12395
  const codec = serialize_serialize.serviceData(serviceId);
12371
12396
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, action.account)];
@@ -12652,13 +12677,13 @@ class LeafDb {
12652
12677
  */
12653
12678
  static fromLeavesBlob(blob, db) {
12654
12679
  if (blob.length % TRIE_NODE_BYTES !== 0) {
12655
- return result_Result.error(LeafDbError.InvalidLeafData, `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
12680
+ return result_Result.error(LeafDbError.InvalidLeafData, () => `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
12656
12681
  }
12657
12682
  const leaves = SortedSet.fromArray(leafComparator, []);
12658
12683
  for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
12659
12684
  const node = new TrieNode(nodeData.raw);
12660
12685
  if (node.getNodeType() === NodeType.Branch) {
12661
- return result_Result.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
12686
+ return result_Result.error(LeafDbError.InvalidLeafData, () => `Branch node detected: ${nodeData}`);
12662
12687
  }
12663
12688
  leaves.insert(node.asLeafNode());
12664
12689
  }
@@ -13377,7 +13402,7 @@ class LmdbStates {
13377
13402
  }
13378
13403
  catch (e) {
13379
13404
  logger.error `${e}`;
13380
- return result_Result.error(StateUpdateError.Commit);
13405
+ return result_Result.error(StateUpdateError.Commit, () => `Failed to commit state update: ${e}`);
13381
13406
  }
13382
13407
  return result_Result.ok(result_OK);
13383
13408
  }
@@ -14097,32 +14122,33 @@ class Preimages {
14097
14122
  }
14098
14123
  if (prevPreimage.requester > currPreimage.requester ||
14099
14124
  currPreimage.blob.compare(prevPreimage.blob).isLessOrEqual()) {
14100
- return result_Result.error(PreimagesErrorCode.PreimagesNotSortedUnique);
14125
+ return result_Result.error(PreimagesErrorCode.PreimagesNotSortedUnique, () => `Preimages not sorted/unique at index ${i}`);
14101
14126
  }
14102
14127
  }
14103
14128
  const { preimages, slot } = input;
14104
- const pendingChanges = [];
14129
+ const pendingChanges = new Map();
14105
14130
  // select preimages for integration
14106
14131
  for (const preimage of preimages) {
14107
14132
  const { requester, blob } = preimage;
14108
14133
  const hash = this.blake2b.hashBytes(blob).asOpaque();
14109
14134
  const service = this.state.getService(requester);
14110
14135
  if (service === null) {
14111
- return result_Result.error(PreimagesErrorCode.AccountNotFound);
14136
+ return result_Result.error(PreimagesErrorCode.AccountNotFound, () => `Service not found: ${requester}`);
14112
14137
  }
14113
14138
  const hasPreimage = service.hasPreimage(hash);
14114
14139
  const slots = service.getLookupHistory(hash, numbers_tryAsU32(blob.length));
14115
14140
  // https://graypaper.fluffylabs.dev/#/5f542d7/181800181900
14116
14141
  // https://graypaper.fluffylabs.dev/#/5f542d7/116f0011a500
14117
14142
  if (hasPreimage || slots === null || !LookupHistoryItem.isRequested(slots)) {
14118
- return result_Result.error(PreimagesErrorCode.PreimageUnneeded);
14143
+ return result_Result.error(PreimagesErrorCode.PreimageUnneeded, () => `Preimage unneeded: requester=${requester}, hash=${hash}, hasPreimage=${hasPreimage}, isRequested=${slots !== null && LookupHistoryItem.isRequested(slots)}`);
14119
14144
  }
14120
14145
  // https://graypaper.fluffylabs.dev/#/5f542d7/18c00018f300
14121
- pendingChanges.push(UpdatePreimage.provide({
14122
- serviceId: requester,
14146
+ const updates = pendingChanges.get(requester) ?? [];
14147
+ updates.push(UpdatePreimage.provide({
14123
14148
  preimage: PreimageItem.create({ hash, blob }),
14124
14149
  slot,
14125
14150
  }));
14151
+ pendingChanges.set(requester, updates);
14126
14152
  }
14127
14153
  return result_Result.ok({
14128
14154
  preimages: pendingChanges,
@@ -14160,7 +14186,7 @@ class BlockVerifier {
14160
14186
  const headerHash = this.hasher.header(headerView);
14161
14187
  // check if current block is already imported
14162
14188
  if (this.blocks.getHeader(headerHash.hash) !== null) {
14163
- return result_Result.error(BlockVerifierError.AlreadyImported, `Block ${headerHash.hash} is already imported.`);
14189
+ return result_Result.error(BlockVerifierError.AlreadyImported, () => `Block ${headerHash.hash} is already imported.`);
14164
14190
  }
14165
14191
  // Check if parent block exists.
14166
14192
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c82000c8200?v=0.6.5
@@ -14170,14 +14196,14 @@ class BlockVerifier {
14170
14196
  if (!parentHash.isEqualTo(block_verifier_ZERO_HASH)) {
14171
14197
  const parentBlock = this.blocks.getHeader(parentHash);
14172
14198
  if (parentBlock === null) {
14173
- return result_Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
14199
+ return result_Result.error(BlockVerifierError.ParentNotFound, () => `Parent ${parentHash.toString()} not found`);
14174
14200
  }
14175
14201
  // Check if the time slot index is consecutive and not from future.
14176
14202
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c02010c0201?v=0.6.5
14177
14203
  const timeslot = headerView.timeSlotIndex.materialize();
14178
14204
  const parentTimeslot = parentBlock.timeSlotIndex.materialize();
14179
14205
  if (timeslot <= parentTimeslot) {
14180
- return result_Result.error(BlockVerifierError.InvalidTimeSlot, `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
14206
+ return result_Result.error(BlockVerifierError.InvalidTimeSlot, () => `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
14181
14207
  }
14182
14208
  }
14183
14209
  // Check if extrinsic is valid.
@@ -14185,17 +14211,17 @@ class BlockVerifier {
14185
14211
  const extrinsicHash = headerView.extrinsicHash.materialize();
14186
14212
  const extrinsicMerkleCommitment = this.hasher.extrinsic(block.extrinsic.view());
14187
14213
  if (!extrinsicHash.isEqualTo(extrinsicMerkleCommitment.hash)) {
14188
- return result_Result.error(BlockVerifierError.InvalidExtrinsic, `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
14214
+ return result_Result.error(BlockVerifierError.InvalidExtrinsic, () => `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
14189
14215
  }
14190
14216
  // Check if the state root is valid.
14191
14217
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c18010c1801?v=0.6.5
14192
14218
  const stateRoot = headerView.priorStateRoot.materialize();
14193
14219
  const posteriorStateRoot = this.blocks.getPostStateRoot(parentHash);
14194
14220
  if (posteriorStateRoot === null) {
14195
- return result_Result.error(BlockVerifierError.StateRootNotFound, `Posterior state root ${parentHash.toString()} not found`);
14221
+ return result_Result.error(BlockVerifierError.StateRootNotFound, () => `Posterior state root ${parentHash.toString()} not found`);
14196
14222
  }
14197
14223
  if (!stateRoot.isEqualTo(posteriorStateRoot)) {
14198
- return result_Result.error(BlockVerifierError.InvalidStateRoot, `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
14224
+ return result_Result.error(BlockVerifierError.InvalidStateRoot, () => `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
14199
14225
  }
14200
14226
  return result_Result.ok(headerHash.hash);
14201
14227
  }
@@ -14320,7 +14346,7 @@ class Disputes {
14320
14346
  // check if culprits are sorted by key
14321
14347
  // https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
14322
14348
  if (!isUniqueSortedBy(disputes.culprits, "key")) {
14323
- return result_Result.error(DisputesErrorCode.CulpritsNotSortedUnique);
14349
+ return result_Result.error(DisputesErrorCode.CulpritsNotSortedUnique, () => "Culprits are not uniquely sorted by key");
14324
14350
  }
14325
14351
  const culpritsLength = disputes.culprits.length;
14326
14352
  for (let i = 0; i < culpritsLength; i++) {
@@ -14329,24 +14355,24 @@ class Disputes {
14329
14355
  // https://graypaper.fluffylabs.dev/#/579bd12/125501125501
14330
14356
  const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
14331
14357
  if (isInPunishSet) {
14332
- return result_Result.error(DisputesErrorCode.OffenderAlreadyReported);
14358
+ return result_Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: culprit ${i}, key=${key}`);
14333
14359
  }
14334
14360
  // check if the guarantor key is correct
14335
14361
  // https://graypaper.fluffylabs.dev/#/85129da/125501125501?v=0.6.3
14336
14362
  if (!allValidatorKeys.has(key)) {
14337
- return result_Result.error(DisputesErrorCode.BadGuarantorKey);
14363
+ return result_Result.error(DisputesErrorCode.BadGuarantorKey, () => `Bad guarantor key: culprit ${i}, key=${key}`);
14338
14364
  }
14339
14365
  // verify if the culprit will be in new bad set
14340
14366
  // https://graypaper.fluffylabs.dev/#/579bd12/124601124601
14341
14367
  const isInNewBadSet = newItems.asDictionaries().badSet.has(workReportHash);
14342
14368
  if (!isInNewBadSet) {
14343
- return result_Result.error(DisputesErrorCode.CulpritsVerdictNotBad);
14369
+ return result_Result.error(DisputesErrorCode.CulpritsVerdictNotBad, () => `Culprit verdict not bad: culprit ${i}, work report=${workReportHash}`);
14344
14370
  }
14345
14371
  // verify culprit signature
14346
14372
  // https://graypaper.fluffylabs.dev/#/579bd12/125c01125c01
14347
14373
  const result = verificationResult.culprits[i];
14348
14374
  if (!result?.isValid) {
14349
- return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for culprit: ${i}`);
14375
+ return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for culprit: ${i}`);
14350
14376
  }
14351
14377
  }
14352
14378
  return result_Result.ok(null);
@@ -14355,7 +14381,7 @@ class Disputes {
14355
14381
  // check if faults are sorted by key
14356
14382
  // https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
14357
14383
  if (!isUniqueSortedBy(disputes.faults, "key")) {
14358
- return result_Result.error(DisputesErrorCode.FaultsNotSortedUnique);
14384
+ return result_Result.error(DisputesErrorCode.FaultsNotSortedUnique, () => "Faults are not uniquely sorted by key");
14359
14385
  }
14360
14386
  const faultsLength = disputes.faults.length;
14361
14387
  for (let i = 0; i < faultsLength; i++) {
@@ -14364,12 +14390,12 @@ class Disputes {
14364
14390
  // https://graypaper.fluffylabs.dev/#/579bd12/12a20112a201
14365
14391
  const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
14366
14392
  if (isInPunishSet) {
14367
- return result_Result.error(DisputesErrorCode.OffenderAlreadyReported);
14393
+ return result_Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: fault ${i}, key=${key}`);
14368
14394
  }
14369
14395
  // check if the auditor key is correct
14370
14396
  // https://graypaper.fluffylabs.dev/#/85129da/12a20112a201?v=0.6.3
14371
14397
  if (!allValidatorKeys.has(key)) {
14372
- return result_Result.error(DisputesErrorCode.BadAuditorKey);
14398
+ return result_Result.error(DisputesErrorCode.BadAuditorKey, () => `Bad auditor key: fault ${i}, key=${key}`);
14373
14399
  }
14374
14400
  // verify if the fault will be included in new good/bad set
14375
14401
  // it may be not correct as in GP there is "iff" what means it should be rather
@@ -14381,14 +14407,14 @@ class Disputes {
14381
14407
  const isInNewGoodSet = goodSet.has(workReportHash);
14382
14408
  const isInNewBadSet = badSet.has(workReportHash);
14383
14409
  if (isInNewGoodSet || !isInNewBadSet) {
14384
- return result_Result.error(DisputesErrorCode.FaultVerdictWrong);
14410
+ return result_Result.error(DisputesErrorCode.FaultVerdictWrong, () => `Fault verdict wrong: fault ${i}, work report=${workReportHash}, inGood=${isInNewGoodSet}, inBad=${isInNewBadSet}`);
14385
14411
  }
14386
14412
  }
14387
14413
  // verify fault signature. Verification was done earlier, here we only check the result.
14388
14414
  // https://graypaper.fluffylabs.dev/#/579bd12/12a90112a901
14389
14415
  const result = verificationResult.faults[i];
14390
14416
  if (!result.isValid) {
14391
- return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for fault: ${i}`);
14417
+ return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for fault: ${i}`);
14392
14418
  }
14393
14419
  }
14394
14420
  return result_Result.ok(null);
@@ -14397,32 +14423,32 @@ class Disputes {
14397
14423
  // check if verdicts are correctly sorted
14398
14424
  // https://graypaper.fluffylabs.dev/#/579bd12/12c40112c401
14399
14425
  if (!isUniqueSortedBy(disputes.verdicts, "workReportHash")) {
14400
- return result_Result.error(DisputesErrorCode.VerdictsNotSortedUnique);
14426
+ return result_Result.error(DisputesErrorCode.VerdictsNotSortedUnique, () => "Verdicts are not uniquely sorted by work report hash");
14401
14427
  }
14402
14428
  // check if judgement are correctly sorted
14403
14429
  // https://graypaper.fluffylabs.dev/#/579bd12/123702123802
14404
14430
  if (disputes.verdicts.some((verdict) => !isUniqueSortedByIndex(verdict.votes))) {
14405
- return result_Result.error(DisputesErrorCode.JudgementsNotSortedUnique);
14431
+ return result_Result.error(DisputesErrorCode.JudgementsNotSortedUnique, () => "Judgements are not uniquely sorted by index");
14406
14432
  }
14407
14433
  const currentEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
14408
14434
  let voteSignatureIndex = 0;
14409
14435
  for (const { votesEpoch, votes } of disputes.verdicts) {
14410
14436
  // https://graypaper.fluffylabs.dev/#/579bd12/12bb0012bc00
14411
14437
  if (votesEpoch !== currentEpoch && votesEpoch + 1 !== currentEpoch) {
14412
- return result_Result.error(DisputesErrorCode.BadJudgementAge);
14438
+ return result_Result.error(DisputesErrorCode.BadJudgementAge, () => `Bad judgement age: epoch=${votesEpoch}, current=${currentEpoch}`);
14413
14439
  }
14414
14440
  const k = votesEpoch === currentEpoch ? this.state.currentValidatorData : this.state.previousValidatorData;
14415
14441
  for (const { index } of votes) {
14416
14442
  const key = k[index]?.ed25519;
14417
14443
  // no particular GP fragment but I think we don't believe in ghosts
14418
14444
  if (key === undefined) {
14419
- return result_Result.error(DisputesErrorCode.BadValidatorIndex);
14445
+ return result_Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index: ${index} in epoch ${votesEpoch}`);
14420
14446
  }
14421
14447
  // verify vote signature. Verification was done earlier, here we only check the result.
14422
14448
  // https://graypaper.fluffylabs.dev/#/579bd12/12cd0012cd00
14423
14449
  const result = verificationResult.judgements[voteSignatureIndex];
14424
14450
  if (!result.isValid) {
14425
- return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for judgement: ${voteSignatureIndex}`);
14451
+ return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for judgement: ${voteSignatureIndex}`);
14426
14452
  }
14427
14453
  voteSignatureIndex += 1;
14428
14454
  }
@@ -14438,7 +14464,7 @@ class Disputes {
14438
14464
  const isInBadSet = badSet.has(verdict.workReportHash);
14439
14465
  const isInWonkySet = wonkySet.has(verdict.workReportHash);
14440
14466
  if (isInGoodSet || isInBadSet || isInWonkySet) {
14441
- return result_Result.error(DisputesErrorCode.AlreadyJudged);
14467
+ return result_Result.error(DisputesErrorCode.AlreadyJudged, () => `Work report already judged: ${verdict.workReportHash}`);
14442
14468
  }
14443
14469
  }
14444
14470
  return result_Result.ok(null);
@@ -14469,7 +14495,7 @@ class Disputes {
14469
14495
  // https://graypaper.fluffylabs.dev/#/579bd12/12f10212fc02
14470
14496
  const f = disputes.faults.find((x) => x.workReportHash.isEqualTo(r));
14471
14497
  if (f === undefined) {
14472
- return result_Result.error(DisputesErrorCode.NotEnoughFaults);
14498
+ return result_Result.error(DisputesErrorCode.NotEnoughFaults, () => `Not enough faults for work report: ${r}`);
14473
14499
  }
14474
14500
  }
14475
14501
  else if (sum === 0) {
@@ -14478,13 +14504,13 @@ class Disputes {
14478
14504
  const c1 = disputes.culprits.find((x) => x.workReportHash.isEqualTo(r));
14479
14505
  const c2 = disputes.culprits.findLast((x) => x.workReportHash.isEqualTo(r));
14480
14506
  if (c1 === c2) {
14481
- return result_Result.error(DisputesErrorCode.NotEnoughCulprits);
14507
+ return result_Result.error(DisputesErrorCode.NotEnoughCulprits, () => `Not enough culprits for work report: ${r}`);
14482
14508
  }
14483
14509
  }
14484
14510
  else if (sum !== this.chainSpec.thirdOfValidators) {
14485
14511
  // positive votes count is not correct
14486
14512
  // https://graypaper.fluffylabs.dev/#/579bd12/125002128102
14487
- return result_Result.error(DisputesErrorCode.BadVoteSplit);
14513
+ return result_Result.error(DisputesErrorCode.BadVoteSplit, () => `Bad vote split: sum=${sum}, expected=${this.chainSpec.thirdOfValidators} for work report ${r}`);
14488
14514
  }
14489
14515
  }
14490
14516
  return result_Result.ok(null);
@@ -14572,7 +14598,7 @@ class Disputes {
14572
14598
  const validator = k[j.index];
14573
14599
  // no particular GP fragment but I think we don't believe in ghosts
14574
14600
  if (validator === undefined) {
14575
- return result_Result.error(DisputesErrorCode.BadValidatorIndex);
14601
+ return result_Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index in signature verification: ${j.index}`);
14576
14602
  }
14577
14603
  const key = validator.ed25519;
14578
14604
  // verify vote signature
@@ -14680,7 +14706,7 @@ const ringCommitmentCache = [];
14680
14706
  async function verifySeal(bandersnatch, authorKey, signature, payload, encodedUnsealedHeader) {
14681
14707
  const sealResult = await bandersnatch.verifySeal(authorKey.raw, signature.raw, payload.raw, encodedUnsealedHeader.raw);
14682
14708
  if (sealResult[RESULT_INDEX] === ResultValues.Error) {
14683
- return result_Result.error(null);
14709
+ return result_Result.error(null, () => "Bandersnatch VRF seal verification failed");
14684
14710
  }
14685
14711
  return result_Result.ok(bytes_Bytes.fromBlob(sealResult.subarray(1), hash_HASH_SIZE).asOpaque());
14686
14712
  }
@@ -14706,7 +14732,7 @@ function getRingCommitment(bandersnatch, validators) {
14706
14732
  async function getRingCommitmentNoCache(bandersnatch, keys) {
14707
14733
  const commitmentResult = await bandersnatch.getRingCommitment(keys.raw);
14708
14734
  if (commitmentResult[RESULT_INDEX] === ResultValues.Error) {
14709
- return result_Result.error(null);
14735
+ return result_Result.error(null, () => "Bandersnatch ring commitment calculation failed");
14710
14736
  }
14711
14737
  return result_Result.ok(bytes_Bytes.fromBlob(commitmentResult.subarray(1), bandersnatch_BANDERSNATCH_RING_ROOT_BYTES).asOpaque());
14712
14738
  }
@@ -14881,7 +14907,7 @@ class Safrole {
14881
14907
  epochRoot: epochRootResult.ok,
14882
14908
  });
14883
14909
  }
14884
- return result_Result.error(SafroleErrorCode.IncorrectData);
14910
+ return result_Result.error(SafroleErrorCode.IncorrectData, () => "Safrole: failed to get epoch root for validator keys");
14885
14911
  }
14886
14912
  /**
14887
14913
  * Ticket sequencer that is used in standard mode
@@ -14972,10 +14998,10 @@ class Safrole {
14972
14998
  for (let i = 1; i < ticketsLength; i++) {
14973
14999
  const order = tickets[i - 1].id.compare(tickets[i].id);
14974
15000
  if (order.isEqual()) {
14975
- return result_Result.error(SafroleErrorCode.DuplicateTicket);
15001
+ return result_Result.error(SafroleErrorCode.DuplicateTicket, () => `Safrole: duplicate ticket found at index ${i}`);
14976
15002
  }
14977
15003
  if (order.isGreater()) {
14978
- return result_Result.error(SafroleErrorCode.BadTicketOrder);
15004
+ return result_Result.error(SafroleErrorCode.BadTicketOrder, () => `Safrole: bad ticket order at index ${i}`);
14979
15005
  }
14980
15006
  }
14981
15007
  return result_Result.ok(null);
@@ -15002,7 +15028,7 @@ class Safrole {
15002
15028
  attempt: ticket.attempt,
15003
15029
  }));
15004
15030
  if (!verificationResult.every((x) => x.isValid)) {
15005
- return result_Result.error(SafroleErrorCode.BadTicketProof);
15031
+ return result_Result.error(SafroleErrorCode.BadTicketProof, () => "Safrole: invalid ticket proof in extrinsic");
15006
15032
  }
15007
15033
  /**
15008
15034
  * Verify if tickets are sorted and unique
@@ -15011,7 +15037,7 @@ class Safrole {
15011
15037
  */
15012
15038
  const ticketsVerifcationResult = this.verifyTickets(tickets);
15013
15039
  if (ticketsVerifcationResult.isError) {
15014
- return result_Result.error(ticketsVerifcationResult.error);
15040
+ return result_Result.error(ticketsVerifcationResult.error, ticketsVerifcationResult.details);
15015
15041
  }
15016
15042
  if (this.isEpochChanged(timeslot)) {
15017
15043
  return result_Result.ok(tickets);
@@ -15020,7 +15046,7 @@ class Safrole {
15020
15046
  const ticketsFromExtrinsic = SortedSet.fromSortedArray(ticketComparator, tickets);
15021
15047
  const mergedTickets = SortedSet.fromTwoSortedCollections(ticketsFromState, ticketsFromExtrinsic);
15022
15048
  if (ticketsFromState.length + ticketsFromExtrinsic.length !== mergedTickets.length) {
15023
- return result_Result.error(SafroleErrorCode.DuplicateTicket);
15049
+ return result_Result.error(SafroleErrorCode.DuplicateTicket, () => "Safrole: duplicate ticket when merging state and extrinsic tickets");
15024
15050
  }
15025
15051
  /**
15026
15052
  * Remove tickets if size of accumulator exceeds E (epoch length).
@@ -15089,24 +15115,24 @@ class Safrole {
15089
15115
  }
15090
15116
  async transition(input) {
15091
15117
  if (this.state.timeslot >= input.slot) {
15092
- return result_Result.error(SafroleErrorCode.BadSlot);
15118
+ return result_Result.error(SafroleErrorCode.BadSlot, () => `Safrole: bad slot, state timeslot ${this.state.timeslot} >= input slot ${input.slot}`);
15093
15119
  }
15094
15120
  if (!this.isExtrinsicLengthValid(input.slot, input.extrinsic)) {
15095
- return result_Result.error(SafroleErrorCode.UnexpectedTicket);
15121
+ return result_Result.error(SafroleErrorCode.UnexpectedTicket, () => `Safrole: unexpected ticket, invalid extrinsic length ${input.extrinsic.length}`);
15096
15122
  }
15097
15123
  if (!this.areTicketAttemptsValid(input.extrinsic)) {
15098
- return result_Result.error(SafroleErrorCode.BadTicketAttempt);
15124
+ return result_Result.error(SafroleErrorCode.BadTicketAttempt, () => "Safrole: bad ticket attempt value in extrinsic");
15099
15125
  }
15100
15126
  const validatorKeysResult = await this.getValidatorKeys(input.slot, input.punishSet);
15101
15127
  if (validatorKeysResult.isError) {
15102
- return result_Result.error(validatorKeysResult.error);
15128
+ return result_Result.error(validatorKeysResult.error, validatorKeysResult.details);
15103
15129
  }
15104
15130
  const { nextValidatorData, currentValidatorData, previousValidatorData, epochRoot } = validatorKeysResult.ok;
15105
15131
  const entropy = this.getEntropy(input.slot, input.entropy);
15106
15132
  const sealingKeySeries = this.getSlotKeySequence(input.slot, currentValidatorData, entropy[2]);
15107
15133
  const newTicketsAccumulatorResult = await this.getNewTicketAccumulator(input.slot, input.extrinsic, this.state.nextValidatorData, epochRoot, entropy[2]);
15108
15134
  if (newTicketsAccumulatorResult.isError) {
15109
- return result_Result.error(newTicketsAccumulatorResult.error);
15135
+ return result_Result.error(newTicketsAccumulatorResult.error, newTicketsAccumulatorResult.details);
15110
15136
  }
15111
15137
  const stateUpdate = {
15112
15138
  nextValidatorData,
@@ -15140,14 +15166,14 @@ function compareWithEncoding(chainSpec, error, actual, expected, codec) {
15140
15166
  if (actual === null || expected === null) {
15141
15167
  // if one of them is `null`, both need to be.
15142
15168
  if (actual !== expected) {
15143
- return result_Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
15169
+ return result_Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
15144
15170
  }
15145
15171
  return result_Result.ok(result_OK);
15146
15172
  }
15147
15173
  // compare the literal encoding.
15148
15174
  const encoded = encoder_Encoder.encodeObject(codec, actual, chainSpec);
15149
15175
  if (!encoded.isEqualTo(expected.encoded())) {
15150
- return result_Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
15176
+ return result_Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
15151
15177
  }
15152
15178
  return result_Result.ok(result_OK);
15153
15179
  }
@@ -15190,7 +15216,7 @@ class SafroleSeal {
15190
15216
  const blockAuthorKey = state.currentValidatorData.at(blockAuthorIndex)?.bandersnatch;
15191
15217
  const entropySourceResult = await bandersnatch_vrf.verifySeal(await this.bandersnatch, blockAuthorKey ?? BANDERSNATCH_ZERO_KEY, headerView.entropySource.materialize(), payload, bytes_BytesBlob.blobFromNumbers([]));
15192
15218
  if (entropySourceResult.isError) {
15193
- return result_Result.error(SafroleSealError.IncorrectEntropySource);
15219
+ return result_Result.error(SafroleSealError.IncorrectEntropySource, () => "Safrole: incorrect entropy source in header seal");
15194
15220
  }
15195
15221
  return result_Result.ok(entropySourceResult.ok);
15196
15222
  }
@@ -15199,7 +15225,7 @@ class SafroleSeal {
15199
15225
  const validatorIndex = headerView.bandersnatchBlockAuthorIndex.materialize();
15200
15226
  const authorKeys = state.currentValidatorData.at(validatorIndex);
15201
15227
  if (authorKeys === undefined) {
15202
- return result_Result.error(SafroleSealError.InvalidValidatorIndex);
15228
+ return result_Result.error(SafroleSealError.InvalidValidatorIndex, () => `Safrole: invalid validator index ${validatorIndex}`);
15203
15229
  }
15204
15230
  const timeSlot = headerView.timeSlotIndex.materialize();
15205
15231
  const sealingKeys = state.sealingKeySeries;
@@ -15218,10 +15244,10 @@ class SafroleSeal {
15218
15244
  const authorKey = validatorData.bandersnatch;
15219
15245
  const result = await bandersnatch_vrf.verifySeal(await this.bandersnatch, authorKey ?? BANDERSNATCH_ZERO_KEY, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
15220
15246
  if (result.isError) {
15221
- return result_Result.error(SafroleSealError.IncorrectSeal);
15247
+ return result_Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with ticket");
15222
15248
  }
15223
15249
  if (ticket === undefined || !ticket.id.isEqualTo(result.ok)) {
15224
- return result_Result.error(SafroleSealError.InvalidTicket);
15250
+ return result_Result.error(SafroleSealError.InvalidTicket, () => `Safrole: invalid ticket, expected ${ticket?.id} got ${result.ok}`);
15225
15251
  }
15226
15252
  return result_Result.ok(result.ok);
15227
15253
  }
@@ -15231,13 +15257,13 @@ class SafroleSeal {
15231
15257
  const sealingKey = keys.at(index);
15232
15258
  const authorBandersnatchKey = authorKey.bandersnatch;
15233
15259
  if (sealingKey === undefined || !sealingKey.isEqualTo(authorBandersnatchKey)) {
15234
- return result_Result.error(SafroleSealError.InvalidValidator, `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
15260
+ return result_Result.error(SafroleSealError.InvalidValidator, () => `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
15235
15261
  }
15236
15262
  // verify seal correctness
15237
15263
  const payload = bytes_BytesBlob.blobFromParts(JAM_FALLBACK_SEAL, entropy.raw);
15238
15264
  const result = await bandersnatch_vrf.verifySeal(await this.bandersnatch, authorBandersnatchKey, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
15239
15265
  if (result.isError) {
15240
- return result_Result.error(SafroleSealError.IncorrectSeal);
15266
+ return result_Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with keys");
15241
15267
  }
15242
15268
  return result_Result.ok(result.ok);
15243
15269
  }
@@ -15280,6 +15306,14 @@ async function getRootHash(yieldedRoots) {
15280
15306
 
15281
15307
 
15282
15308
  const InsufficientFundsError = "insufficient funds";
15309
+ /** Deep clone of a map with array. */
15310
+ function deepCloneMapWithArray(map) {
15311
+ const cloned = [];
15312
+ for (const [k, v] of map.entries()) {
15313
+ cloned.push([k, v.slice()]);
15314
+ }
15315
+ return new Map(cloned);
15316
+ }
15283
15317
  /**
15284
15318
  * State updates that currently accumulating service produced.
15285
15319
  *
@@ -15309,10 +15343,11 @@ class AccumulationStateUpdate {
15309
15343
  /** Create new empty state update. */
15310
15344
  static empty() {
15311
15345
  return new AccumulationStateUpdate({
15312
- servicesUpdates: [],
15313
- servicesRemoved: [],
15314
- preimages: [],
15315
- storage: [],
15346
+ created: [],
15347
+ updated: new Map(),
15348
+ removed: [],
15349
+ preimages: new Map(),
15350
+ storage: new Map(),
15316
15351
  }, []);
15317
15352
  }
15318
15353
  /** Create a state update with some existing, yet uncommited services updates. */
@@ -15324,10 +15359,13 @@ class AccumulationStateUpdate {
15324
15359
  /** Create a copy of another `StateUpdate`. Used by checkpoints. */
15325
15360
  static copyFrom(from) {
15326
15361
  const serviceUpdates = {
15327
- servicesUpdates: [...from.services.servicesUpdates],
15328
- servicesRemoved: [...from.services.servicesRemoved],
15329
- preimages: [...from.services.preimages],
15330
- storage: [...from.services.storage],
15362
+ // shallow copy
15363
+ created: [...from.services.created],
15364
+ updated: new Map(from.services.updated),
15365
+ removed: [...from.services.removed],
15366
+ // deep copy
15367
+ preimages: deepCloneMapWithArray(from.services.preimages),
15368
+ storage: deepCloneMapWithArray(from.services.storage),
15331
15369
  };
15332
15370
  const transfers = [...from.transfers];
15333
15371
  const update = new AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
@@ -15375,9 +15413,9 @@ class PartiallyUpdatedState {
15375
15413
  if (destination === null) {
15376
15414
  return null;
15377
15415
  }
15378
- const maybeNewService = this.stateUpdate.services.servicesUpdates.find((update) => update.serviceId === destination);
15379
- if (maybeNewService !== undefined) {
15380
- return maybeNewService.action.account;
15416
+ const maybeUpdatedServiceInfo = this.stateUpdate.services.updated.get(destination);
15417
+ if (maybeUpdatedServiceInfo !== undefined) {
15418
+ return maybeUpdatedServiceInfo.action.account;
15381
15419
  }
15382
15420
  const maybeService = this.state.getService(destination);
15383
15421
  if (maybeService === null) {
@@ -15386,7 +15424,8 @@ class PartiallyUpdatedState {
15386
15424
  return maybeService.getInfo();
15387
15425
  }
15388
15426
  getStorage(serviceId, rawKey) {
15389
- const item = this.stateUpdate.services.storage.find((x) => x.serviceId === serviceId && x.key.isEqualTo(rawKey));
15427
+ const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
15428
+ const item = storages.find((x) => x.key.isEqualTo(rawKey));
15390
15429
  if (item !== undefined) {
15391
15430
  return item.value;
15392
15431
  }
@@ -15401,10 +15440,11 @@ class PartiallyUpdatedState {
15401
15440
  * the existence in `preimages` map.
15402
15441
  */
15403
15442
  hasPreimage(serviceId, hash) {
15404
- const providedPreimage = this.stateUpdate.services.preimages.find(
15443
+ const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
15444
+ const providedPreimage = preimages.find(
15405
15445
  // we ignore the action here, since if there is <any> update on that
15406
15446
  // hash it means it has to exist, right?
15407
- (p) => p.serviceId === serviceId && p.hash.isEqualTo(hash));
15447
+ (p) => p.hash.isEqualTo(hash));
15408
15448
  if (providedPreimage !== undefined) {
15409
15449
  return true;
15410
15450
  }
@@ -15417,7 +15457,8 @@ class PartiallyUpdatedState {
15417
15457
  }
15418
15458
  getPreimage(serviceId, hash) {
15419
15459
  // TODO [ToDr] Should we verify availability here?
15420
- const freshlyProvided = this.stateUpdate.services.preimages.find((x) => x.serviceId === serviceId && x.hash.isEqualTo(hash));
15460
+ const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
15461
+ const freshlyProvided = preimages.find((x) => x.hash.isEqualTo(hash));
15421
15462
  if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
15422
15463
  return freshlyProvided.action.preimage.blob;
15423
15464
  }
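The per-service preimage and storage updates move from flat arrays with an embedded serviceId to Map<serviceId, update[]>, so reads become a get with an empty-array fallback and writes follow a get-or-init / mutate / set-back pattern. A hedged sketch of both paths (types and field names are placeholders for illustration):

// Sketch: read and write paths for a Map keyed by service id, holding an array of updates.
type ServiceId = number;
interface PreimageUpdate {
  hash: string;
}

const preimages = new Map<ServiceId, PreimageUpdate[]>();

function findPreimage(serviceId: ServiceId, hash: string): PreimageUpdate | undefined {
  const updates = preimages.get(serviceId) ?? []; // fallback keeps the read path branch-free
  return updates.find((p) => p.hash === hash);
}

function pushPreimage(serviceId: ServiceId, update: PreimageUpdate): void {
  const updates = preimages.get(serviceId) ?? [];
  updates.push(update);
  preimages.set(serviceId, updates); // needed when the array did not exist yet, harmless otherwise
}

pushPreimage(1, { hash: "0xabc" });
console.log(findPreimage(1, "0xabc") !== undefined); // true
console.log(findPreimage(2, "0xabc") !== undefined); // false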
@@ -15426,10 +15467,11 @@ class PartiallyUpdatedState {
15426
15467
  }
15427
15468
  /** Get status of a preimage of current service taking into account any updates. */
15428
15469
  getLookupHistory(currentTimeslot, serviceId, hash, length) {
15470
+ const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
15429
15471
  // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
15430
15472
  // the same state update. We should however switch to proper "updated state"
15431
15473
  // representation soon.
15432
- const updatedPreimage = this.stateUpdate.services.preimages.findLast((update) => update.serviceId === serviceId && update.hash.isEqualTo(hash) && BigInt(update.length) === length);
15474
+ const updatedPreimage = preimages.findLast((update) => update.hash.isEqualTo(hash) && BigInt(update.length) === length);
15433
15475
  const stateFallback = () => {
15434
15476
  // fallback to state lookup
15435
15477
  const service = this.state.getService(serviceId);
@@ -15466,14 +15508,15 @@ class PartiallyUpdatedState {
15466
15508
  /* State update functions. */
15467
15509
  updateStorage(serviceId, key, value) {
15468
15510
  const update = value === null
15469
- ? UpdateStorage.remove({ serviceId, key })
15511
+ ? UpdateStorage.remove({ key })
15470
15512
  : UpdateStorage.set({
15471
- serviceId,
15472
15513
  storage: StorageItem.create({ key, value }),
15473
15514
  });
15474
- const index = this.stateUpdate.services.storage.findIndex((x) => x.serviceId === update.serviceId && x.key.isEqualTo(key));
15515
+ const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
15516
+ const index = storages.findIndex((x) => x.key.isEqualTo(key));
15475
15517
  const count = index === -1 ? 0 : 1;
15476
- this.stateUpdate.services.storage.splice(index, count, update);
15518
+ storages.splice(index, count, update);
15519
+ this.stateUpdate.services.storage.set(serviceId, storages);
15477
15520
  }
15478
15521
  /**
15479
15522
  * Update a preimage.
@@ -15481,8 +15524,10 @@ class PartiallyUpdatedState {
15481
15524
  * Note we store all previous entries as well, since there might be a sequence of:
15482
15525
  * `provide` -> `remove` and both should update the end state somehow.
15483
15526
  */
15484
- updatePreimage(newUpdate) {
15485
- this.stateUpdate.services.preimages.push(newUpdate);
15527
+ updatePreimage(serviceId, newUpdate) {
15528
+ const updatePreimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
15529
+ updatePreimages.push(newUpdate);
15530
+ this.stateUpdate.services.preimages.set(serviceId, updatePreimages);
15486
15531
  }
15487
15532
  updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
15488
15533
  debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
@@ -15491,11 +15536,11 @@ class PartiallyUpdatedState {
15491
15536
  const overflowBytes = !isU64(bytes);
15492
15537
  // TODO [ToDr] this is not specified in GP, but it seems sensible.
15493
15538
  if (overflowItems || overflowBytes) {
15494
- return result_Result.error(InsufficientFundsError);
15539
+ return result_Result.error(InsufficientFundsError, () => `Storage utilisation overflow: items=${overflowItems}, bytes=${overflowBytes}`);
15495
15540
  }
15496
15541
  const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
15497
15542
  if (serviceInfo.balance < thresholdBalance) {
15498
- return result_Result.error(InsufficientFundsError);
15543
+ return result_Result.error(InsufficientFundsError, () => `Service balance (${serviceInfo.balance}) below threshold (${thresholdBalance})`);
15499
15544
  }
15500
15545
  // Update service info with new details.
15501
15546
  this.updateServiceInfo(serviceId, ServiceAccountInfo.create({
@@ -15506,22 +15551,25 @@ class PartiallyUpdatedState {
15506
15551
  return result_Result.ok(result_OK);
15507
15552
  }
15508
15553
  updateServiceInfo(serviceId, newInfo) {
15509
- const idx = this.stateUpdate.services.servicesUpdates.findIndex((x) => x.serviceId === serviceId);
15510
- const toRemove = idx === -1 ? 0 : 1;
15511
- const existingItem = this.stateUpdate.services.servicesUpdates[idx];
15512
- if (existingItem?.action.kind === UpdateServiceKind.Create) {
15513
- this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.create({
15514
- serviceId,
15554
+ const existingUpdate = this.stateUpdate.services.updated.get(serviceId);
15555
+ if (existingUpdate?.action.kind === UpdateServiceKind.Create) {
15556
+ this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
15515
15557
  serviceInfo: newInfo,
15516
- lookupHistory: existingItem.action.lookupHistory,
15558
+ lookupHistory: existingUpdate.action.lookupHistory,
15517
15559
  }));
15518
15560
  return;
15519
15561
  }
15520
- this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.update({
15521
- serviceId,
15562
+ this.stateUpdate.services.updated.set(serviceId, UpdateService.update({
15522
15563
  serviceInfo: newInfo,
15523
15564
  }));
15524
15565
  }
15566
+ createService(serviceId, newInfo, newLookupHistory) {
15567
+ this.stateUpdate.services.created.push(serviceId);
15568
+ this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
15569
+ serviceInfo: newInfo,
15570
+ lookupHistory: newLookupHistory,
15571
+ }));
15572
+ }
15525
15573
  getPrivilegedServices() {
15526
15574
  if (this.stateUpdate.privilegedServices !== null) {
15527
15575
  return this.stateUpdate.privilegedServices;
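Keying the in-flight service updates by id means updateServiceInfo can overwrite a single Map entry instead of scanning and splicing an array, while createService records the new id in a separate created list for later duplicate detection. An illustrative sketch under those assumptions (the record shape is invented for the example):

// Sketch: last-write-wins service updates keyed by id, with created ids tracked separately.
type ServiceId = number;
interface ServiceInfo {
  balance: bigint;
}

const updated = new Map<ServiceId, { kind: "create" | "update"; info: ServiceInfo }>();
const created: ServiceId[] = [];

function createService(id: ServiceId, info: ServiceInfo): void {
  created.push(id);
  updated.set(id, { kind: "create", info });
}

function updateServiceInfo(id: ServiceId, info: ServiceInfo): void {
  // A service created within the same round stays a "create"; only its info is replaced.
  const existing = updated.get(id);
  updated.set(id, { kind: existing?.kind === "create" ? "create" : "update", info });
}

createService(7, { balance: 100n });
updateServiceInfo(7, { balance: 250n });
console.log(updated.get(7)); // { kind: "create", info: { balance: 250n } }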
@@ -16995,7 +17043,7 @@ class ReadablePage extends MemoryPage {
16995
17043
  loadInto(result, startIndex, length) {
16996
17044
  const endIndex = startIndex + length;
16997
17045
  if (endIndex > PAGE_SIZE) {
16998
- return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE));
17046
+ return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
16999
17047
  }
17000
17048
  const bytes = this.data.subarray(startIndex, endIndex);
17001
17049
  // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -17004,7 +17052,7 @@ class ReadablePage extends MemoryPage {
17004
17052
  return result_Result.ok(result_OK);
17005
17053
  }
17006
17054
  storeFrom(_address, _data) {
17007
- return result_Result.error(PageFault.fromMemoryIndex(this.start, true));
17055
+ return result_Result.error(PageFault.fromMemoryIndex(this.start, true), () => `Page fault: attempted to write to read-only page at ${this.start}`);
17008
17056
  }
17009
17057
  setData(pageIndex, data) {
17010
17058
  this.data.set(data, pageIndex);
@@ -17038,7 +17086,7 @@ class WriteablePage extends MemoryPage {
17038
17086
  loadInto(result, startIndex, length) {
17039
17087
  const endIndex = startIndex + length;
17040
17088
  if (endIndex > PAGE_SIZE) {
17041
- return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE));
17089
+ return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
17042
17090
  }
17043
17091
  const bytes = this.view.subarray(startIndex, endIndex);
17044
17092
  // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -17124,7 +17172,7 @@ class Memory {
17124
17172
  memory_logger.insane `MEM[${address}] <- ${bytes_BytesBlob.blobFrom(bytes)}`;
17125
17173
  const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
17126
17174
  if (pagesResult.isError) {
17127
- return result_Result.error(pagesResult.error);
17175
+ return result_Result.error(pagesResult.error, pagesResult.details);
17128
17176
  }
17129
17177
  const pages = pagesResult.ok;
17130
17178
  let currentPosition = address;
@@ -17149,14 +17197,14 @@ class Memory {
17149
17197
  const pages = [];
17150
17198
  for (const pageNumber of pageRange) {
17151
17199
  if (pageNumber < RESERVED_NUMBER_OF_PAGES) {
17152
- return result_Result.error(PageFault.fromPageNumber(pageNumber, true));
17200
+ return result_Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to access reserved page ${pageNumber}`);
17153
17201
  }
17154
17202
  const page = this.memory.get(pageNumber);
17155
17203
  if (page === undefined) {
17156
- return result_Result.error(PageFault.fromPageNumber(pageNumber));
17204
+ return result_Result.error(PageFault.fromPageNumber(pageNumber), () => `Page fault: page ${pageNumber} not allocated`);
17157
17205
  }
17158
17206
  if (accessType === AccessType.WRITE && !page.isWriteable()) {
17159
- return result_Result.error(PageFault.fromPageNumber(pageNumber, true));
17207
+ return result_Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to write to read-only page ${pageNumber}`);
17160
17208
  }
17161
17209
  pages.push(page);
17162
17210
  }
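When getPages fails, the memory store/load paths now re-wrap the same error and forward the inner details thunk instead of formatting a new message, so the page-fault description is built at most once and only on demand. A small sketch of that forwarding (the Res helpers below are illustrative, not the package's API):

// Sketch: propagating a lazily-built error message from an inner result to an outer one.
type Res<T, E> =
  | { isOk: true; isError: false; ok: T }
  | { isOk: false; isError: true; error: E; details: () => string };

const resOk = <T>(ok: T): Res<T, never> => ({ isOk: true, isError: false, ok });
const resError = <E>(error: E, details: () => string = () => ""): Res<never, E> => ({
  isOk: false,
  isError: true,
  error,
  details,
});

function getPages(pageNumber: number): Res<string[], "PageFault"> {
  return pageNumber < 16
    ? resError("PageFault", () => `Page fault: attempted to access reserved page ${pageNumber}`)
    : resOk([`page-${pageNumber}`]);
}

function storeInto(pageNumber: number): Res<string, "PageFault"> {
  const pages = getPages(pageNumber);
  if (pages.isError) {
    // reuse the inner thunk instead of eagerly building a second message
    return resError(pages.error, pages.details);
  }
  return resOk("stored");
}

const attempt = storeInto(3);
if (attempt.isError) {
  console.log(attempt.details()); // "Page fault: attempted to access reserved page 3"
}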
@@ -17174,7 +17222,7 @@ class Memory {
17174
17222
  }
17175
17223
  const pagesResult = this.getPages(startAddress, result.length, AccessType.READ);
17176
17224
  if (pagesResult.isError) {
17177
- return result_Result.error(pagesResult.error);
17225
+ return result_Result.error(pagesResult.error, pagesResult.details);
17178
17226
  }
17179
17227
  const pages = pagesResult.ok;
17180
17228
  let currentPosition = startAddress;
@@ -19113,7 +19161,7 @@ class ProgramDecoder {
19113
19161
  }
19114
19162
  catch (e) {
19115
19163
  program_decoder_logger.error `Invalid program: ${e}`;
19116
- return result_Result.error(ProgramDecoderError.InvalidProgramError);
19164
+ return result_Result.error(ProgramDecoderError.InvalidProgramError, () => `Program decoder error: ${e}`);
19117
19165
  }
19118
19166
  }
19119
19167
  }
@@ -19856,10 +19904,10 @@ class AccumulateExternalities {
19856
19904
  const len = existingPreimage.slots.length;
19857
19905
  // https://graypaper.fluffylabs.dev/#/9a08063/380901380901?v=0.6.6
19858
19906
  if (len === PreimageStatusKind.Requested) {
19859
- return result_Result.error(RequestPreimageError.AlreadyRequested);
19907
+ return result_Result.error(RequestPreimageError.AlreadyRequested, () => `Preimage already requested: hash=${hash}`);
19860
19908
  }
19861
19909
  if (len === PreimageStatusKind.Available || len === PreimageStatusKind.Reavailable) {
19862
- return result_Result.error(RequestPreimageError.AlreadyAvailable);
19910
+ return result_Result.error(RequestPreimageError.AlreadyAvailable, () => `Preimage already available: hash=${hash}`);
19863
19911
  }
19864
19912
  // TODO [ToDr] Not sure if we should update the service info in that case,
19865
19913
  // but for now we let that case fall-through.
@@ -19884,15 +19932,13 @@ class AccumulateExternalities {
19884
19932
  const clampedLength = clampU64ToU32(length);
19885
19933
  if (existingPreimage === null) {
19886
19934
  // https://graypaper.fluffylabs.dev/#/9a08063/38a60038a600?v=0.6.6
19887
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
19888
- serviceId: this.currentServiceId,
19935
+ this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
19889
19936
  lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([])),
19890
19937
  }));
19891
19938
  }
19892
19939
  else {
19893
19940
  /** https://graypaper.fluffylabs.dev/#/9a08063/38ca0038ca00?v=0.6.6 */
19894
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
19895
- serviceId: this.currentServiceId,
19941
+ this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
19896
19942
  lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([...existingPreimage.slots, this.currentTimeslot])),
19897
19943
  }));
19898
19944
  }
@@ -19902,7 +19948,7 @@ class AccumulateExternalities {
19902
19948
  const serviceId = this.currentServiceId;
19903
19949
  const status = this.updatedState.getLookupHistory(this.currentTimeslot, this.currentServiceId, hash, length);
19904
19950
  if (status === null) {
19905
- return result_Result.error(ForgetPreimageError.NotFound);
19951
+ return result_Result.error(ForgetPreimageError.NotFound, () => `Preimage not found: hash=${hash}, length=${length}`);
19906
19952
  }
19907
19953
  const s = slotsToPreimageStatus(status.slots);
19908
19954
  const updateStorageUtilisation = () => {
@@ -19915,10 +19961,9 @@ class AccumulateExternalities {
19915
19961
  if (s.status === PreimageStatusKind.Requested) {
19916
19962
  const res = updateStorageUtilisation();
19917
19963
  if (res.isError) {
19918
- return result_Result.error(ForgetPreimageError.StorageUtilisationError);
19964
+ return result_Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
19919
19965
  }
19920
- this.updatedState.updatePreimage(UpdatePreimage.remove({
19921
- serviceId,
19966
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
19922
19967
  hash: status.hash,
19923
19968
  length: status.length,
19924
19969
  }));
@@ -19931,21 +19976,19 @@ class AccumulateExternalities {
19931
19976
  if (y < t - this.chainSpec.preimageExpungePeriod) {
19932
19977
  const res = updateStorageUtilisation();
19933
19978
  if (res.isError) {
19934
- return result_Result.error(ForgetPreimageError.StorageUtilisationError);
19979
+ return result_Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
19935
19980
  }
19936
- this.updatedState.updatePreimage(UpdatePreimage.remove({
19937
- serviceId,
19981
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
19938
19982
  hash: status.hash,
19939
19983
  length: status.length,
19940
19984
  }));
19941
19985
  return result_Result.ok(result_OK);
19942
19986
  }
19943
- return result_Result.error(ForgetPreimageError.NotExpired);
19987
+ return result_Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
19944
19988
  }
19945
19989
  // https://graypaper.fluffylabs.dev/#/9a08063/38c80138c801?v=0.6.6
19946
19990
  if (s.status === PreimageStatusKind.Available) {
19947
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
19948
- serviceId,
19991
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
19949
19992
  lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[0], t])),
19950
19993
  }));
19951
19994
  return result_Result.ok(result_OK);
@@ -19954,13 +19997,12 @@ class AccumulateExternalities {
19954
19997
  if (s.status === PreimageStatusKind.Reavailable) {
19955
19998
  const y = s.data[1];
19956
19999
  if (y < t - this.chainSpec.preimageExpungePeriod) {
19957
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
19958
- serviceId,
20000
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
19959
20001
  lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[2], t])),
19960
20002
  }));
19961
20003
  return result_Result.ok(result_OK);
19962
20004
  }
19963
- return result_Result.error(ForgetPreimageError.NotExpired);
20005
+ return result_Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
19964
20006
  }
19965
20007
  debug_assertNever(s);
19966
20008
  }
@@ -19969,17 +20011,17 @@ class AccumulateExternalities {
19969
20011
  const destination = this.getServiceInfo(destinationId);
19970
20012
  /** https://graypaper.fluffylabs.dev/#/9a08063/370401370401?v=0.6.6 */
19971
20013
  if (destination === null || destinationId === null) {
19972
- return result_Result.error(TransferError.DestinationNotFound);
20014
+ return result_Result.error(TransferError.DestinationNotFound, () => `Destination service not found: ${destinationId}`);
19973
20015
  }
19974
20016
  /** https://graypaper.fluffylabs.dev/#/9a08063/371301371301?v=0.6.6 */
19975
20017
  if (gas < destination.onTransferMinGas) {
19976
- return result_Result.error(TransferError.GasTooLow);
20018
+ return result_Result.error(TransferError.GasTooLow, () => `Gas ${gas} below minimum ${destination.onTransferMinGas}`);
19977
20019
  }
19978
20020
  /** https://graypaper.fluffylabs.dev/#/9a08063/371b01371b01?v=0.6.6 */
19979
20021
  const newBalance = source.balance - amount;
19980
20022
  const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(source.storageUtilisationCount, source.storageUtilisationBytes, source.gratisStorage);
19981
20023
  if (newBalance < thresholdBalance) {
19982
- return result_Result.error(TransferError.BalanceBelowThreshold);
20024
+ return result_Result.error(TransferError.BalanceBelowThreshold, () => `Balance ${newBalance} below threshold ${thresholdBalance}`);
19983
20025
  }
19984
20026
  // outgoing transfer
19985
20027
  this.updatedState.stateUpdate.transfers.push(PendingTransfer.create({
@@ -20006,7 +20048,7 @@ class AccumulateExternalities {
20006
20048
  // check if we are privileged to set gratis storage
20007
20049
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/369203369603?v=0.6.7
20008
20050
  if (gratisStorage !== numbers_tryAsU64(0) && this.currentServiceId !== this.updatedState.getPrivilegedServices().manager) {
20009
- return result_Result.error(NewServiceError.UnprivilegedService);
20051
+ return result_Result.error(NewServiceError.UnprivilegedService, () => `Service ${this.currentServiceId} not privileged to set gratis storage`);
20010
20052
  }
20011
20053
  // check if we have enough balance
20012
20054
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/369e0336a303?v=0.6.7
@@ -20015,7 +20057,7 @@ class AccumulateExternalities {
20015
20057
  const thresholdForCurrent = ServiceAccountInfo.calculateThresholdBalance(currentService.storageUtilisationCount, currentService.storageUtilisationBytes, currentService.gratisStorage);
20016
20058
  const balanceLeftForCurrent = currentService.balance - thresholdForNew;
20017
20059
  if (balanceLeftForCurrent < thresholdForCurrent || bytes.overflow) {
20018
- return result_Result.error(NewServiceError.InsufficientFunds);
20060
+ return result_Result.error(NewServiceError.InsufficientFunds, () => `Insufficient funds: balance=${currentService.balance}, required=${thresholdForNew}, overflow=${bytes.overflow}`);
20019
20061
  }
20020
20062
  // `a`: https://graypaper.fluffylabs.dev/#/ab2cdbd/366b02366d02?v=0.7.2
20021
20063
  const newAccount = ServiceAccountInfo.create({
@@ -20042,15 +20084,11 @@ class AccumulateExternalities {
20042
20084
  // NOTE: It's safe to cast to `Number` here, bcs here service ID cannot be bigger than 2**16
20043
20085
  const newServiceId = tryAsServiceId(Number(wantedServiceId));
20044
20086
  if (this.getServiceInfo(newServiceId) !== null) {
20045
- return result_Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken);
20087
+ return result_Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken, () => `Service ID ${newServiceId} already taken`);
20046
20088
  }
20047
20089
  // add the new service with selected ID
20048
20090
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/36be0336c003?v=0.7.2
20049
- this.updatedState.stateUpdate.services.servicesUpdates.push(UpdateService.create({
20050
- serviceId: newServiceId,
20051
- serviceInfo: newAccount,
20052
- lookupHistory: newLookupItem,
20053
- }));
20091
+ this.updatedState.createService(newServiceId, newAccount, newLookupItem);
20054
20092
  // update the balance of current service
20055
20093
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/36c20336c403?v=0.7.2
20056
20094
  this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -20061,12 +20099,8 @@ class AccumulateExternalities {
20061
20099
  }
20062
20100
  const newServiceId = this.nextNewServiceId;
20063
20101
  // add the new service
20064
- // https://graypaper.fluffylabs.dev/#/ab2cdbd/36e70336e903?v=0.7.2
20065
- this.updatedState.stateUpdate.services.servicesUpdates.push(UpdateService.create({
20066
- serviceId: newServiceId,
20067
- serviceInfo: newAccount,
20068
- lookupHistory: newLookupItem,
20069
- }));
20102
+ // https://graypaper.fluffylabs.dev/#/7e6ff6a/36cb0236cb02?v=0.6.7
20103
+ this.updatedState.createService(newServiceId, newAccount, newLookupItem);
20070
20104
  // update the balance of current service
20071
20105
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/36ec0336ee03?v=0.7.2
20072
20106
  this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -20090,7 +20124,7 @@ class AccumulateExternalities {
20090
20124
  const currentDelegator = this.updatedState.getPrivilegedServices().delegator;
20091
20125
  if (currentDelegator !== this.currentServiceId) {
20092
20126
  accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not a validators manager. (expected: ${currentDelegator}) and cannot update validators data. Ignoring`;
20093
- return result_Result.error(UnprivilegedError);
20127
+ return result_Result.error(UnprivilegedError, () => `Service ${this.currentServiceId} is not delegator (expected: ${currentDelegator})`);
20094
20128
  }
20095
20129
  this.updatedState.stateUpdate.validatorsData = validatorsData;
20096
20130
  return result_Result.ok(result_OK);
@@ -20105,11 +20139,11 @@ class AccumulateExternalities {
20105
20139
  const currentAssigners = this.updatedState.getPrivilegedServices().assigners[coreIndex];
20106
20140
  if (currentAssigners !== this.currentServiceId) {
20107
20141
  accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not an auth manager of core ${coreIndex} (expected: ${currentAssigners}) and cannot update authorization queue.`;
20108
- return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
20142
+ return result_Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} not assigner for core ${coreIndex} (expected: ${currentAssigners})`);
20109
20143
  }
20110
20144
  if (assigners === null && Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)) {
20111
20145
  accumulate_externalities_logger.trace `The new auth manager is not a valid service id.`;
20112
- return result_Result.error(UpdatePrivilegesError.InvalidServiceId);
20146
+ return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => `New auth manager is null for core ${coreIndex}`);
20113
20147
  }
20114
20148
  this.updatedState.stateUpdate.authorizationQueues.set(coreIndex, authQueue);
20115
20149
  return result_Result.ok(result_OK);
@@ -20142,10 +20176,10 @@ class AccumulateExternalities {
20142
20176
  const isManager = current.manager === this.currentServiceId;
20143
20177
  if (Compatibility.isLessThan(GpVersion.V0_7_1)) {
20144
20178
  if (!isManager) {
20145
- return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
20179
+ return result_Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} is not manager`);
20146
20180
  }
20147
20181
  if (manager === null || delegator === null) {
20148
- return result_Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator is not a valid service id.");
20182
+ return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator is not a valid service id.");
20149
20183
  }
20150
20184
  this.updatedState.stateUpdate.privilegedServices = PrivilegedServices.create({
20151
20185
  manager,
@@ -20158,7 +20192,7 @@ class AccumulateExternalities {
20158
20192
  }
20159
20193
  const original = this.updatedState.state.privilegedServices;
20160
20194
  if (manager === null || delegator === null || registrar === null) {
20161
- return result_Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator or registrar is not a valid service id.");
20195
+ return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator or registrar is not a valid service id.");
20162
20196
  }
20163
20197
  const newDelegator = this.updatePrivilegedServiceId(delegator, current.delegator, {
20164
20198
  isManager,
@@ -20198,23 +20232,22 @@ class AccumulateExternalities {
20198
20232
  // TODO [ToDr] what about newly created services?
20199
20233
  const service = serviceId === null ? null : this.updatedState.state.getService(serviceId);
20200
20234
  if (service === null || serviceId === null) {
20201
- return result_Result.error(ProvidePreimageError.ServiceNotFound);
20235
+ return result_Result.error(ProvidePreimageError.ServiceNotFound, () => `Service not found: ${serviceId}`);
20202
20236
  }
20203
20237
  // calculating the hash
20204
20238
  const preimageHash = this.blake2b.hashBytes(preimage).asOpaque();
20205
20239
  // checking service internal lookup
20206
20240
  const stateLookup = this.updatedState.getLookupHistory(this.currentTimeslot, serviceId, preimageHash, numbers_tryAsU64(preimage.length));
20207
20241
  if (stateLookup === null || !LookupHistoryItem.isRequested(stateLookup)) {
20208
- return result_Result.error(ProvidePreimageError.WasNotRequested);
20242
+ return result_Result.error(ProvidePreimageError.WasNotRequested, () => `Preimage was not requested: hash=${preimageHash}, service=${serviceId}`);
20209
20243
  }
20210
20244
  // checking already provided preimages
20211
20245
  const hasPreimage = this.updatedState.hasPreimage(serviceId, preimageHash);
20212
20246
  if (hasPreimage) {
20213
- return result_Result.error(ProvidePreimageError.AlreadyProvided);
20247
+ return result_Result.error(ProvidePreimageError.AlreadyProvided, () => `Preimage already provided: hash=${preimageHash}, service=${serviceId}`);
20214
20248
  }
20215
20249
  // setting up the new preimage
20216
- this.updatedState.updatePreimage(UpdatePreimage.provide({
20217
- serviceId,
20250
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.provide({
20218
20251
  preimage: PreimageItem.create({
20219
20252
  hash: preimageHash,
20220
20253
  blob: preimage,
@@ -20226,31 +20259,31 @@ class AccumulateExternalities {
20226
20259
  eject(destination, previousCodeHash) {
20227
20260
  const service = this.getServiceInfo(destination);
20228
20261
  if (service === null || destination === null) {
20229
- return result_Result.error(EjectError.InvalidService, "Service missing");
20262
+ return result_Result.error(EjectError.InvalidService, () => "Service missing");
20230
20263
  }
20231
20264
  const currentService = this.getCurrentServiceInfo();
20232
20265
  // check if the service expects to be ejected by us:
20233
20266
  const expectedCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
20234
20267
  writeServiceIdAsLeBytes(this.currentServiceId, expectedCodeHash.raw);
20235
20268
  if (!service.codeHash.isEqualTo(expectedCodeHash)) {
20236
- return result_Result.error(EjectError.InvalidService, "Invalid code hash");
20269
+ return result_Result.error(EjectError.InvalidService, () => "Invalid code hash");
20237
20270
  }
20238
20271
  // make sure the service only has required number of storage items?
20239
20272
  if (service.storageUtilisationCount !== REQUIRED_NUMBER_OF_STORAGE_ITEMS_FOR_EJECT) {
20240
- return result_Result.error(EjectError.InvalidPreimage, "Too many storage items");
20273
+ return result_Result.error(EjectError.InvalidPreimage, () => "Too many storage items");
20241
20274
  }
20242
20275
  // storage items length
20243
20276
  const l = numbers_tryAsU64(maxU64(service.storageUtilisationBytes, LOOKUP_HISTORY_ENTRY_BYTES) - LOOKUP_HISTORY_ENTRY_BYTES);
20244
20277
  // check if we have a preimage with the entire storage.
20245
20278
  const [isPreviousCodeExpired, errorReason] = this.isPreviousCodeExpired(destination, previousCodeHash, l);
20246
20279
  if (!isPreviousCodeExpired) {
20247
- return result_Result.error(EjectError.InvalidPreimage, `Previous code available: ${errorReason}`);
20280
+ return result_Result.error(EjectError.InvalidPreimage, () => `Previous code available: ${errorReason}`);
20248
20281
  }
20249
20282
  // compute new balance of the service.
20250
20283
  const newBalance = sumU64(currentService.balance, service.balance);
20251
20284
  // TODO [ToDr] what to do in case of overflow?
20252
20285
  if (newBalance.overflow) {
20253
- return result_Result.error(EjectError.InvalidService, "Balance overflow");
20286
+ return result_Result.error(EjectError.InvalidService, () => "Balance overflow");
20254
20287
  }
20255
20288
  // update current service.
20256
20289
  this.updatedState.updateServiceInfo(this.currentServiceId, ServiceAccountInfo.create({
@@ -20258,11 +20291,13 @@ class AccumulateExternalities {
20258
20291
  balance: newBalance.value,
20259
20292
  }));
20260
20293
  // and finally add an ejected service.
20261
- this.updatedState.stateUpdate.services.servicesRemoved.push(destination);
20294
+ this.updatedState.stateUpdate.services.removed.push(destination);
20262
20295
  // take care of the code preimage and its lookup history
20263
20296
  // Safe, because we know the preimage is valid, and it's the code of the service, which is bounded by maximal service code size anyway (much smaller than 2**32 bytes).
20264
20297
  const preimageLength = numbers_tryAsU32(Number(l));
20265
- this.updatedState.stateUpdate.services.preimages.push(UpdatePreimage.remove({ serviceId: destination, hash: previousCodeHash, length: preimageLength }));
20298
+ const preimages = this.updatedState.stateUpdate.services.preimages.get(destination) ?? [];
20299
+ preimages.push(UpdatePreimage.remove({ hash: previousCodeHash, length: preimageLength }));
20300
+ this.updatedState.stateUpdate.services.preimages.set(destination, preimages);
20266
20301
  return result_Result.ok(result_OK);
20267
20302
  }
20268
20303
  read(serviceId, rawKey) {
@@ -20443,10 +20478,10 @@ class Assurances {
20443
20478
  for (const assurance of assurances) {
20444
20479
  const { anchor, validatorIndex, bitfield } = assurance;
20445
20480
  if (!anchor.isEqualTo(input.parentHash)) {
20446
- return result_Result.error(AssurancesError.InvalidAnchor, `anchor: expected: ${input.parentHash}, got ${anchor}`);
20481
+ return result_Result.error(AssurancesError.InvalidAnchor, () => `anchor: expected: ${input.parentHash}, got ${anchor}`);
20447
20482
  }
20448
20483
  if (prevValidatorIndex >= validatorIndex) {
20449
- return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
20484
+ return result_Result.error(AssurancesError.InvalidOrder, () => `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
20450
20485
  }
20451
20486
  prevValidatorIndex = assurance.validatorIndex;
20452
20487
  debug_check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
@@ -20469,7 +20504,7 @@ class Assurances {
20469
20504
  * https://graypaper.fluffylabs.dev/#/579bd12/14e90014ea00
20470
20505
  */
20471
20506
  if (noOfAssurances > 0 && !isReportPending) {
20472
- return result_Result.error(AssurancesError.NoReportPending, `no report pending for core ${c} yet we got an assurance`);
20507
+ return result_Result.error(AssurancesError.NoReportPending, () => `no report pending for core ${c} yet we got an assurance`);
20473
20508
  }
20474
20509
  /**
20475
20510
  * Remove work report if it's become available or timed out.
@@ -20515,7 +20550,7 @@ class Assurances {
20515
20550
  const v = assurance.view();
20516
20551
  const key = validatorData[v.validatorIndex.materialize()];
20517
20552
  if (key === undefined) {
20518
- return result_Result.error(AssurancesError.InvalidValidatorIndex);
20553
+ return result_Result.error(AssurancesError.InvalidValidatorIndex, () => `Invalid validator index: ${v.validatorIndex.materialize()}`);
20519
20554
  }
20520
20555
  signatures.push({
20521
20556
  signature: v.signature.materialize(),
@@ -20527,7 +20562,7 @@ class Assurances {
20527
20562
  const isAllSignaturesValid = signaturesValid.every((x) => x);
20528
20563
  if (!isAllSignaturesValid) {
20529
20564
  const invalidIndices = signaturesValid.reduce((acc, isValid, idx) => (isValid ? acc : acc.concat(idx)), []);
20530
- return result_Result.error(AssurancesError.InvalidSignature, `invalid signatures at ${invalidIndices.join(", ")}`);
20565
+ return result_Result.error(AssurancesError.InvalidSignature, () => `invalid signatures at ${invalidIndices.join(", ")}`);
20531
20566
  }
20532
20567
  return result_Result.ok(result_OK);
20533
20568
  }
@@ -21138,7 +21173,7 @@ class HostCallMemory {
21138
21173
  return result_Result.ok(result_OK);
21139
21174
  }
21140
21175
  if (address + numbers_tryAsU64(bytes.length) > MEMORY_SIZE) {
21141
- return result_Result.error(new OutOfBounds());
21176
+ return result_Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${address} + length ${bytes.length} exceeds memory size`);
21142
21177
  }
21143
21178
  return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
21144
21179
  }
@@ -21147,13 +21182,10 @@ class HostCallMemory {
21147
21182
  return result_Result.ok(result_OK);
21148
21183
  }
21149
21184
  if (startAddress + numbers_tryAsU64(result.length) > MEMORY_SIZE) {
21150
- return result_Result.error(new OutOfBounds());
21185
+ return result_Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${startAddress} + length ${result.length} exceeds memory size`);
21151
21186
  }
21152
21187
  return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
21153
21188
  }
21154
- getMemory() {
21155
- return this.memory;
21156
- }
21157
21189
  }
21158
21190
 
21159
21191
  ;// CONCATENATED MODULE: ./packages/core/pvm-host-calls/host-call-registers.ts
@@ -23093,18 +23125,18 @@ class Accumulate {
23093
23125
  const serviceInfo = updatedState.getServiceInfo(serviceId);
23094
23126
  if (serviceInfo === null) {
23095
23127
  accumulate_logger.log `Service with id ${serviceId} not found.`;
23096
- return result_Result.error(PvmInvocationError.NoService);
23128
+ return result_Result.error(PvmInvocationError.NoService, () => `Accumulate: service ${serviceId} not found`);
23097
23129
  }
23098
23130
  const codeHash = serviceInfo.codeHash;
23099
23131
  // TODO [ToDr] Should we check that the preimage is still available?
23100
23132
  const code = updatedState.getPreimage(serviceId, codeHash.asOpaque());
23101
23133
  if (code === null) {
23102
23134
  accumulate_logger.log `Code with hash ${codeHash} not found for service ${serviceId}.`;
23103
- return result_Result.error(PvmInvocationError.NoPreimage);
23135
+ return result_Result.error(PvmInvocationError.NoPreimage, () => `Accumulate: code with hash ${codeHash} not found for service ${serviceId}`);
23104
23136
  }
23105
23137
  if (code.length > W_C) {
23106
23138
  accumulate_logger.log `Code with hash ${codeHash} is too long for service ${serviceId}.`;
23107
- return result_Result.error(PvmInvocationError.PreimageTooLong);
23139
+ return result_Result.error(PvmInvocationError.PreimageTooLong, () => `Accumulate: code length ${code.length} exceeds max ${W_C} for service ${serviceId}`);
23108
23140
  }
23109
23141
  const nextServiceId = generateNextServiceId({ serviceId, entropy, timeslot: slot }, this.chainSpec, this.blake2b);
23110
23142
  const partialState = new AccumulateExternalities(this.chainSpec, this.blake2b, updatedState, serviceId, nextServiceId, slot);
@@ -23178,7 +23210,7 @@ class Accumulate {
23178
23210
  }
23179
23211
  const result = await this.pvmAccumulateInvocation(slot, serviceId, transfers, operands, gasCost, entropy, updatedState);
23180
23212
  if (result.isError) {
23181
- // https://graypaper.fluffylabs.dev/#/7e6ff6a/2fb6012fb601?v=0.6.7
23213
+ // https://graypaper.fluffylabs.dev/#/ab2cdbd/2fc9032fc903?v=0.7.2
23182
23214
  accumulate_logger.log `Accumulation failed for ${serviceId}.`;
23183
23215
  // even though accumulation failed, we still need to make sure that
23184
23216
  // incoming transfers updated the balance, hence we pass state update here
@@ -23272,10 +23304,12 @@ class Accumulate {
23272
23304
  const { consumedGas, stateUpdate } = await this.accumulateSingleService(serviceId, accumulateData.getTransfers(serviceId), operands, accumulateData.getGasCost(serviceId), slot, entropy, currentState);
23273
23305
  gasCost = tryAsServiceGas(gasCost + consumedGas);
23274
23306
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/193b05193b05?v=0.7.2
23275
- // do not update statistics, if the service only had incoming transfers
23276
- if (operands.length > 0) {
23277
- const serviceStatistics = statistics.get(serviceId) ?? { count: numbers_tryAsU32(0), gasUsed: tryAsServiceGas(0) };
23278
- serviceStatistics.count = numbers_tryAsU32(serviceStatistics.count + accumulateData.getReportsLength(serviceId));
23307
+ const serviceStatistics = statistics.get(serviceId) ?? { count: numbers_tryAsU32(0), gasUsed: tryAsServiceGas(0) };
23308
+ const count = accumulateData.getReportsLength(serviceId);
23309
+ // [0.7.1]: do not update statistics, if the service only had incoming transfers
23310
+ if ((Compatibility.isLessThan(GpVersion.V0_7_2) && count > 0) ||
23311
+ (Compatibility.isGreaterOrEqual(GpVersion.V0_7_2) && (count > 0 || consumedGas > 0n))) {
23312
+ serviceStatistics.count = numbers_tryAsU32(serviceStatistics.count + count);
23279
23313
  serviceStatistics.gasUsed = tryAsServiceGas(serviceStatistics.gasUsed + consumedGas);
23280
23314
  statistics.set(serviceId, serviceStatistics);
23281
23315
  }
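The statistics update is now gated per Gray Paper version: before 0.7.2 a service is counted only when it actually had work reports, while from 0.7.2 onwards it is also counted when incoming transfers alone consumed gas. A hedged sketch of just that condition (the version check is modelled on the call sites above, not imported from the package):

// Sketch: deciding whether to record accumulation statistics for a service.
type GpVersion = "0.7.1" | "0.7.2";

function shouldRecordStatistics(version: GpVersion, reportCount: number, consumedGas: bigint): boolean {
  if (version < "0.7.2") {
    // pre-0.7.2: services that only received transfers are skipped
    return reportCount > 0;
  }
  // 0.7.2 and later: gas consumed by incoming transfers is enough to count
  return reportCount > 0 || consumedGas > 0n;
}

console.log(shouldRecordStatistics("0.7.1", 0, 5n)); // false
console.log(shouldRecordStatistics("0.7.2", 0, 5n)); // true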
@@ -23357,19 +23391,16 @@ class Accumulate {
23357
23391
  const gasLimit = tryAsServiceGas(this.chainSpec.maxBlockGas > calculatedGasLimit ? this.chainSpec.maxBlockGas : calculatedGasLimit);
23358
23392
  return tryAsServiceGas(gasLimit);
23359
23393
  }
23360
- hasDuplicatedServicesCreated(updateServices) {
23361
- const createdServiceIds = new Set();
23362
- for (const update of updateServices) {
23363
- if (update.action.kind === UpdateServiceKind.Create) {
23364
- const serviceId = update.serviceId;
23365
- if (createdServiceIds.has(serviceId)) {
23366
- accumulate_logger.log `Duplicated Service creation detected ${serviceId}. Block is invalid.`;
23367
- return true;
23368
- }
23369
- createdServiceIds.add(serviceId);
23370
- }
23371
- }
23372
- return false;
23394
+ /**
23395
+ * Detects the very unlikely situation where multiple services are created with the same ID.
23396
+ *
23397
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/30f20330f403?v=0.7.2
23398
+ *
23399
+ * NOTE: This is public only for testing purposes and should not be used outside of accumulation.
23400
+ */
23401
+ hasDuplicatedServiceIdCreated(createdIds) {
23402
+ const uniqueIds = new Set(createdIds);
23403
+ return uniqueIds.size !== createdIds.length;
23373
23404
  }
23374
23405
  async transition({ reports, slot, entropy }) {
23375
23406
  const statistics = new Map();
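hasDuplicatedServiceIdCreated replaces the earlier scan over update records with a Set-cardinality check over the created id list: if deduplication shrinks the list, some id was created twice. A one-function sketch with usage:

// Sketch: duplicate detection via Set cardinality.
function hasDuplicatedServiceIdCreated(createdIds: readonly number[]): boolean {
  return new Set(createdIds).size !== createdIds.length;
}

console.log(hasDuplicatedServiceIdCreated([1, 2, 3]));    // false
console.log(hasDuplicatedServiceIdCreated([1, 2, 2, 3])); // true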
@@ -23391,8 +23422,9 @@ class Accumulate {
23391
23422
  const accumulated = accumulatableReports.subview(0, accumulatedReports);
23392
23423
  const { services, yieldedRoots, transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
23393
23424
  assertEmpty(stateUpdateRest);
23394
- if (this.hasDuplicatedServicesCreated(services.servicesUpdates)) {
23395
- return result_Result.error(ACCUMULATION_ERROR);
23425
+ if (this.hasDuplicatedServiceIdCreated(services.created)) {
23426
+ accumulate_logger.trace `Duplicated Service creation detected. Block is invalid.`;
23427
+ return result_Result.error(ACCUMULATION_ERROR, () => "Accumulate: duplicate service created");
23396
23428
  }
23397
23429
  const accStateUpdate = this.getAccumulationStateUpdate(accumulated.toArray(), toAccumulateLater, slot, Array.from(statistics.keys()), services);
23398
23430
  const accumulationOutputUnsorted = Array.from(yieldedRoots.entries()).map(([serviceId, root]) => {
@@ -23473,13 +23505,13 @@ class DeferredTransfers {
23473
23505
  .toSorted((a, b) => a.source - b.source);
23474
23506
  const info = partiallyUpdatedState.getServiceInfo(serviceId);
23475
23507
  if (info === null) {
23476
- return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist);
23508
+ return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist, () => `Deferred transfers: service info not found for ${serviceId}`);
23477
23509
  }
23478
23510
  const codeHash = info.codeHash;
23479
23511
  const code = partiallyUpdatedState.getPreimage(serviceId, codeHash.asOpaque());
23480
23512
  const newBalance = sumU64(info.balance, ...transfers.map((item) => item.amount));
23481
23513
  if (newBalance.overflow) {
23482
- return result_Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow);
23514
+ return result_Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow, () => `Deferred transfers: balance overflow for service ${serviceId}`);
23483
23515
  }
23484
23516
  const newInfo = ServiceAccountInfo.create({ ...info, balance: newBalance.value });
23485
23517
  partiallyUpdatedState.updateServiceInfo(serviceId, newInfo);
@@ -23957,7 +23989,7 @@ function verifyReportsBasic(input) {
23957
23989
  const noOfPrerequisites = reportView.context.view().prerequisites.view().length;
23958
23990
  const noOfSegmentRootLookups = reportView.segmentRootLookup.view().length;
23959
23991
  if (noOfPrerequisites + noOfSegmentRootLookups > MAX_REPORT_DEPENDENCIES) {
23960
- return result_Result.error(ReportsError.TooManyDependencies, `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
23992
+ return result_Result.error(ReportsError.TooManyDependencies, () => `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
23961
23993
  }
23962
23994
  /**
23963
23995
  * In order to ensure fair use of a block’s extrinsic space,
@@ -23976,7 +24008,7 @@ function verifyReportsBasic(input) {
23976
24008
  totalOutputsSize += item.view().result.view().okBlob?.raw.length ?? 0;
23977
24009
  }
23978
24010
  if (authOutputSize + totalOutputsSize > MAX_WORK_REPORT_SIZE_BYTES) {
23979
- return result_Result.error(ReportsError.WorkReportTooBig, `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
24011
+ return result_Result.error(ReportsError.WorkReportTooBig, () => `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
23980
24012
  }
23981
24013
  }
23982
24014
  return result_Result.ok(result_OK);
@@ -24010,12 +24042,12 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
24010
24042
  for (const result of guarantee.report.results) {
24011
24043
  const service = state.getService(result.serviceId);
24012
24044
  if (service === null) {
24013
- return result_Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
24045
+ return result_Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
24014
24046
  }
24015
24047
  // check service code hash
24016
24048
  // https://graypaper.fluffylabs.dev/#/5f542d7/154b02154b02
24017
24049
  if (!result.codeHash.isEqualTo(service.getInfo().codeHash)) {
24018
- return result_Result.error(ReportsError.BadCodeHash, `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
24050
+ return result_Result.error(ReportsError.BadCodeHash, () => `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
24019
24051
  }
24020
24052
  }
24021
24053
  }
@@ -24026,7 +24058,7 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
24026
24058
  * https://graypaper.fluffylabs.dev/#/5f542d7/151f01152101
24027
24059
  */
24028
24060
  if (currentWorkPackages.size !== input.guarantees.length) {
24029
- return result_Result.error(ReportsError.DuplicatePackage, "Duplicate work package detected.");
24061
+ return result_Result.error(ReportsError.DuplicatePackage, () => "Duplicate work package detected.");
24030
24062
  }
24031
24063
  const minLookupSlot = Math.max(0, input.slot - maxLookupAnchorAge);
24032
24064
  const contextResult = verifyRefineContexts(minLookupSlot, contexts, input.recentBlocksPartialUpdate, headerChain);
@@ -24071,7 +24103,7 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
24071
24103
  : undefined;
24072
24104
  }
24073
24105
  if (root === undefined || !root.segmentTreeRoot.isEqualTo(lookup.segmentTreeRoot)) {
24074
- return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
24106
+ return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
24075
24107
  }
24076
24108
  }
24077
24109
  }
@@ -24094,16 +24126,16 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
24094
24126
  */
24095
24127
  const recentBlock = recentBlocks.get(context.anchor);
24096
24128
  if (recentBlock === undefined) {
24097
- return result_Result.error(ReportsError.AnchorNotRecent, `Anchor block ${context.anchor} not found in recent blocks.`);
24129
+ return result_Result.error(ReportsError.AnchorNotRecent, () => `Anchor block ${context.anchor} not found in recent blocks.`);
24098
24130
  }
24099
24131
  // check state root
24100
24132
  if (!recentBlock.postStateRoot.isEqualTo(context.stateRoot)) {
24101
- return result_Result.error(ReportsError.BadStateRoot, `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
24133
+ return result_Result.error(ReportsError.BadStateRoot, () => `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
24102
24134
  }
24103
24135
  // check beefy root
24104
24136
  const beefyRoot = recentBlock.accumulationResult;
24105
24137
  if (!beefyRoot.isEqualTo(context.beefyRoot)) {
24106
- return result_Result.error(ReportsError.BadBeefyMmrRoot, `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
24138
+ return result_Result.error(ReportsError.BadBeefyMmrRoot, () => `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
24107
24139
  }
24108
24140
  /**
24109
24141
  * We require that each lookup-anchor block be within the
@@ -24112,7 +24144,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
24112
24144
  * https://graypaper.fluffylabs.dev/#/5f542d7/154601154701
24113
24145
  */
24114
24146
  if (context.lookupAnchorSlot < minLookupSlot) {
24115
- return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
24147
+ return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
24116
24148
  }
24117
24149
  /**
24118
24150
  * We also require that we have a record of it; this is one of
@@ -24129,7 +24161,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
24129
24161
  verify_contextual_logger.warn `Lookup anchor check for ${context.lookupAnchor} would fail, but override is active.`;
24130
24162
  }
24131
24163
  else {
24132
- return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
24164
+ return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
24133
24165
  }
24134
24166
  }
24135
24167
  }
@@ -24152,7 +24184,7 @@ function verifyDependencies({ currentWorkPackages, recentlyReported, prerequisit
24152
24184
  if (recentlyReported.has(preReqHash)) {
24153
24185
  continue;
24154
24186
  }
24155
- return result_Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, `Missing work package ${preReqHash} in current extrinsic or recent history.`);
24187
+ return result_Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, () => `Missing work package ${preReqHash} in current extrinsic or recent history.`);
24156
24188
  }
24157
24189
  return result_Result.ok(result_OK);
24158
24190
  };
@@ -24200,7 +24232,7 @@ function verifyWorkPackagesUniqueness(workPackageHashes, state) {
24200
24232
  // let's check if any of our packages is in the pipeline
24201
24233
  const intersection = packagesInPipeline.intersection(workPackageHashes);
24202
24234
  for (const packageHash of intersection) {
24203
- return result_Result.error(ReportsError.DuplicatePackage, `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
24235
+ return result_Result.error(ReportsError.DuplicatePackage, () => `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
24204
24236
  }
24205
24237
  return result_Result.ok(result_OK);
24206
24238
  }
@@ -24239,7 +24271,7 @@ workReportHashes, slot, getGuarantorAssignment) {
24239
24271
  const credentialsView = guaranteeView.credentials.view();
24240
24272
  if (credentialsView.length < REQUIRED_CREDENTIALS_RANGE[0] ||
24241
24273
  credentialsView.length > REQUIRED_CREDENTIALS_RANGE[1]) {
24242
- return result_Result.error(ReportsError.InsufficientGuarantees, `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
24274
+ return result_Result.error(ReportsError.InsufficientGuarantees, () => `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
24243
24275
  }
24244
24276
  /** Retrieve current core assignment. */
24245
24277
  const timeSlot = guaranteeView.slot.materialize();
@@ -24254,20 +24286,20 @@ workReportHashes, slot, getGuarantorAssignment) {
24254
24286
  const credentialView = credential.view();
24255
24287
  const validatorIndex = credentialView.validatorIndex.materialize();
24256
24288
  if (lastValidatorIndex >= validatorIndex) {
24257
- return result_Result.error(ReportsError.NotSortedOrUniqueGuarantors, `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
24289
+ return result_Result.error(ReportsError.NotSortedOrUniqueGuarantors, () => `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
24258
24290
  }
24259
24291
  lastValidatorIndex = validatorIndex;
24260
24292
  const signature = credentialView.signature.materialize();
24261
24293
  const guarantorData = guarantorAssignments[validatorIndex];
24262
24294
  if (guarantorData === undefined) {
24263
- return result_Result.error(ReportsError.BadValidatorIndex, `Invalid validator index: ${validatorIndex}`);
24295
+ return result_Result.error(ReportsError.BadValidatorIndex, () => `Invalid validator index: ${validatorIndex}`);
24264
24296
  }
24265
24297
  /**
24266
24298
  * Verify core assignment.
24267
24299
  * https://graypaper.fluffylabs.dev/#/5f542d7/14e40214e602
24268
24300
  */
24269
24301
  if (guarantorData.core !== coreIndex) {
24270
- return result_Result.error(ReportsError.WrongAssignment, `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
24302
+ return result_Result.error(ReportsError.WrongAssignment, () => `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
24271
24303
  }
24272
24304
  signaturesToVerify.push({
24273
24305
  signature,
@@ -24305,10 +24337,10 @@ function verifyReportsOrder(input, chainSpec) {
24305
24337
  const reportView = guarantee.view().report.view();
24306
24338
  const coreIndex = reportView.coreIndex.materialize();
24307
24339
  if (lastCoreIndex >= coreIndex) {
24308
- return result_Result.error(ReportsError.OutOfOrderGuarantee, `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
24340
+ return result_Result.error(ReportsError.OutOfOrderGuarantee, () => `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
24309
24341
  }
24310
24342
  if (coreIndex >= noOfCores) {
24311
- return result_Result.error(ReportsError.BadCoreIndex, `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
24343
+ return result_Result.error(ReportsError.BadCoreIndex, () => `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
24312
24344
  }
24313
24345
  lastCoreIndex = coreIndex;
24314
24346
  }
@@ -24333,7 +24365,7 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
24333
24365
  * https://graypaper.fluffylabs.dev/#/5f542d7/15ea0015ea00
24334
24366
  */
24335
24367
  if (availabilityAssignment[coreIndex] !== null) {
24336
- return result_Result.error(ReportsError.CoreEngaged, `Report pending availability at core: ${coreIndex}`);
24368
+ return result_Result.error(ReportsError.CoreEngaged, () => `Report pending availability at core: ${coreIndex}`);
24337
24369
  }
24338
24370
  /**
24339
24371
  * A report is valid only if the authorizer hash is present
@@ -24346,7 +24378,7 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
24346
24378
  const authorizerPool = authPools.get(coreIndex);
24347
24379
  const pool = authorizerPool?.materialize() ?? [];
24348
24380
  if (pool.find((hash) => hash.isEqualTo(authorizerHash)) === undefined) {
24349
- return result_Result.error(ReportsError.CoreUnauthorized, `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
24381
+ return result_Result.error(ReportsError.CoreUnauthorized, () => `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
24350
24382
  }
24351
24383
  /**
24352
24384
  * We require that the gas allotted for accumulation of each
@@ -24358,17 +24390,17 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
24358
24390
  for (const result of report.results) {
24359
24391
  const service = services(result.serviceId);
24360
24392
  if (service === null) {
24361
- return result_Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
24393
+ return result_Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
24362
24394
  }
24363
24395
  const info = service.getInfo();
24364
24396
  // check minimal accumulation gas
24365
24397
  if (result.gas < info.accumulateMinGas) {
24366
- return result_Result.error(ReportsError.ServiceItemGasTooLow, `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
24398
+ return result_Result.error(ReportsError.ServiceItemGasTooLow, () => `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
24367
24399
  }
24368
24400
  }
24369
24401
  const totalGas = sumU64(...report.results.map((x) => x.gas));
24370
24402
  if (totalGas.overflow || totalGas.value > G_A) {
24371
- return result_Result.error(ReportsError.WorkReportGasTooHigh, `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
24403
+ return result_Result.error(ReportsError.WorkReportGasTooHigh, () => `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
24372
24404
  }
24373
24405
  }
24374
24406
  return result_Result.ok(result_OK);
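The gas checks above first require each result's `gas` to meet the service's `accumulateMinGas` and then bound the overflow-aware total by `G_A`. The package's `sumU64` is not shown in this diff; below is a hedged BigInt-based stand-in (with `G_A` stubbed to an arbitrary value) just to illustrate the `{ overflow, value }` shape being consumed:

```ts
// Illustrative only: a BigInt stand-in for the sumU64 seen above.
const U64_MAX = (1n << 64n) - 1n;

function sumU64(...values: bigint[]): { overflow: boolean; value: bigint } {
  let acc = 0n;
  let overflow = false;
  for (const v of values) {
    acc += v;
    if (acc > U64_MAX) {
      overflow = true;
      acc &= U64_MAX; // wrap around like unsigned 64-bit arithmetic would
    }
  }
  return { overflow, value: acc };
}

// Hypothetical numbers purely to exercise the failing branch; the real limit
// comes from the chain spec.
const G_A = 10_000_000n;
const gasPerResult = [6_000_000n, 5_000_000n];

const totalGas = sumU64(...gasPerResult);
if (totalGas.overflow || totalGas.value > G_A) {
  // Corresponds to ReportsError.WorkReportGasTooHigh above.
  console.error(`Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
}
```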
@@ -24454,7 +24486,7 @@ class Reports {
24454
24486
  }
24455
24487
  const reporters = SortedSet.fromArray(bytesBlobComparator, signaturesToVerify.ok.map((x) => x.key)).slice();
24456
24488
  if (hasAnyOffenders(reporters, input.offenders)) {
24457
- return result_Result.error(ReportsError.BannedValidator);
24489
+ return result_Result.error(ReportsError.BannedValidator, () => "One or more reporters are banned validators");
24458
24490
  }
24459
24491
  return result_Result.ok({
24460
24492
  stateUpdate: {
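`hasAnyOffenders` is consumed here but not defined in this hunk; the reporters are the sorted signer keys, and the whole extrinsic is rejected if any of them is a known offender. A rough sketch of that membership check, using plain hex strings where the real code compares byte blobs via `bytesBlobComparator` and `SortedSet`:

```ts
// Sketch only: keys are hex strings here purely for illustration.
function hasAnyOffenders(reporters: string[], offenders: string[]): boolean {
  const banned = new Set(offenders);
  return reporters.some((key) => banned.has(key));
}

const reporters = ["0xaa", "0xbb"];
const offenders = ["0xbb"];

if (hasAnyOffenders(reporters, offenders)) {
  // Maps to ReportsError.BannedValidator above; note the message is now a thunk,
  // so it is only constructed when the error is actually reported.
  console.error("One or more reporters are banned validators");
}
```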
@@ -24494,7 +24526,7 @@ class Reports {
24494
24526
  return signaturesToVerify[idx].key;
24495
24527
  })
24496
24528
  .filter((x) => x !== null);
24497
- return result_Result.error(ReportsError.BadSignature, `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
24529
+ return result_Result.error(ReportsError.BadSignature, () => `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
24498
24530
  }
24499
24531
  /**
24500
24532
  * Get the guarantor assignment (both core and validator data)
@@ -24510,10 +24542,10 @@ class Reports {
24510
24542
  const minTimeSlot = Math.max(0, headerRotation - 1) * rotationPeriod;
24511
24543
  // https://graypaper.fluffylabs.dev/#/5f542d7/155e00156900
24512
24544
  if (guaranteeTimeSlot > headerTimeSlot) {
24513
- return result_Result.error(ReportsError.FutureReportSlot, `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
24545
+ return result_Result.error(ReportsError.FutureReportSlot, () => `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
24514
24546
  }
24515
24547
  if (guaranteeTimeSlot < minTimeSlot) {
24516
- return result_Result.error(ReportsError.ReportEpochBeforeLast, `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
24548
+ return result_Result.error(ReportsError.ReportEpochBeforeLast, () => `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
24517
24549
  }
24518
24550
  // TODO [ToDr] [opti] below code needs cache.
24519
24551
  // The `G` and `G*` sets should only be computed once per rotation.
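The two slot checks above bound a guarantee to a window that ends at the block's own slot and starts one rotation back (`minTimeSlot`). A hedged sketch of that window, assuming `headerRotation` is derived as `floor(headerTimeSlot / rotationPeriod)` (its derivation sits outside this hunk):

```ts
// Hedged sketch of the slot-window check above; error names mirror ReportsError.
function checkGuaranteeSlot(
  guaranteeTimeSlot: number,
  headerTimeSlot: number,
  rotationPeriod: number,
): "ok" | "FutureReportSlot" | "ReportEpochBeforeLast" {
  // Assumption: the header's rotation is its slot divided by the rotation period.
  const headerRotation = Math.floor(headerTimeSlot / rotationPeriod);
  // A guarantee may be at most one full rotation old.
  const minTimeSlot = Math.max(0, headerRotation - 1) * rotationPeriod;
  if (guaranteeTimeSlot > headerTimeSlot) {
    return "FutureReportSlot"; // the report claims a slot after the block itself
  }
  if (guaranteeTimeSlot < minTimeSlot) {
    return "ReportEpochBeforeLast"; // the report is older than the previous rotation
  }
  return "ok";
}

// e.g. with a rotation period of 10 slots and the block at slot 25,
// anything from slot 10 up to slot 25 is acceptable:
console.log(checkGuaranteeSlot(9, 25, 10)); // "ReportEpochBeforeLast"
console.log(checkGuaranteeSlot(26, 25, 10)); // "FutureReportSlot"
console.log(checkGuaranteeSlot(17, 25, 10)); // "ok"
```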
@@ -25081,6 +25113,16 @@ class OnChain {
25081
25113
  });
25082
25114
  const { statistics, ...statisticsRest } = statisticsUpdate;
25083
25115
  assertEmpty(statisticsRest);
25116
+ // Concat accumulatePreimages updates with preimages
25117
+ for (const [serviceId, accPreimageUpdates] of accumulatePreimages.entries()) {
25118
+ const preimagesUpdates = preimages.get(serviceId);
25119
+ if (preimagesUpdates === undefined) {
25120
+ preimages.set(serviceId, accPreimageUpdates);
25121
+ }
25122
+ else {
25123
+ preimages.set(serviceId, preimagesUpdates.concat(accPreimageUpdates));
25124
+ }
25125
+ }
25084
25126
  return result_Result.ok({
25085
25127
  ...(maybeAuthorizationQueues !== undefined ? { authQueues: maybeAuthorizationQueues } : {}),
25086
25128
  ...(maybeDesignatedValidatorData !== undefined ? { designatedValidatorData: maybeDesignatedValidatorData } : {}),
@@ -25102,7 +25144,7 @@ class OnChain {
25102
25144
  recentlyAccumulated,
25103
25145
  accumulationOutputLog,
25104
25146
  ...servicesUpdate,
25105
- preimages: preimages.concat(accumulatePreimages),
25147
+ preimages,
25106
25148
  });
25107
25149
  }
25108
25150
  getUsedAuthorizerHashes(guarantees) {
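The loop added to `OnChain` folds the preimage updates produced during accumulation into the per-service `preimages` updates, replacing the flat `preimages.concat(accumulatePreimages)` removed in the second hunk. The concrete types are not visible here; assuming both sides are `Map<serviceId, update[]>`, the merge amounts to:

```ts
// Sketch of the merge loop above; the concrete update type is not visible in this diff.
type ServiceId = number;

function mergePreimageUpdates<U>(
  preimages: Map<ServiceId, U[]>,
  accumulatePreimages: Map<ServiceId, U[]>,
): void {
  for (const [serviceId, accUpdates] of accumulatePreimages.entries()) {
    const existing = preimages.get(serviceId);
    // Either take the accumulate-side updates as-is or append them to what's already there.
    preimages.set(serviceId, existing === undefined ? accUpdates : existing.concat(accUpdates));
  }
}

// Usage with placeholder string updates:
const preimages = new Map<ServiceId, string[]>([[1, ["a"]]]);
const accumulated = new Map<ServiceId, string[]>([
  [1, ["b"]],
  [2, ["c"]],
]);
mergePreimageUpdates(preimages, accumulated);
// preimages is now { 1 => ["a", "b"], 2 => ["c"] }
```

Keying by service id means a service that only has accumulate-side preimages still gets an entry, while services present on both sides have the two update lists appended.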
@@ -25119,11 +25161,11 @@ class OnChain {
25119
25161
  }
25120
25162
  function checkOffendersMatch(offendersMark, headerOffendersMark) {
25121
25163
  if (offendersMark.size !== headerOffendersMark.length) {
25122
- return result_Result.error(OFFENDERS_ERROR, `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
25164
+ return result_Result.error(OFFENDERS_ERROR, () => `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
25123
25165
  }
25124
25166
  for (const key of headerOffendersMark) {
25125
25167
  if (!offendersMark.has(key)) {
25126
- return result_Result.error(OFFENDERS_ERROR, `Missing key: ${key}`);
25168
+ return result_Result.error(OFFENDERS_ERROR, () => `Missing key: ${key}`);
25127
25169
  }
25128
25170
  }
25129
25171
  return result_Result.ok(result_OK);
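`checkOffendersMatch` treats the computed offenders as a set and the header's offenders mark as a list, requiring equal cardinality and full membership. A small string-keyed sketch (the real code compares key objects rather than strings):

```ts
// Illustration only: returns the mismatch message, or null when the two sides agree.
function offendersMatch(offendersMark: Set<string>, headerOffendersMark: string[]): string | null {
  if (offendersMark.size !== headerOffendersMark.length) {
    return `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`;
  }
  for (const key of headerOffendersMark) {
    if (!offendersMark.has(key)) {
      return `Missing key: ${key}`;
    }
  }
  return null;
}

console.log(offendersMatch(new Set(["0xaa", "0xbb"]), ["0xaa", "0xbb"])); // null
console.log(offendersMatch(new Set(["0xaa"]), ["0xaa", "0xbb"])); // "Length mismatch: 1 vs 2"
```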
@@ -25205,7 +25247,7 @@ class Importer {
25205
25247
  if (!this.currentHash.isEqualTo(parentHash)) {
25206
25248
  const state = this.states.getState(parentHash);
25207
25249
  if (state === null) {
25208
- const e = result_Result.error(BlockVerifierError.StateRootNotFound);
25250
+ const e = result_Result.error(BlockVerifierError.StateRootNotFound, () => `State not found for parent block ${parentHash}`);
25209
25251
  if (!e.isError) {
25210
25252
  throw new Error("unreachable, just adding to make compiler happy");
25211
25253
  }
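The `if (!e.isError) throw ...` guard above looks redundant, but it is presumably there because `result_Result.error` is typed as returning the full `Result` union, so the check narrows `e` to the error variant for the compiler. A self-contained illustration of that narrowing, with shapes that only mirror what this diff shows:

```ts
// Illustrative shapes only; not the package's actual typings.
type Result<T, E> =
  | { isOk: true; isError: false; ok: T }
  | { isOk: false; isError: true; error: E; details: () => string };

function error<E>(err: E, details: () => string): Result<never, E> {
  return { isOk: false, isError: true, error: err, details };
}

const e = error("StateRootNotFound", () => "State not found for parent block");
if (!e.isError) {
  // Statically unreachable, but it narrows `e` to the error variant below.
  throw new Error("unreachable, just adding to make the compiler happy");
}
console.error(`${e.details()} (${e.error})`);
```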
@@ -25401,7 +25443,7 @@ const importBlockResultCodec = descriptors_codec.custom({
25401
25443
  }
25402
25444
  if (kind === 1) {
25403
25445
  const error = d.bytesBlob();
25404
- return result_Result.error(error.asText());
25446
+ return result_Result.error(error.asText(), () => error.asText());
25405
25447
  }
25406
25448
  throw new Error(`Invalid Result: ${kind}`);
25407
25449
  }, (s) => {
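`importBlockResultCodec` is a custom codec whose decode side reads a kind byte (the `kind === 0` ok branch sits just outside this hunk) and, for `kind === 1`, turns the remaining bytes into an error whose lazy details simply echo the same text. A rough stand-in for that tag-byte layout, using `TextEncoder`/`TextDecoder` instead of the package's `Encoder`/`Decoder` machinery and a simplified string payload:

```ts
// Hedged sketch of a one-byte discriminator (0 = ok, 1 = error) followed by a payload.
type SimpleResult = { isOk: true; ok: string } | { isOk: false; error: string };

function encodeResult(res: SimpleResult): Uint8Array {
  const payload = new TextEncoder().encode(res.isOk ? res.ok : res.error);
  const out = new Uint8Array(1 + payload.length);
  out[0] = res.isOk ? 0 : 1;
  out.set(payload, 1);
  return out;
}

function decodeResult(bytes: Uint8Array): SimpleResult {
  const kind = bytes[0];
  const text = new TextDecoder().decode(bytes.subarray(1));
  if (kind === 0) {
    return { isOk: true, ok: text };
  }
  if (kind === 1) {
    return { isOk: false, error: text };
  }
  throw new Error(`Invalid Result: ${kind}`);
}

// Round trip:
console.log(decodeResult(encodeResult({ isOk: false, error: "boom" }))); // { isOk: false, error: "boom" }
```

The real ok payload is presumably a hash rather than text; the point is only the single discriminator byte followed by a payload both sides agree on.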
@@ -25445,14 +25487,14 @@ class MainReady extends State {
25445
25487
  sendBlock(port, block) {
25446
25488
  // TODO [ToDr] How to make a better API to pass this binary data around?
25447
25489
  // Currently we don't guarantee that the underlying buffer is actually `ArrayBuffer`.
25448
- port.sendSignal("block", block, [block.buffer]);
25490
+ port.sendSignal("block", block, []);
25449
25491
  }
25450
25492
  async importBlock(port, block) {
25451
- const res = await port.sendRequest("importBlock", block, [block.buffer]);
25493
+ const res = await port.sendRequest("importBlock", block, []);
25452
25494
  if (res instanceof Uint8Array) {
25453
25495
  return decoder_Decoder.decodeObject(importBlockResultCodec, res);
25454
25496
  }
25455
- return result_Result.error("Invalid worker response.");
25497
+ return result_Result.error("Invalid worker response.", () => "Invalid worker response: expected Uint8Array");
25456
25498
  }
25457
25499
  async getStateEntries(port, hash) {
25458
25500
  const res = await port.sendRequest("getStateEntries", hash, [hash.buffer]);
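The two changes in `MainReady` stop listing `block.buffer` as a transferable, matching the comment above: transferring only works when the view is backed by a plain `ArrayBuffer`, while omitting the transfer list falls back to a structured-clone copy that has no such requirement. Using the standard Worker `postMessage` API for illustration (the package's `port.sendSignal`/`sendRequest` are its own wrappers, and `worker.js` is a placeholder path):

```ts
// Browser-style worker purely for illustration.
const worker = new Worker("worker.js");
const block = new Uint8Array([1, 2, 3, 4]);

// Zero-copy transfer: only valid if block.buffer is a transferable ArrayBuffer;
// after the call, the buffer is detached on the sending side.
// worker.postMessage({ kind: "block", data: block }, [block.buffer]);

// Structured-clone copy: costs a copy for large blocks, but works regardless of
// how the Uint8Array is backed.
worker.postMessage({ kind: "block", data: block });
```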
@@ -25564,13 +25606,13 @@ class ImporterReady extends State {
25564
25606
  response = result_Result.ok(this.importer.getBestStateRootHash() ?? ZERO_HASH.asOpaque());
25565
25607
  }
25566
25608
  else {
25567
- response = result_Result.error(resultToString(res));
25609
+ response = result_Result.error(resultToString(res), () => resultToString(res));
25568
25610
  }
25569
25611
  }
25570
25612
  catch (e) {
25571
25613
  state_machine_logger.error `Failed to import block: ${e}`;
25572
25614
  state_machine_logger.error `${e instanceof Error ? e.stack : ""}`;
25573
- response = result_Result.error(`${e}`);
25615
+ response = result_Result.error(`${e}`, () => `${e}`);
25574
25616
  }
25575
25617
  const encoded = encoder_Encoder.encodeObject(importBlockResultCodec, response);
25576
25618
  return {