@typeberry/convert 0.2.0-74f246e → 0.2.0-adde0dd

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/index.js +396 -362
  2. package/index.js.map +1 -1
  3. package/package.json +1 -1
package/index.js CHANGED
@@ -4544,7 +4544,7 @@ function result_resultToString(res) {
  if (res.isOk) {
  return `OK: ${typeof res.ok === "symbol" ? res.ok.toString() : res.ok}`;
  }
- return `${res.details}\nError: ${maybeTaggedErrorToString(res.error)}`;
+ return `${res.details()}\nError: ${maybeTaggedErrorToString(res.error)}`;
  }
  /** An indication of two possible outcomes returned from a function. */
  const result_Result = {
@@ -4558,7 +4558,7 @@ const result_Result = {
  };
  },
  /** Create new [`Result`] with `Error` status. */
- error: (error, details = "") => {
+ error: (error, details) => {
  debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
  return {
  isOk: false,
@@ -4677,7 +4677,7 @@ function test_deepEqual(actual, expected, { context = [], errorsCollector, ignor
  }
  if (actual.isError && expected.isError) {
  test_deepEqual(actual.error, expected.error, { context: ctx.concat(["error"]), errorsCollector: errors, ignore });
- test_deepEqual(actual.details, expected.details, {
+ test_deepEqual(actual.details(), expected.details(), {
  context: ctx.concat(["details"]),
  errorsCollector: errors,
  // display details when error does not match
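The three hunks above change the `Result` API: `error(error, details = "")` loses its eager string default, and call sites now invoke `res.details()` instead of reading a property, i.e. details become a lazily evaluated callback. A minimal usage sketch, assuming `details` is an optional `() => string` thunk (the error code and message below are made up for illustration):

    // Build the message lazily; it is only evaluated when someone asks for details.
    const res = result_Result.error("bad-input", () => `validation failed for ${JSON.stringify(input)}`);
    if (!res.isOk) {
      console.log(res.details()); // note the call: details is a function now
    }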
@@ -11465,6 +11465,7 @@ function accumulation_output_accumulationOutputComparator(a, b) {
 
  ;// CONCATENATED MODULE: ./packages/jam/block/gp-constants.ts
 
+
  /**
  * This file lists all of the constants defined in the GrayPaper appendix.
  *
@@ -11475,7 +11476,7 @@ function accumulation_output_accumulationOutputComparator(a, b) {
  * here are only temporarily for convenience. When we figure out better names
  * and places for these this file will be eradicated.
  *
- * https://graypaper.fluffylabs.dev/#/579bd12/413000413000
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/442300442300?v=0.7.2
  */
  /** `G_I`: The gas allocated to invoke a work-package’s Is-Authorized logic. */
  const gp_constants_G_I = 50_000_000;
@@ -11491,8 +11492,8 @@ const S = 1024;
  const gp_constants_T = 128;
  /** `W_A`: The maximum size of is-authorized code in octets. */
  const gp_constants_W_A = 64_000;
- /** `W_B`: The maximum size of an encoded work-package with extrinsic data and imports. */
- const gp_constants_W_B = 13_794_305;
+ /** `W_B`: The maximum size of the concatenated variable-size blobs, extrinsics and imported segments of a work-package, in octets */
+ const gp_constants_W_B = compatibility_Compatibility.isGreaterOrEqual(compatibility_GpVersion.V0_7_2) ? 13_791_360 : 13_794_305;
  /** `W_C`: The maximum size of service code in octets. */
  const gp_constants_W_C = 4_000_000;
  /** `W_M`: The maximum number of imports in a work-package. */
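The `W_B` hunk above is the only functional change in this file: the constant is now selected at module load time based on the targeted GrayPaper version via `Compatibility.isGreaterOrEqual`. A rough sketch of the same pattern, with a hypothetical constant name:

    // Hypothetical example of a version-gated GrayPaper constant.
    const SOME_LIMIT = compatibility_Compatibility.isGreaterOrEqual(compatibility_GpVersion.V0_7_2)
      ? 13_791_360 // value defined by GP >= 0.7.2
      : 13_794_305; // value used by older GP versions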
@@ -12591,31 +12592,29 @@ var state_update_UpdatePreimageKind;
  * 3. Update `LookupHistory` with given value.
  */
  class state_update_UpdatePreimage {
- serviceId;
  action;
- constructor(serviceId, action) {
- this.serviceId = serviceId;
+ constructor(action) {
  this.action = action;
  }
  /** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
- static provide({ serviceId, preimage, slot, }) {
- return new state_update_UpdatePreimage(serviceId, {
+ static provide({ preimage, slot }) {
+ return new state_update_UpdatePreimage({
  kind: state_update_UpdatePreimageKind.Provide,
  preimage,
  slot,
  });
  }
  /** The preimage should be removed completely from the database. */
- static remove({ serviceId, hash, length }) {
- return new state_update_UpdatePreimage(serviceId, {
+ static remove({ hash, length }) {
+ return new state_update_UpdatePreimage({
  kind: state_update_UpdatePreimageKind.Remove,
  hash,
  length,
  });
  }
  /** Update the lookup history of some preimage or add a new one (request). */
- static updateOrAdd({ serviceId, lookupHistory }) {
- return new state_update_UpdatePreimage(serviceId, {
+ static updateOrAdd({ lookupHistory }) {
+ return new state_update_UpdatePreimage({
  kind: state_update_UpdatePreimageKind.UpdateOrAdd,
  item: lookupHistory,
  });
@@ -12652,23 +12651,21 @@ var state_update_UpdateServiceKind;
  UpdateServiceKind[UpdateServiceKind["Create"] = 1] = "Create";
  })(state_update_UpdateServiceKind || (state_update_UpdateServiceKind = {}));
  /**
- * Update service info of a particular `ServiceId` or create a new one.
+ * Update service info or create a new one.
  */
  class state_update_UpdateService {
- serviceId;
  action;
- constructor(serviceId, action) {
- this.serviceId = serviceId;
+ constructor(action) {
  this.action = action;
  }
- static update({ serviceId, serviceInfo }) {
- return new state_update_UpdateService(serviceId, {
+ static update({ serviceInfo }) {
+ return new state_update_UpdateService({
  kind: state_update_UpdateServiceKind.Update,
  account: serviceInfo,
  });
  }
- static create({ serviceId, serviceInfo, lookupHistory, }) {
- return new state_update_UpdateService(serviceId, {
+ static create({ serviceInfo, lookupHistory, }) {
+ return new state_update_UpdateService({
  kind: state_update_UpdateServiceKind.Create,
  account: serviceInfo,
  lookupHistory,
@@ -12689,17 +12686,15 @@ var state_update_UpdateStorageKind;
  * Can either create/modify an entry or remove it.
  */
  class state_update_UpdateStorage {
- serviceId;
  action;
- constructor(serviceId, action) {
- this.serviceId = serviceId;
+ constructor(action) {
  this.action = action;
  }
- static set({ serviceId, storage }) {
- return new state_update_UpdateStorage(serviceId, { kind: state_update_UpdateStorageKind.Set, storage });
+ static set({ storage }) {
+ return new state_update_UpdateStorage({ kind: state_update_UpdateStorageKind.Set, storage });
  }
- static remove({ serviceId, key }) {
- return new state_update_UpdateStorage(serviceId, { kind: state_update_UpdateStorageKind.Remove, key });
+ static remove({ key }) {
+ return new state_update_UpdateStorage({ kind: state_update_UpdateStorageKind.Remove, key });
  }
  get key() {
  if (this.action.kind === state_update_UpdateStorageKind.Remove) {
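In all three classes above the `serviceId` field and constructor argument disappear; as the later hunks show, callers now group updates per service in a `Map` keyed by the service id instead of tagging every update object. A sketch of the new calling convention (variable names are illustrative; `serviceId` and `storageItem` stand for values from the surrounding code):

    // Before: UpdateStorage.set({ serviceId, storage }) pushed onto a flat array.
    // After: the update no longer knows its service; the Map key carries that information.
    const storageUpdates = new Map();
    const update = state_update_UpdateStorage.set({ storage: storageItem });
    const perService = storageUpdates.get(serviceId) ?? [];
    perService.push(update);
    storageUpdates.set(serviceId, perService);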
@@ -12908,12 +12903,12 @@ class InMemoryState extends WithDebug {
  * Modify the state and apply a single state update.
  */
  applyUpdate(update) {
- const { servicesRemoved, servicesUpdates, preimages, storage, ...rest } = update;
+ const { removed, created: _, updated, preimages, storage, ...rest } = update;
  // just assign all other variables
  Object.assign(this, rest);
  // and update the services state
  let result;
- result = this.updateServices(servicesUpdates);
+ result = this.updateServices(updated);
  if (result.isError) {
  return result;
  }
@@ -12925,7 +12920,7 @@ class InMemoryState extends WithDebug {
  if (result.isError) {
  return result;
  }
- this.removeServices(servicesRemoved);
+ this.removeServices(removed);
  return result_Result.ok(result_OK);
  }
  removeServices(servicesRemoved) {
@@ -12934,89 +12929,102 @@ class InMemoryState extends WithDebug {
  this.services.delete(serviceId);
  }
  }
- updateStorage(storage) {
- for (const { serviceId, action } of storage ?? []) {
- const { kind } = action;
- const service = this.services.get(serviceId);
- if (service === undefined) {
- return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update storage of non-existing service: ${serviceId}`);
- }
- if (kind === state_update_UpdateStorageKind.Set) {
- const { key, value } = action.storage;
- service.data.storage.set(key.toString(), service_StorageItem.create({ key, value }));
- }
- else if (kind === state_update_UpdateStorageKind.Remove) {
- const { key } = action;
- debug_check `
+ updateStorage(storageUpdates) {
+ if (storageUpdates === undefined) {
+ return result_Result.ok(result_OK);
+ }
+ for (const [serviceId, updates] of storageUpdates.entries()) {
+ for (const update of updates) {
+ const { kind } = update.action;
+ const service = this.services.get(serviceId);
+ if (service === undefined) {
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update storage of non-existing service: ${serviceId}`);
+ }
+ if (kind === state_update_UpdateStorageKind.Set) {
+ const { key, value } = update.action.storage;
+ service.data.storage.set(key.toString(), service_StorageItem.create({ key, value }));
+ }
+ else if (kind === state_update_UpdateStorageKind.Remove) {
+ const { key } = update.action;
+ debug_check `
  ${service.data.storage.has(key.toString())}
- Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+ Attempting to remove non-existing storage item at ${serviceId}: ${update.action.key}
  `;
- service.data.storage.delete(key.toString());
- }
- else {
- debug_assertNever(kind);
+ service.data.storage.delete(key.toString());
+ }
+ else {
+ debug_assertNever(kind);
+ }
  }
  }
  return result_Result.ok(result_OK);
  }
- updatePreimages(preimages) {
- for (const { serviceId, action } of preimages ?? []) {
+ updatePreimages(preimagesUpdates) {
+ if (preimagesUpdates === undefined) {
+ return result_Result.ok(result_OK);
+ }
+ for (const [serviceId, updates] of preimagesUpdates.entries()) {
  const service = this.services.get(serviceId);
  if (service === undefined) {
- return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update preimage of non-existing service: ${serviceId}`);
- }
- const { kind } = action;
- if (kind === state_update_UpdatePreimageKind.Provide) {
- const { preimage, slot } = action;
- if (service.data.preimages.has(preimage.hash)) {
- return result_Result.error(in_memory_state_UpdateError.PreimageExists, `Overwriting existing preimage at ${serviceId}: ${preimage}`);
- }
- service.data.preimages.set(preimage.hash, preimage);
- if (slot !== null) {
- const lookupHistory = service.data.lookupHistory.get(preimage.hash);
- const length = numbers_tryAsU32(preimage.blob.length);
- const lookup = new service_LookupHistoryItem(preimage.hash, length, service_tryAsLookupHistorySlots([slot]));
- if (lookupHistory === undefined) {
- // no lookup history for that preimage at all (edge case, should be requested)
- service.data.lookupHistory.set(preimage.hash, [lookup]);
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update preimage of non-existing service: ${serviceId}`);
+ }
+ for (const update of updates) {
+ const { kind } = update.action;
+ if (kind === state_update_UpdatePreimageKind.Provide) {
+ const { preimage, slot } = update.action;
+ if (service.data.preimages.has(preimage.hash)) {
+ return result_Result.error(in_memory_state_UpdateError.PreimageExists, () => `Overwriting existing preimage at ${serviceId}: ${preimage}`);
+ }
+ service.data.preimages.set(preimage.hash, preimage);
+ if (slot !== null) {
+ const lookupHistory = service.data.lookupHistory.get(preimage.hash);
+ const length = numbers_tryAsU32(preimage.blob.length);
+ const lookup = new service_LookupHistoryItem(preimage.hash, length, service_tryAsLookupHistorySlots([slot]));
+ if (lookupHistory === undefined) {
+ // no lookup history for that preimage at all (edge case, should be requested)
+ service.data.lookupHistory.set(preimage.hash, [lookup]);
+ }
+ else {
+ // insert or replace exiting entry
+ const index = lookupHistory.map((x) => x.length).indexOf(length);
+ lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
+ }
  }
- else {
- // insert or replace exiting entry
- const index = lookupHistory.map((x) => x.length).indexOf(length);
- lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
+ }
+ else if (kind === state_update_UpdatePreimageKind.Remove) {
+ const { hash, length } = update.action;
+ service.data.preimages.delete(hash);
+ const history = service.data.lookupHistory.get(hash) ?? [];
+ const idx = history.map((x) => x.length).indexOf(length);
+ if (idx !== -1) {
+ history.splice(idx, 1);
  }
  }
- }
- else if (kind === state_update_UpdatePreimageKind.Remove) {
- const { hash, length } = action;
- service.data.preimages.delete(hash);
- const history = service.data.lookupHistory.get(hash) ?? [];
- const idx = history.map((x) => x.length).indexOf(length);
- if (idx !== -1) {
- history.splice(idx, 1);
+ else if (kind === state_update_UpdatePreimageKind.UpdateOrAdd) {
+ const { item } = update.action;
+ const history = service.data.lookupHistory.get(item.hash) ?? [];
+ const existingIdx = history.map((x) => x.length).indexOf(item.length);
+ const removeCount = existingIdx === -1 ? 0 : 1;
+ history.splice(existingIdx, removeCount, item);
+ service.data.lookupHistory.set(item.hash, history);
+ }
+ else {
+ debug_assertNever(kind);
  }
- }
- else if (kind === state_update_UpdatePreimageKind.UpdateOrAdd) {
- const { item } = action;
- const history = service.data.lookupHistory.get(item.hash) ?? [];
- const existingIdx = history.map((x) => x.length).indexOf(item.length);
- const removeCount = existingIdx === -1 ? 0 : 1;
- history.splice(existingIdx, removeCount, item);
- service.data.lookupHistory.set(item.hash, history);
- }
- else {
- debug_assertNever(kind);
  }
  }
  return result_Result.ok(result_OK);
  }
  updateServices(servicesUpdates) {
- for (const { serviceId, action } of servicesUpdates ?? []) {
- const { kind, account } = action;
+ if (servicesUpdates === undefined) {
+ return result_Result.ok(result_OK);
+ }
+ for (const [serviceId, update] of servicesUpdates.entries()) {
+ const { kind, account } = update.action;
  if (kind === state_update_UpdateServiceKind.Create) {
- const { lookupHistory } = action;
+ const { lookupHistory } = update.action;
  if (this.services.has(serviceId)) {
- return result_Result.error(in_memory_state_UpdateError.DuplicateService, `${serviceId} already exists!`);
+ return result_Result.error(in_memory_state_UpdateError.DuplicateService, () => `${serviceId} already exists!`);
  }
  this.services.set(serviceId, new InMemoryService(serviceId, {
  info: account,
@@ -13028,7 +13036,7 @@ class InMemoryState extends WithDebug {
  else if (kind === state_update_UpdateServiceKind.Update) {
  const existingService = this.services.get(serviceId);
  if (existingService === undefined) {
- return Result.error(in_memory_state_UpdateError.NoService, `Cannot update ${serviceId} because it does not exist.`);
+ return Result.error(in_memory_state_UpdateError.NoService, () => `Cannot update ${serviceId} because it does not exist.`);
  }
  existingService.data.info = account;
  }
@@ -14938,7 +14946,6 @@ function hasher_getKeccakTrieHasher(hasher) {
 
 
 
-
  /** What should be done with that key? */
  var StateEntryUpdateAction;
  (function (StateEntryUpdateAction) {
@@ -14954,76 +14961,88 @@ function* serialize_state_update_serializeStateUpdate(spec, blake2b, update) {
  yield* serializeBasicKeys(spec, update);
  const encode = (codec, val) => Encoder.encodeObject(codec, val, spec);
  // then let's proceed with service updates
- yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
+ yield* serializeServiceUpdates(update.updated, encode, blake2b);
  yield* serializePreimages(update.preimages, encode, blake2b);
  yield* serializeStorage(update.storage, blake2b);
- yield* serializeRemovedServices(update.servicesRemoved);
+ yield* serializeRemovedServices(update.removed);
  }
  function* serializeRemovedServices(servicesRemoved) {
- for (const serviceId of servicesRemoved ?? []) {
+ if (servicesRemoved === undefined) {
+ return;
+ }
+ for (const serviceId of servicesRemoved) {
  // TODO [ToDr] what about all data associated with a service?
  const codec = serialize.serviceData(serviceId);
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
  }
  }
- function* serializeStorage(storage, blake2b) {
- for (const { action, serviceId } of storage ?? []) {
- switch (action.kind) {
- case UpdateStorageKind.Set: {
- const key = action.storage.key;
- const codec = serialize.serviceStorage(blake2b, serviceId, key);
- yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
- break;
- }
- case UpdateStorageKind.Remove: {
- const key = action.key;
- const codec = serialize.serviceStorage(blake2b, serviceId, key);
- yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
- break;
+ function* serializeStorage(storageUpdates, blake2b) {
+ if (storageUpdates === undefined) {
+ return;
+ }
+ for (const [serviceId, updates] of storageUpdates.entries()) {
+ for (const { action } of updates) {
+ switch (action.kind) {
+ case UpdateStorageKind.Set: {
+ const key = action.storage.key;
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
+ yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
+ break;
+ }
+ case UpdateStorageKind.Remove: {
+ const key = action.key;
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
+ yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+ break;
+ }
  }
- default:
- assertNever(action);
  }
  }
  }
- function* serializePreimages(preimages, encode, blake2b) {
- for (const { action, serviceId } of preimages ?? []) {
- switch (action.kind) {
- case UpdatePreimageKind.Provide: {
- const { hash, blob } = action.preimage;
- const codec = serialize.servicePreimages(blake2b, serviceId, hash);
- yield [StateEntryUpdateAction.Insert, codec.key, blob];
- if (action.slot !== null) {
- const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
- yield [
- StateEntryUpdateAction.Insert,
- codec2.key,
- encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
- ];
+ function* serializePreimages(preimagesUpdates, encode, blake2b) {
+ if (preimagesUpdates === undefined) {
+ return;
+ }
+ for (const [serviceId, updates] of preimagesUpdates.entries()) {
+ for (const { action } of updates) {
+ switch (action.kind) {
+ case UpdatePreimageKind.Provide: {
+ const { hash, blob } = action.preimage;
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
+ yield [StateEntryUpdateAction.Insert, codec.key, blob];
+ if (action.slot !== null) {
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
+ yield [
+ StateEntryUpdateAction.Insert,
+ codec2.key,
+ encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
+ ];
+ }
+ break;
+ }
+ case UpdatePreimageKind.UpdateOrAdd: {
+ const { hash, length, slots } = action.item;
+ const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+ yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
+ break;
+ }
+ case UpdatePreimageKind.Remove: {
+ const { hash, length } = action;
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
+ yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+ yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
+ break;
  }
- break;
- }
- case UpdatePreimageKind.UpdateOrAdd: {
- const { hash, length, slots } = action.item;
- const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
- yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
- break;
- }
- case UpdatePreimageKind.Remove: {
- const { hash, length } = action;
- const codec = serialize.servicePreimages(blake2b, serviceId, hash);
- yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
- const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
- yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
- break;
  }
- default:
- assertNever(action);
  }
  }
  }
  function* serializeServiceUpdates(servicesUpdates, encode, blake2b) {
- for (const { action, serviceId } of servicesUpdates ?? []) {
+ if (servicesUpdates === undefined) {
+ return;
+ }
+ for (const [serviceId, { action }] of servicesUpdates.entries()) {
  // new service being created or updated
  const codec = serialize.serviceData(serviceId);
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, action.account)];
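The serializers above now take the per-service `Map` directly and guard against `undefined` instead of defaulting to an empty array. A condensed sketch of the consumption pattern (the generator structure mirrors the code above; the body comment marks where the real key encoding happens):

    // Each serializer yields [action, key, value] triples for the state trie.
    function* serializeStorageSketch(storageUpdates, blake2b) {
      if (storageUpdates === undefined) {
        return; // nothing to serialize
      }
      for (const [serviceId, updates] of storageUpdates.entries()) {
        for (const { action } of updates) {
          // ...turn each action into a trie key insert/remove, as above...
        }
      }
    }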
@@ -15515,13 +15534,13 @@ class LeafDb {
  */
  static fromLeavesBlob(blob, db) {
  if (blob.length % TRIE_NODE_BYTES !== 0) {
- return Result.error(LeafDbError.InvalidLeafData, `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
+ return Result.error(LeafDbError.InvalidLeafData, () => `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
  }
  const leaves = SortedSet.fromArray(leafComparator, []);
  for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
  const node = new TrieNode(nodeData.raw);
  if (node.getNodeType() === NodeType.Branch) {
- return Result.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
+ return Result.error(LeafDbError.InvalidLeafData, () => `Branch node detected: ${nodeData}`);
  }
  leaves.insert(node.asLeafNode());
  }
@@ -15742,32 +15761,33 @@ class preimages_Preimages {
  }
  if (prevPreimage.requester > currPreimage.requester ||
  currPreimage.blob.compare(prevPreimage.blob).isLessOrEqual()) {
- return Result.error(PreimagesErrorCode.PreimagesNotSortedUnique);
+ return Result.error(PreimagesErrorCode.PreimagesNotSortedUnique, () => `Preimages not sorted/unique at index ${i}`);
  }
  }
  const { preimages, slot } = input;
- const pendingChanges = [];
+ const pendingChanges = new Map();
  // select preimages for integration
  for (const preimage of preimages) {
  const { requester, blob } = preimage;
  const hash = this.blake2b.hashBytes(blob).asOpaque();
  const service = this.state.getService(requester);
  if (service === null) {
- return Result.error(PreimagesErrorCode.AccountNotFound);
+ return Result.error(PreimagesErrorCode.AccountNotFound, () => `Service not found: ${requester}`);
  }
  const hasPreimage = service.hasPreimage(hash);
  const slots = service.getLookupHistory(hash, tryAsU32(blob.length));
  // https://graypaper.fluffylabs.dev/#/5f542d7/181800181900
  // https://graypaper.fluffylabs.dev/#/5f542d7/116f0011a500
  if (hasPreimage || slots === null || !LookupHistoryItem.isRequested(slots)) {
- return Result.error(PreimagesErrorCode.PreimageUnneeded);
+ return Result.error(PreimagesErrorCode.PreimageUnneeded, () => `Preimage unneeded: requester=${requester}, hash=${hash}, hasPreimage=${hasPreimage}, isRequested=${slots !== null && LookupHistoryItem.isRequested(slots)}`);
  }
  // https://graypaper.fluffylabs.dev/#/5f542d7/18c00018f300
- pendingChanges.push(UpdatePreimage.provide({
- serviceId: requester,
+ const updates = pendingChanges.get(requester) ?? [];
+ updates.push(UpdatePreimage.provide({
  preimage: PreimageItem.create({ hash, blob }),
  slot,
  }));
+ pendingChanges.set(requester, updates);
  }
  return Result.ok({
  preimages: pendingChanges,
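`pendingChanges` above switches from a flat array to a `Map` keyed by the requesting service, so the `preimages` field of the transition result is now a map of per-service update lists. A sketch of how a consumer would walk that shape (the consumer loop below is illustrative, not part of the package):

    // Assuming `result.ok.preimages` is the Map built above.
    for (const [serviceId, updates] of result.ok.preimages.entries()) {
      for (const update of updates) {
        // apply `update` to the state of `serviceId`
      }
    }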
@@ -15805,7 +15825,7 @@ class block_verifier_BlockVerifier {
  const headerHash = this.hasher.header(headerView);
  // check if current block is already imported
  if (this.blocks.getHeader(headerHash.hash) !== null) {
- return Result.error(BlockVerifierError.AlreadyImported, `Block ${headerHash.hash} is already imported.`);
+ return Result.error(BlockVerifierError.AlreadyImported, () => `Block ${headerHash.hash} is already imported.`);
  }
  // Check if parent block exists.
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c82000c8200?v=0.6.5
@@ -15815,14 +15835,14 @@ class block_verifier_BlockVerifier {
  if (!parentHash.isEqualTo(block_verifier_ZERO_HASH)) {
  const parentBlock = this.blocks.getHeader(parentHash);
  if (parentBlock === null) {
- return Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
+ return Result.error(BlockVerifierError.ParentNotFound, () => `Parent ${parentHash.toString()} not found`);
  }
  // Check if the time slot index is consecutive and not from future.
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c02010c0201?v=0.6.5
  const timeslot = headerView.timeSlotIndex.materialize();
  const parentTimeslot = parentBlock.timeSlotIndex.materialize();
  if (timeslot <= parentTimeslot) {
- return Result.error(BlockVerifierError.InvalidTimeSlot, `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
+ return Result.error(BlockVerifierError.InvalidTimeSlot, () => `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
  }
  }
  // Check if extrinsic is valid.
@@ -15830,17 +15850,17 @@ class block_verifier_BlockVerifier {
  const extrinsicHash = headerView.extrinsicHash.materialize();
  const extrinsicMerkleCommitment = this.hasher.extrinsic(block.extrinsic.view());
  if (!extrinsicHash.isEqualTo(extrinsicMerkleCommitment.hash)) {
- return Result.error(BlockVerifierError.InvalidExtrinsic, `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
+ return Result.error(BlockVerifierError.InvalidExtrinsic, () => `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
  }
  // Check if the state root is valid.
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c18010c1801?v=0.6.5
  const stateRoot = headerView.priorStateRoot.materialize();
  const posteriorStateRoot = this.blocks.getPostStateRoot(parentHash);
  if (posteriorStateRoot === null) {
- return Result.error(BlockVerifierError.StateRootNotFound, `Posterior state root ${parentHash.toString()} not found`);
+ return Result.error(BlockVerifierError.StateRootNotFound, () => `Posterior state root ${parentHash.toString()} not found`);
  }
  if (!stateRoot.isEqualTo(posteriorStateRoot)) {
- return Result.error(BlockVerifierError.InvalidStateRoot, `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
+ return Result.error(BlockVerifierError.InvalidStateRoot, () => `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
  }
  return Result.ok(headerHash.hash);
  }
@@ -15941,7 +15961,7 @@ class disputes_Disputes {
  // check if culprits are sorted by key
  // https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
  if (!isUniqueSortedBy(disputes.culprits, "key")) {
- return Result.error(DisputesErrorCode.CulpritsNotSortedUnique);
+ return Result.error(DisputesErrorCode.CulpritsNotSortedUnique, () => "Culprits are not uniquely sorted by key");
  }
  const culpritsLength = disputes.culprits.length;
  for (let i = 0; i < culpritsLength; i++) {
@@ -15950,24 +15970,24 @@ class disputes_Disputes {
  // https://graypaper.fluffylabs.dev/#/579bd12/125501125501
  const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
  if (isInPunishSet) {
- return Result.error(DisputesErrorCode.OffenderAlreadyReported);
+ return Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: culprit ${i}, key=${key}`);
  }
  // check if the guarantor key is correct
  // https://graypaper.fluffylabs.dev/#/85129da/125501125501?v=0.6.3
  if (!allValidatorKeys.has(key)) {
- return Result.error(DisputesErrorCode.BadGuarantorKey);
+ return Result.error(DisputesErrorCode.BadGuarantorKey, () => `Bad guarantor key: culprit ${i}, key=${key}`);
  }
  // verify if the culprit will be in new bad set
  // https://graypaper.fluffylabs.dev/#/579bd12/124601124601
  const isInNewBadSet = newItems.asDictionaries().badSet.has(workReportHash);
  if (!isInNewBadSet) {
- return Result.error(DisputesErrorCode.CulpritsVerdictNotBad);
+ return Result.error(DisputesErrorCode.CulpritsVerdictNotBad, () => `Culprit verdict not bad: culprit ${i}, work report=${workReportHash}`);
  }
  // verify culprit signature
  // https://graypaper.fluffylabs.dev/#/579bd12/125c01125c01
  const result = verificationResult.culprits[i];
  if (!result?.isValid) {
- return Result.error(DisputesErrorCode.BadSignature, `Invalid signature for culprit: ${i}`);
+ return Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for culprit: ${i}`);
  }
  }
  return Result.ok(null);
@@ -15976,7 +15996,7 @@ class disputes_Disputes {
  // check if faults are sorted by key
  // https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
  if (!isUniqueSortedBy(disputes.faults, "key")) {
- return Result.error(DisputesErrorCode.FaultsNotSortedUnique);
+ return Result.error(DisputesErrorCode.FaultsNotSortedUnique, () => "Faults are not uniquely sorted by key");
  }
  const faultsLength = disputes.faults.length;
  for (let i = 0; i < faultsLength; i++) {
@@ -15985,12 +16005,12 @@ class disputes_Disputes {
  // https://graypaper.fluffylabs.dev/#/579bd12/12a20112a201
  const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
  if (isInPunishSet) {
- return Result.error(DisputesErrorCode.OffenderAlreadyReported);
+ return Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: fault ${i}, key=${key}`);
  }
  // check if the auditor key is correct
  // https://graypaper.fluffylabs.dev/#/85129da/12a20112a201?v=0.6.3
  if (!allValidatorKeys.has(key)) {
- return Result.error(DisputesErrorCode.BadAuditorKey);
+ return Result.error(DisputesErrorCode.BadAuditorKey, () => `Bad auditor key: fault ${i}, key=${key}`);
  }
  // verify if the fault will be included in new good/bad set
  // it may be not correct as in GP there is "iff" what means it should be rather
@@ -16002,14 +16022,14 @@ class disputes_Disputes {
  const isInNewGoodSet = goodSet.has(workReportHash);
  const isInNewBadSet = badSet.has(workReportHash);
  if (isInNewGoodSet || !isInNewBadSet) {
- return Result.error(DisputesErrorCode.FaultVerdictWrong);
+ return Result.error(DisputesErrorCode.FaultVerdictWrong, () => `Fault verdict wrong: fault ${i}, work report=${workReportHash}, inGood=${isInNewGoodSet}, inBad=${isInNewBadSet}`);
  }
  }
  // verify fault signature. Verification was done earlier, here we only check the result.
  // https://graypaper.fluffylabs.dev/#/579bd12/12a90112a901
  const result = verificationResult.faults[i];
  if (!result.isValid) {
- return Result.error(DisputesErrorCode.BadSignature, `Invalid signature for fault: ${i}`);
+ return Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for fault: ${i}`);
  }
  }
  return Result.ok(null);
@@ -16018,32 +16038,32 @@ class disputes_Disputes {
  // check if verdicts are correctly sorted
  // https://graypaper.fluffylabs.dev/#/579bd12/12c40112c401
  if (!isUniqueSortedBy(disputes.verdicts, "workReportHash")) {
- return Result.error(DisputesErrorCode.VerdictsNotSortedUnique);
+ return Result.error(DisputesErrorCode.VerdictsNotSortedUnique, () => "Verdicts are not uniquely sorted by work report hash");
  }
  // check if judgement are correctly sorted
  // https://graypaper.fluffylabs.dev/#/579bd12/123702123802
  if (disputes.verdicts.some((verdict) => !isUniqueSortedByIndex(verdict.votes))) {
- return Result.error(DisputesErrorCode.JudgementsNotSortedUnique);
+ return Result.error(DisputesErrorCode.JudgementsNotSortedUnique, () => "Judgements are not uniquely sorted by index");
  }
  const currentEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
  let voteSignatureIndex = 0;
  for (const { votesEpoch, votes } of disputes.verdicts) {
  // https://graypaper.fluffylabs.dev/#/579bd12/12bb0012bc00
  if (votesEpoch !== currentEpoch && votesEpoch + 1 !== currentEpoch) {
- return Result.error(DisputesErrorCode.BadJudgementAge);
+ return Result.error(DisputesErrorCode.BadJudgementAge, () => `Bad judgement age: epoch=${votesEpoch}, current=${currentEpoch}`);
  }
  const k = votesEpoch === currentEpoch ? this.state.currentValidatorData : this.state.previousValidatorData;
  for (const { index } of votes) {
  const key = k[index]?.ed25519;
  // no particular GP fragment but I think we don't believe in ghosts
  if (key === undefined) {
- return Result.error(DisputesErrorCode.BadValidatorIndex);
+ return Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index: ${index} in epoch ${votesEpoch}`);
  }
  // verify vote signature. Verification was done earlier, here we only check the result.
  // https://graypaper.fluffylabs.dev/#/579bd12/12cd0012cd00
  const result = verificationResult.judgements[voteSignatureIndex];
  if (!result.isValid) {
- return Result.error(DisputesErrorCode.BadSignature, `Invalid signature for judgement: ${voteSignatureIndex}`);
+ return Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for judgement: ${voteSignatureIndex}`);
  }
  voteSignatureIndex += 1;
  }
@@ -16059,7 +16079,7 @@ class disputes_Disputes {
  const isInBadSet = badSet.has(verdict.workReportHash);
  const isInWonkySet = wonkySet.has(verdict.workReportHash);
  if (isInGoodSet || isInBadSet || isInWonkySet) {
- return Result.error(DisputesErrorCode.AlreadyJudged);
+ return Result.error(DisputesErrorCode.AlreadyJudged, () => `Work report already judged: ${verdict.workReportHash}`);
  }
  }
  return Result.ok(null);
@@ -16090,7 +16110,7 @@ class disputes_Disputes {
  // https://graypaper.fluffylabs.dev/#/579bd12/12f10212fc02
  const f = disputes.faults.find((x) => x.workReportHash.isEqualTo(r));
  if (f === undefined) {
- return Result.error(DisputesErrorCode.NotEnoughFaults);
+ return Result.error(DisputesErrorCode.NotEnoughFaults, () => `Not enough faults for work report: ${r}`);
  }
  }
  else if (sum === 0) {
@@ -16099,13 +16119,13 @@ class disputes_Disputes {
  const c1 = disputes.culprits.find((x) => x.workReportHash.isEqualTo(r));
  const c2 = disputes.culprits.findLast((x) => x.workReportHash.isEqualTo(r));
  if (c1 === c2) {
- return Result.error(DisputesErrorCode.NotEnoughCulprits);
+ return Result.error(DisputesErrorCode.NotEnoughCulprits, () => `Not enough culprits for work report: ${r}`);
  }
  }
  else if (sum !== this.chainSpec.thirdOfValidators) {
  // positive votes count is not correct
  // https://graypaper.fluffylabs.dev/#/579bd12/125002128102
- return Result.error(DisputesErrorCode.BadVoteSplit);
+ return Result.error(DisputesErrorCode.BadVoteSplit, () => `Bad vote split: sum=${sum}, expected=${this.chainSpec.thirdOfValidators} for work report ${r}`);
  }
  }
  return Result.ok(null);
@@ -16193,7 +16213,7 @@ class disputes_Disputes {
  const validator = k[j.index];
  // no particular GP fragment but I think we don't believe in ghosts
  if (validator === undefined) {
- return Result.error(DisputesErrorCode.BadValidatorIndex);
+ return Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index in signature verification: ${j.index}`);
  }
  const key = validator.ed25519;
  // verify vote signature
@@ -16301,7 +16321,7 @@ const ringCommitmentCache = [];
  async function verifySeal(bandersnatch, authorKey, signature, payload, encodedUnsealedHeader) {
  const sealResult = await bandersnatch.verifySeal(authorKey.raw, signature.raw, payload.raw, encodedUnsealedHeader.raw);
  if (sealResult[RESULT_INDEX] === ResultValues.Error) {
- return result_Result.error(null);
+ return result_Result.error(null, () => "Bandersnatch VRF seal verification failed");
  }
  return result_Result.ok(bytes_Bytes.fromBlob(sealResult.subarray(1), hash_HASH_SIZE).asOpaque());
  }
@@ -16327,7 +16347,7 @@ function getRingCommitment(bandersnatch, validators) {
  async function getRingCommitmentNoCache(bandersnatch, keys) {
  const commitmentResult = await bandersnatch.getRingCommitment(keys.raw);
  if (commitmentResult[RESULT_INDEX] === ResultValues.Error) {
- return result_Result.error(null);
+ return result_Result.error(null, () => "Bandersnatch ring commitment calculation failed");
  }
  return result_Result.ok(bytes_Bytes.fromBlob(commitmentResult.subarray(1), BANDERSNATCH_RING_ROOT_BYTES).asOpaque());
  }
@@ -16502,7 +16522,7 @@ class safrole_Safrole {
  epochRoot: epochRootResult.ok,
  });
  }
- return Result.error(SafroleErrorCode.IncorrectData);
+ return Result.error(SafroleErrorCode.IncorrectData, () => "Safrole: failed to get epoch root for validator keys");
  }
  /**
  * Ticket sequencer that is used in standard mode
@@ -16593,10 +16613,10 @@ class safrole_Safrole {
  for (let i = 1; i < ticketsLength; i++) {
  const order = tickets[i - 1].id.compare(tickets[i].id);
  if (order.isEqual()) {
- return Result.error(SafroleErrorCode.DuplicateTicket);
+ return Result.error(SafroleErrorCode.DuplicateTicket, () => `Safrole: duplicate ticket found at index ${i}`);
  }
  if (order.isGreater()) {
- return Result.error(SafroleErrorCode.BadTicketOrder);
+ return Result.error(SafroleErrorCode.BadTicketOrder, () => `Safrole: bad ticket order at index ${i}`);
  }
  }
  return Result.ok(null);
@@ -16623,7 +16643,7 @@ class safrole_Safrole {
  attempt: ticket.attempt,
  }));
  if (!verificationResult.every((x) => x.isValid)) {
- return Result.error(SafroleErrorCode.BadTicketProof);
+ return Result.error(SafroleErrorCode.BadTicketProof, () => "Safrole: invalid ticket proof in extrinsic");
  }
  /**
  * Verify if tickets are sorted and unique
@@ -16632,7 +16652,7 @@ class safrole_Safrole {
  */
  const ticketsVerifcationResult = this.verifyTickets(tickets);
  if (ticketsVerifcationResult.isError) {
- return Result.error(ticketsVerifcationResult.error);
+ return Result.error(ticketsVerifcationResult.error, ticketsVerifcationResult.details);
  }
  if (this.isEpochChanged(timeslot)) {
  return Result.ok(tickets);
@@ -16641,7 +16661,7 @@ class safrole_Safrole {
  const ticketsFromExtrinsic = SortedSet.fromSortedArray(ticketComparator, tickets);
  const mergedTickets = SortedSet.fromTwoSortedCollections(ticketsFromState, ticketsFromExtrinsic);
  if (ticketsFromState.length + ticketsFromExtrinsic.length !== mergedTickets.length) {
- return Result.error(SafroleErrorCode.DuplicateTicket);
+ return Result.error(SafroleErrorCode.DuplicateTicket, () => "Safrole: duplicate ticket when merging state and extrinsic tickets");
  }
  /**
  * Remove tickets if size of accumulator exceeds E (epoch length).
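Besides attaching new lazy messages, several call sites above forward the inner result's `details` when re-wrapping an error (e.g. `Result.error(ticketsVerifcationResult.error, ticketsVerifcationResult.details)`), so the original context survives the rewrap without being evaluated. A small sketch of that propagation pattern, with hypothetical function names:

    // Hypothetical wrapper: keep the inner error code and its lazy details intact.
    function outerStep() {
      const inner = innerStep();
      if (inner.isError) {
        return Result.error(inner.error, inner.details);
      }
      return Result.ok(inner.ok);
    }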
@@ -16710,24 +16730,24 @@ class safrole_Safrole {
  }
  async transition(input) {
  if (this.state.timeslot >= input.slot) {
- return Result.error(SafroleErrorCode.BadSlot);
+ return Result.error(SafroleErrorCode.BadSlot, () => `Safrole: bad slot, state timeslot ${this.state.timeslot} >= input slot ${input.slot}`);
  }
  if (!this.isExtrinsicLengthValid(input.slot, input.extrinsic)) {
- return Result.error(SafroleErrorCode.UnexpectedTicket);
+ return Result.error(SafroleErrorCode.UnexpectedTicket, () => `Safrole: unexpected ticket, invalid extrinsic length ${input.extrinsic.length}`);
  }
  if (!this.areTicketAttemptsValid(input.extrinsic)) {
- return Result.error(SafroleErrorCode.BadTicketAttempt);
+ return Result.error(SafroleErrorCode.BadTicketAttempt, () => "Safrole: bad ticket attempt value in extrinsic");
  }
  const validatorKeysResult = await this.getValidatorKeys(input.slot, input.punishSet);
  if (validatorKeysResult.isError) {
- return Result.error(validatorKeysResult.error);
+ return Result.error(validatorKeysResult.error, validatorKeysResult.details);
  }
  const { nextValidatorData, currentValidatorData, previousValidatorData, epochRoot } = validatorKeysResult.ok;
  const entropy = this.getEntropy(input.slot, input.entropy);
  const sealingKeySeries = this.getSlotKeySequence(input.slot, currentValidatorData, entropy[2]);
  const newTicketsAccumulatorResult = await this.getNewTicketAccumulator(input.slot, input.extrinsic, this.state.nextValidatorData, epochRoot, entropy[2]);
  if (newTicketsAccumulatorResult.isError) {
- return Result.error(newTicketsAccumulatorResult.error);
+ return Result.error(newTicketsAccumulatorResult.error, newTicketsAccumulatorResult.details);
  }
  const stateUpdate = {
  nextValidatorData,
@@ -16761,14 +16781,14 @@ function compareWithEncoding(chainSpec, error, actual, expected, codec) {
  if (actual === null || expected === null) {
  // if one of them is `null`, both need to be.
  if (actual !== expected) {
- return Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
+ return Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
  }
  return Result.ok(OK);
  }
  // compare the literal encoding.
  const encoded = Encoder.encodeObject(codec, actual, chainSpec);
  if (!encoded.isEqualTo(expected.encoded())) {
- return Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
+ return Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
  }
  return Result.ok(OK);
  }
@@ -16811,7 +16831,7 @@ class safrole_seal_SafroleSeal {
  const blockAuthorKey = state.currentValidatorData.at(blockAuthorIndex)?.bandersnatch;
  const entropySourceResult = await bandersnatchVrf.verifySeal(await this.bandersnatch, blockAuthorKey ?? BANDERSNATCH_ZERO_KEY, headerView.entropySource.materialize(), payload, BytesBlob.blobFromNumbers([]));
  if (entropySourceResult.isError) {
- return Result.error(SafroleSealError.IncorrectEntropySource);
+ return Result.error(SafroleSealError.IncorrectEntropySource, () => "Safrole: incorrect entropy source in header seal");
  }
  return Result.ok(entropySourceResult.ok);
  }
@@ -16820,7 +16840,7 @@ class safrole_seal_SafroleSeal {
  const validatorIndex = headerView.bandersnatchBlockAuthorIndex.materialize();
  const authorKeys = state.currentValidatorData.at(validatorIndex);
  if (authorKeys === undefined) {
- return Result.error(SafroleSealError.InvalidValidatorIndex);
+ return Result.error(SafroleSealError.InvalidValidatorIndex, () => `Safrole: invalid validator index ${validatorIndex}`);
  }
  const timeSlot = headerView.timeSlotIndex.materialize();
  const sealingKeys = state.sealingKeySeries;
@@ -16839,10 +16859,10 @@ class safrole_seal_SafroleSeal {
  const authorKey = validatorData.bandersnatch;
  const result = await bandersnatchVrf.verifySeal(await this.bandersnatch, authorKey ?? BANDERSNATCH_ZERO_KEY, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
  if (result.isError) {
- return Result.error(SafroleSealError.IncorrectSeal);
+ return Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with ticket");
  }
  if (ticket === undefined || !ticket.id.isEqualTo(result.ok)) {
- return Result.error(SafroleSealError.InvalidTicket);
+ return Result.error(SafroleSealError.InvalidTicket, () => `Safrole: invalid ticket, expected ${ticket?.id} got ${result.ok}`);
  }
  return Result.ok(result.ok);
  }
@@ -16852,13 +16872,13 @@ class safrole_seal_SafroleSeal {
  const sealingKey = keys.at(index);
  const authorBandersnatchKey = authorKey.bandersnatch;
  if (sealingKey === undefined || !sealingKey.isEqualTo(authorBandersnatchKey)) {
- return Result.error(SafroleSealError.InvalidValidator, `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
+ return Result.error(SafroleSealError.InvalidValidator, () => `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
  }
  // verify seal correctness
  const payload = BytesBlob.blobFromParts(JAM_FALLBACK_SEAL, entropy.raw);
  const result = await bandersnatchVrf.verifySeal(await this.bandersnatch, authorBandersnatchKey, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
  if (result.isError) {
- return Result.error(SafroleSealError.IncorrectSeal);
+ return Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with keys");
  }
  return Result.ok(result.ok);
  }
@@ -16901,6 +16921,14 @@ async function getRootHash(yieldedRoots) {
 
 
  const InsufficientFundsError = "insufficient funds";
+ /** Deep clone of a map with array. */
+ function deepCloneMapWithArray(map) {
+ const cloned = [];
+ for (const [k, v] of map.entries()) {
+ cloned.push([k, v.slice()]);
+ }
+ return new Map(cloned);
+ }
  /**
  * State updates that currently accumulating service produced.
  *
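The new `deepCloneMapWithArray` helper above exists so that checkpoint copies (`copyFrom` in a later hunk) do not share the per-service update arrays with the original. A brief usage sketch:

    // A shallow `new Map(original)` would reuse the same arrays; cloning each
    // array keeps the checkpoint independent of later pushes.
    const original = new Map([[1, ["a"]]]);
    const copy = deepCloneMapWithArray(original);
    original.get(1).push("b");
    console.log(copy.get(1)); // ["a"] - unaffected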
@@ -16930,10 +16958,11 @@ class state_update_AccumulationStateUpdate {
  /** Create new empty state update. */
  static empty() {
  return new state_update_AccumulationStateUpdate({
- servicesUpdates: [],
- servicesRemoved: [],
- preimages: [],
- storage: [],
+ created: [],
+ updated: new Map(),
+ removed: [],
+ preimages: new Map(),
+ storage: new Map(),
  }, []);
  }
  /** Create a state update with some existing, yet uncommited services updates. */
@@ -16945,10 +16974,13 @@ class state_update_AccumulationStateUpdate {
  /** Create a copy of another `StateUpdate`. Used by checkpoints. */
  static copyFrom(from) {
  const serviceUpdates = {
- servicesUpdates: [...from.services.servicesUpdates],
- servicesRemoved: [...from.services.servicesRemoved],
- preimages: [...from.services.preimages],
- storage: [...from.services.storage],
+ // shallow copy
+ created: [...from.services.created],
+ updated: new Map(from.services.updated),
+ removed: [...from.services.removed],
+ // deep copy
+ preimages: deepCloneMapWithArray(from.services.preimages),
+ storage: deepCloneMapWithArray(from.services.storage),
  };
  const transfers = [...from.transfers];
  const update = new state_update_AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
@@ -16996,9 +17028,9 @@ class state_update_PartiallyUpdatedState {
  if (destination === null) {
  return null;
  }
- const maybeNewService = this.stateUpdate.services.servicesUpdates.find((update) => update.serviceId === destination);
- if (maybeNewService !== undefined) {
- return maybeNewService.action.account;
+ const maybeUpdatedServiceInfo = this.stateUpdate.services.updated.get(destination);
+ if (maybeUpdatedServiceInfo !== undefined) {
+ return maybeUpdatedServiceInfo.action.account;
  }
  const maybeService = this.state.getService(destination);
  if (maybeService === null) {
@@ -17007,7 +17039,8 @@ class state_update_PartiallyUpdatedState {
  return maybeService.getInfo();
  }
  getStorage(serviceId, rawKey) {
- const item = this.stateUpdate.services.storage.find((x) => x.serviceId === serviceId && x.key.isEqualTo(rawKey));
+ const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+ const item = storages.find((x) => x.key.isEqualTo(rawKey));
  if (item !== undefined) {
  return item.value;
  }
@@ -17022,10 +17055,11 @@ class state_update_PartiallyUpdatedState {
  * the existence in `preimages` map.
  */
  hasPreimage(serviceId, hash) {
- const providedPreimage = this.stateUpdate.services.preimages.find(
+ const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+ const providedPreimage = preimages.find(
  // we ignore the action here, since if there is <any> update on that
  // hash it means it has to exist, right?
- (p) => p.serviceId === serviceId && p.hash.isEqualTo(hash));
+ (p) => p.hash.isEqualTo(hash));
  if (providedPreimage !== undefined) {
  return true;
  }
@@ -17038,7 +17072,8 @@ class state_update_PartiallyUpdatedState {
  }
  getPreimage(serviceId, hash) {
  // TODO [ToDr] Should we verify availability here?
- const freshlyProvided = this.stateUpdate.services.preimages.find((x) => x.serviceId === serviceId && x.hash.isEqualTo(hash));
+ const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+ const freshlyProvided = preimages.find((x) => x.hash.isEqualTo(hash));
  if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
  return freshlyProvided.action.preimage.blob;
  }
@@ -17047,10 +17082,11 @@ class state_update_PartiallyUpdatedState {
  }
  /** Get status of a preimage of current service taking into account any updates. */
  getLookupHistory(currentTimeslot, serviceId, hash, length) {
+ const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
  // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
  // the same state update. We should however switch to proper "updated state"
  // representation soon.
- const updatedPreimage = this.stateUpdate.services.preimages.findLast((update) => update.serviceId === serviceId && update.hash.isEqualTo(hash) && BigInt(update.length) === length);
+ const updatedPreimage = preimages.findLast((update) => update.hash.isEqualTo(hash) && BigInt(update.length) === length);
  const stateFallback = () => {
  // fallback to state lookup
  const service = this.state.getService(serviceId);
@@ -17087,14 +17123,15 @@ class state_update_PartiallyUpdatedState {
17087
17123
  /* State update functions. */
17088
17124
  updateStorage(serviceId, key, value) {
17089
17125
  const update = value === null
17090
- ? UpdateStorage.remove({ serviceId, key })
17126
+ ? UpdateStorage.remove({ key })
17091
17127
  : UpdateStorage.set({
17092
- serviceId,
17093
17128
  storage: StorageItem.create({ key, value }),
17094
17129
  });
17095
- const index = this.stateUpdate.services.storage.findIndex((x) => x.serviceId === update.serviceId && x.key.isEqualTo(key));
17130
+ const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
17131
+ const index = storages.findIndex((x) => x.key.isEqualTo(key));
17096
17132
  const count = index === -1 ? 0 : 1;
17097
- this.stateUpdate.services.storage.splice(index, count, update);
17133
+ storages.splice(index, count, update);
17134
+ this.stateUpdate.services.storage.set(serviceId, storages);
17098
17135
  }
17099
17136
  /**
17100
17137
  * Update a preimage.
@@ -17102,8 +17139,10 @@ class state_update_PartiallyUpdatedState {
17102
17139
  * Note we store all previous entries as well, since there might be a sequence of:
17103
17140
  * `provide` -> `remove` and both should update the end state somehow.
17104
17141
  */
17105
- updatePreimage(newUpdate) {
17106
- this.stateUpdate.services.preimages.push(newUpdate);
17142
+ updatePreimage(serviceId, newUpdate) {
17143
+ const updatePreimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
17144
+ updatePreimages.push(newUpdate);
17145
+ this.stateUpdate.services.preimages.set(serviceId, updatePreimages);
17107
17146
  }
17108
17147
  updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
17109
17148
  check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
@@ -17112,11 +17151,11 @@ class state_update_PartiallyUpdatedState {
17112
17151
  const overflowBytes = !isU64(bytes);
17113
17152
  // TODO [ToDr] this is not specified in GP, but it seems sensible.
17114
17153
  if (overflowItems || overflowBytes) {
17115
- return Result.error(InsufficientFundsError);
17154
+ return Result.error(InsufficientFundsError, () => `Storage utilisation overflow: items=${overflowItems}, bytes=${overflowBytes}`);
17116
17155
  }
17117
17156
  const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
17118
17157
  if (serviceInfo.balance < thresholdBalance) {
17119
- return Result.error(InsufficientFundsError);
17158
+ return Result.error(InsufficientFundsError, () => `Service balance (${serviceInfo.balance}) below threshold (${thresholdBalance})`);
17120
17159
  }
17121
17160
  // Update service info with new details.
17122
17161
  this.updateServiceInfo(serviceId, ServiceAccountInfo.create({
@@ -17127,20 +17166,23 @@ class state_update_PartiallyUpdatedState {
17127
17166
  return Result.ok(OK);
17128
17167
  }
17129
17168
  updateServiceInfo(serviceId, newInfo) {
17130
- const idx = this.stateUpdate.services.servicesUpdates.findIndex((x) => x.serviceId === serviceId);
17131
- const toRemove = idx === -1 ? 0 : 1;
17132
- const existingItem = this.stateUpdate.services.servicesUpdates[idx];
17133
- if (existingItem?.action.kind === UpdateServiceKind.Create) {
17134
- this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.create({
17135
- serviceId,
17169
+ const existingUpdate = this.stateUpdate.services.updated.get(serviceId);
17170
+ if (existingUpdate?.action.kind === UpdateServiceKind.Create) {
17171
+ this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
17136
17172
  serviceInfo: newInfo,
17137
- lookupHistory: existingItem.action.lookupHistory,
17173
+ lookupHistory: existingUpdate.action.lookupHistory,
17138
17174
  }));
17139
17175
  return;
17140
17176
  }
17141
- this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.update({
17142
- serviceId,
17177
+ this.stateUpdate.services.updated.set(serviceId, UpdateService.update({
17178
+ serviceInfo: newInfo,
17179
+ }));
17180
+ }
17181
+ createService(serviceId, newInfo, newLookupHistory) {
17182
+ this.stateUpdate.services.created.push(serviceId);
17183
+ this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
17143
17184
  serviceInfo: newInfo,
17185
+ lookupHistory: newLookupHistory,
17144
17186
  }));
17145
17187
  }
17146
17188
  getPrivilegedServices() {
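Note: the hunks above replace the flat update arrays (each entry carrying its own `serviceId`) with per-service maps, and the accessors now follow a read-or-default, mutate, write-back pattern. A hedged sketch of that pattern (the helper name is illustrative, not part of the bundle):

    // Append an update to a service's bucket in a Map<serviceId, updates[]>.
    function pushServiceUpdate(map, serviceId, update) {
      const bucket = map.get(serviceId) ?? [];
      bucket.push(update);
      map.set(serviceId, bucket);
    }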
@@ -18502,7 +18544,7 @@ class readable_page_ReadablePage extends MemoryPage {
18502
18544
  loadInto(result, startIndex, length) {
18503
18545
  const endIndex = startIndex + length;
18504
18546
  if (endIndex > memory_consts_PAGE_SIZE) {
18505
- return result_Result.error(errors_PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE));
18547
+ return result_Result.error(errors_PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + memory_consts_PAGE_SIZE}`);
18506
18548
  }
18507
18549
  const bytes = this.data.subarray(startIndex, endIndex);
18508
18550
  // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -18511,7 +18553,7 @@ class readable_page_ReadablePage extends MemoryPage {
18511
18553
  return result_Result.ok(result_OK);
18512
18554
  }
18513
18555
  storeFrom(_address, _data) {
18514
- return result_Result.error(errors_PageFault.fromMemoryIndex(this.start, true));
18556
+ return result_Result.error(errors_PageFault.fromMemoryIndex(this.start, true), () => `Page fault: attempted to write to read-only page at ${this.start}`);
18515
18557
  }
18516
18558
  setData(pageIndex, data) {
18517
18559
  this.data.set(data, pageIndex);
@@ -18545,7 +18587,7 @@ class writeable_page_WriteablePage extends MemoryPage {
18545
18587
  loadInto(result, startIndex, length) {
18546
18588
  const endIndex = startIndex + length;
18547
18589
  if (endIndex > memory_consts_PAGE_SIZE) {
18548
- return result_Result.error(errors_PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE));
18590
+ return result_Result.error(errors_PageFault.fromMemoryIndex(this.start + memory_consts_PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + memory_consts_PAGE_SIZE}`);
18549
18591
  }
18550
18592
  const bytes = this.view.subarray(startIndex, endIndex);
18551
18593
  // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -18631,7 +18673,7 @@ class memory_Memory {
18631
18673
  memory_logger.insane `MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`;
18632
18674
  const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
18633
18675
  if (pagesResult.isError) {
18634
- return Result.error(pagesResult.error);
18676
+ return Result.error(pagesResult.error, pagesResult.details);
18635
18677
  }
18636
18678
  const pages = pagesResult.ok;
18637
18679
  let currentPosition = address;
@@ -18656,14 +18698,14 @@ class memory_Memory {
18656
18698
  const pages = [];
18657
18699
  for (const pageNumber of pageRange) {
18658
18700
  if (pageNumber < RESERVED_NUMBER_OF_PAGES) {
18659
- return Result.error(PageFault.fromPageNumber(pageNumber, true));
18701
+ return Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to access reserved page ${pageNumber}`);
18660
18702
  }
18661
18703
  const page = this.memory.get(pageNumber);
18662
18704
  if (page === undefined) {
18663
- return Result.error(PageFault.fromPageNumber(pageNumber));
18705
+ return Result.error(PageFault.fromPageNumber(pageNumber), () => `Page fault: page ${pageNumber} not allocated`);
18664
18706
  }
18665
18707
  if (accessType === AccessType.WRITE && !page.isWriteable()) {
18666
- return Result.error(PageFault.fromPageNumber(pageNumber, true));
18708
+ return Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to write to read-only page ${pageNumber}`);
18667
18709
  }
18668
18710
  pages.push(page);
18669
18711
  }
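Note: as elsewhere in this release, the page-fault branches above now pass `details` as a thunk instead of a string, so the message is only built when the error is actually rendered (for example by `result_resultToString`, which calls `details()`). A minimal call-site sketch, assuming the bundle's `Result` helper is in scope and using illustrative values:

    const pageNumber = 42; // illustrative value
    const res = Result.error(new Error("PageFault"), () => `page ${pageNumber} not allocated`);
    if (res.isError) {
      console.error(res.details()); // the string is only constructed here
    }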
@@ -18681,7 +18723,7 @@ class memory_Memory {
18681
18723
  }
18682
18724
  const pagesResult = this.getPages(startAddress, result.length, AccessType.READ);
18683
18725
  if (pagesResult.isError) {
18684
- return Result.error(pagesResult.error);
18726
+ return Result.error(pagesResult.error, pagesResult.details);
18685
18727
  }
18686
18728
  const pages = pagesResult.ok;
18687
18729
  let currentPosition = startAddress;
@@ -20434,7 +20476,7 @@ class program_decoder_ProgramDecoder {
20434
20476
  }
20435
20477
  catch (e) {
20436
20478
  program_decoder_logger.error `Invalid program: ${e}`;
20437
- return Result.error(ProgramDecoderError.InvalidProgramError);
20479
+ return Result.error(ProgramDecoderError.InvalidProgramError, () => `Program decoder error: ${e}`);
20438
20480
  }
20439
20481
  }
20440
20482
  }
@@ -21177,10 +21219,10 @@ class accumulate_externalities_AccumulateExternalities {
21177
21219
  const len = existingPreimage.slots.length;
21178
21220
  // https://graypaper.fluffylabs.dev/#/9a08063/380901380901?v=0.6.6
21179
21221
  if (len === PreimageStatusKind.Requested) {
21180
- return Result.error(RequestPreimageError.AlreadyRequested);
21222
+ return Result.error(RequestPreimageError.AlreadyRequested, () => `Preimage already requested: hash=${hash}`);
21181
21223
  }
21182
21224
  if (len === PreimageStatusKind.Available || len === PreimageStatusKind.Reavailable) {
21183
- return Result.error(RequestPreimageError.AlreadyAvailable);
21225
+ return Result.error(RequestPreimageError.AlreadyAvailable, () => `Preimage already available: hash=${hash}`);
21184
21226
  }
21185
21227
  // TODO [ToDr] Not sure if we should update the service info in that case,
21186
21228
  // but for now we let that case fall-through.
@@ -21205,15 +21247,13 @@ class accumulate_externalities_AccumulateExternalities {
21205
21247
  const clampedLength = clampU64ToU32(length);
21206
21248
  if (existingPreimage === null) {
21207
21249
  // https://graypaper.fluffylabs.dev/#/9a08063/38a60038a600?v=0.6.6
21208
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
21209
- serviceId: this.currentServiceId,
21250
+ this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
21210
21251
  lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([])),
21211
21252
  }));
21212
21253
  }
21213
21254
  else {
21214
21255
  /** https://graypaper.fluffylabs.dev/#/9a08063/38ca0038ca00?v=0.6.6 */
21215
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
21216
- serviceId: this.currentServiceId,
21256
+ this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
21217
21257
  lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([...existingPreimage.slots, this.currentTimeslot])),
21218
21258
  }));
21219
21259
  }
@@ -21223,7 +21263,7 @@ class accumulate_externalities_AccumulateExternalities {
21223
21263
  const serviceId = this.currentServiceId;
21224
21264
  const status = this.updatedState.getLookupHistory(this.currentTimeslot, this.currentServiceId, hash, length);
21225
21265
  if (status === null) {
21226
- return Result.error(ForgetPreimageError.NotFound);
21266
+ return Result.error(ForgetPreimageError.NotFound, () => `Preimage not found: hash=${hash}, length=${length}`);
21227
21267
  }
21228
21268
  const s = slotsToPreimageStatus(status.slots);
21229
21269
  const updateStorageUtilisation = () => {
@@ -21236,10 +21276,9 @@ class accumulate_externalities_AccumulateExternalities {
21236
21276
  if (s.status === PreimageStatusKind.Requested) {
21237
21277
  const res = updateStorageUtilisation();
21238
21278
  if (res.isError) {
21239
- return Result.error(ForgetPreimageError.StorageUtilisationError);
21279
+ return Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
21240
21280
  }
21241
- this.updatedState.updatePreimage(UpdatePreimage.remove({
21242
- serviceId,
21281
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
21243
21282
  hash: status.hash,
21244
21283
  length: status.length,
21245
21284
  }));
@@ -21252,21 +21291,19 @@ class accumulate_externalities_AccumulateExternalities {
21252
21291
  if (y < t - this.chainSpec.preimageExpungePeriod) {
21253
21292
  const res = updateStorageUtilisation();
21254
21293
  if (res.isError) {
21255
- return Result.error(ForgetPreimageError.StorageUtilisationError);
21294
+ return Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
21256
21295
  }
21257
- this.updatedState.updatePreimage(UpdatePreimage.remove({
21258
- serviceId,
21296
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
21259
21297
  hash: status.hash,
21260
21298
  length: status.length,
21261
21299
  }));
21262
21300
  return Result.ok(OK);
21263
21301
  }
21264
- return Result.error(ForgetPreimageError.NotExpired);
21302
+ return Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
21265
21303
  }
21266
21304
  // https://graypaper.fluffylabs.dev/#/9a08063/38c80138c801?v=0.6.6
21267
21305
  if (s.status === PreimageStatusKind.Available) {
21268
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
21269
- serviceId,
21306
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
21270
21307
  lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[0], t])),
21271
21308
  }));
21272
21309
  return Result.ok(OK);
@@ -21275,13 +21312,12 @@ class accumulate_externalities_AccumulateExternalities {
21275
21312
  if (s.status === PreimageStatusKind.Reavailable) {
21276
21313
  const y = s.data[1];
21277
21314
  if (y < t - this.chainSpec.preimageExpungePeriod) {
21278
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
21279
- serviceId,
21315
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
21280
21316
  lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[2], t])),
21281
21317
  }));
21282
21318
  return Result.ok(OK);
21283
21319
  }
21284
- return Result.error(ForgetPreimageError.NotExpired);
21320
+ return Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
21285
21321
  }
21286
21322
  assertNever(s);
21287
21323
  }
@@ -21290,17 +21326,17 @@ class accumulate_externalities_AccumulateExternalities {
21290
21326
  const destination = this.getServiceInfo(destinationId);
21291
21327
  /** https://graypaper.fluffylabs.dev/#/9a08063/370401370401?v=0.6.6 */
21292
21328
  if (destination === null || destinationId === null) {
21293
- return Result.error(TransferError.DestinationNotFound);
21329
+ return Result.error(TransferError.DestinationNotFound, () => `Destination service not found: ${destinationId}`);
21294
21330
  }
21295
21331
  /** https://graypaper.fluffylabs.dev/#/9a08063/371301371301?v=0.6.6 */
21296
21332
  if (gas < destination.onTransferMinGas) {
21297
- return Result.error(TransferError.GasTooLow);
21333
+ return Result.error(TransferError.GasTooLow, () => `Gas ${gas} below minimum ${destination.onTransferMinGas}`);
21298
21334
  }
21299
21335
  /** https://graypaper.fluffylabs.dev/#/9a08063/371b01371b01?v=0.6.6 */
21300
21336
  const newBalance = source.balance - amount;
21301
21337
  const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(source.storageUtilisationCount, source.storageUtilisationBytes, source.gratisStorage);
21302
21338
  if (newBalance < thresholdBalance) {
21303
- return Result.error(TransferError.BalanceBelowThreshold);
21339
+ return Result.error(TransferError.BalanceBelowThreshold, () => `Balance ${newBalance} below threshold ${thresholdBalance}`);
21304
21340
  }
21305
21341
  // outgoing transfer
21306
21342
  this.updatedState.stateUpdate.transfers.push(PendingTransfer.create({
@@ -21327,7 +21363,7 @@ class accumulate_externalities_AccumulateExternalities {
21327
21363
  // check if we are privileged to set gratis storage
21328
21364
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/369203369603?v=0.6.7
21329
21365
  if (gratisStorage !== tryAsU64(0) && this.currentServiceId !== this.updatedState.getPrivilegedServices().manager) {
21330
- return Result.error(NewServiceError.UnprivilegedService);
21366
+ return Result.error(NewServiceError.UnprivilegedService, () => `Service ${this.currentServiceId} not privileged to set gratis storage`);
21331
21367
  }
21332
21368
  // check if we have enough balance
21333
21369
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/369e0336a303?v=0.6.7
@@ -21336,7 +21372,7 @@ class accumulate_externalities_AccumulateExternalities {
21336
21372
  const thresholdForCurrent = ServiceAccountInfo.calculateThresholdBalance(currentService.storageUtilisationCount, currentService.storageUtilisationBytes, currentService.gratisStorage);
21337
21373
  const balanceLeftForCurrent = currentService.balance - thresholdForNew;
21338
21374
  if (balanceLeftForCurrent < thresholdForCurrent || bytes.overflow) {
21339
- return Result.error(NewServiceError.InsufficientFunds);
21375
+ return Result.error(NewServiceError.InsufficientFunds, () => `Insufficient funds: balance=${currentService.balance}, required=${thresholdForNew}, overflow=${bytes.overflow}`);
21340
21376
  }
21341
21377
  // `a`: https://graypaper.fluffylabs.dev/#/ab2cdbd/366b02366d02?v=0.7.2
21342
21378
  const newAccount = ServiceAccountInfo.create({
@@ -21363,15 +21399,11 @@ class accumulate_externalities_AccumulateExternalities {
21363
21399
  // NOTE: It's safe to cast to `Number` here, because the service ID cannot be bigger than 2**16
21364
21400
  const newServiceId = tryAsServiceId(Number(wantedServiceId));
21365
21401
  if (this.getServiceInfo(newServiceId) !== null) {
21366
- return Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken);
21402
+ return Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken, () => `Service ID ${newServiceId} already taken`);
21367
21403
  }
21368
21404
  // add the new service with selected ID
21369
21405
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/36be0336c003?v=0.7.2
21370
- this.updatedState.stateUpdate.services.servicesUpdates.push(UpdateService.create({
21371
- serviceId: newServiceId,
21372
- serviceInfo: newAccount,
21373
- lookupHistory: newLookupItem,
21374
- }));
21406
+ this.updatedState.createService(newServiceId, newAccount, newLookupItem);
21375
21407
  // update the balance of current service
21376
21408
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/36c20336c403?v=0.7.2
21377
21409
  this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -21382,12 +21414,8 @@ class accumulate_externalities_AccumulateExternalities {
21382
21414
  }
21383
21415
  const newServiceId = this.nextNewServiceId;
21384
21416
  // add the new service
21385
- // https://graypaper.fluffylabs.dev/#/ab2cdbd/36e70336e903?v=0.7.2
21386
- this.updatedState.stateUpdate.services.servicesUpdates.push(UpdateService.create({
21387
- serviceId: newServiceId,
21388
- serviceInfo: newAccount,
21389
- lookupHistory: newLookupItem,
21390
- }));
21417
+ // https://graypaper.fluffylabs.dev/#/7e6ff6a/36cb0236cb02?v=0.6.7
21418
+ this.updatedState.createService(newServiceId, newAccount, newLookupItem);
21391
21419
  // update the balance of current service
21392
21420
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/36ec0336ee03?v=0.7.2
21393
21421
  this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -21411,7 +21439,7 @@ class accumulate_externalities_AccumulateExternalities {
21411
21439
  const currentDelegator = this.updatedState.getPrivilegedServices().delegator;
21412
21440
  if (currentDelegator !== this.currentServiceId) {
21413
21441
  accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not a validators manager. (expected: ${currentDelegator}) and cannot update validators data. Ignoring`;
21414
- return Result.error(UnprivilegedError);
21442
+ return Result.error(UnprivilegedError, () => `Service ${this.currentServiceId} is not delegator (expected: ${currentDelegator})`);
21415
21443
  }
21416
21444
  this.updatedState.stateUpdate.validatorsData = validatorsData;
21417
21445
  return Result.ok(OK);
@@ -21426,11 +21454,11 @@ class accumulate_externalities_AccumulateExternalities {
21426
21454
  const currentAssigners = this.updatedState.getPrivilegedServices().assigners[coreIndex];
21427
21455
  if (currentAssigners !== this.currentServiceId) {
21428
21456
  accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not an auth manager of core ${coreIndex} (expected: ${currentAssigners}) and cannot update authorization queue.`;
21429
- return Result.error(UpdatePrivilegesError.UnprivilegedService);
21457
+ return Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} not assigner for core ${coreIndex} (expected: ${currentAssigners})`);
21430
21458
  }
21431
21459
  if (assigners === null && Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)) {
21432
21460
  accumulate_externalities_logger.trace `The new auth manager is not a valid service id.`;
21433
- return Result.error(UpdatePrivilegesError.InvalidServiceId);
21461
+ return Result.error(UpdatePrivilegesError.InvalidServiceId, () => `New auth manager is null for core ${coreIndex}`);
21434
21462
  }
21435
21463
  this.updatedState.stateUpdate.authorizationQueues.set(coreIndex, authQueue);
21436
21464
  return Result.ok(OK);
@@ -21463,10 +21491,10 @@ class accumulate_externalities_AccumulateExternalities {
21463
21491
  const isManager = current.manager === this.currentServiceId;
21464
21492
  if (Compatibility.isLessThan(GpVersion.V0_7_1)) {
21465
21493
  if (!isManager) {
21466
- return Result.error(UpdatePrivilegesError.UnprivilegedService);
21494
+ return Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} is not manager`);
21467
21495
  }
21468
21496
  if (manager === null || delegator === null) {
21469
- return Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator is not a valid service id.");
21497
+ return Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator is not a valid service id.");
21470
21498
  }
21471
21499
  this.updatedState.stateUpdate.privilegedServices = PrivilegedServices.create({
21472
21500
  manager,
@@ -21479,7 +21507,7 @@ class accumulate_externalities_AccumulateExternalities {
21479
21507
  }
21480
21508
  const original = this.updatedState.state.privilegedServices;
21481
21509
  if (manager === null || delegator === null || registrar === null) {
21482
- return Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator or registrar is not a valid service id.");
21510
+ return Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator or registrar is not a valid service id.");
21483
21511
  }
21484
21512
  const newDelegator = this.updatePrivilegedServiceId(delegator, current.delegator, {
21485
21513
  isManager,
@@ -21519,23 +21547,22 @@ class accumulate_externalities_AccumulateExternalities {
21519
21547
  // TODO [ToDr] what about newly created services?
21520
21548
  const service = serviceId === null ? null : this.updatedState.state.getService(serviceId);
21521
21549
  if (service === null || serviceId === null) {
21522
- return Result.error(ProvidePreimageError.ServiceNotFound);
21550
+ return Result.error(ProvidePreimageError.ServiceNotFound, () => `Service not found: ${serviceId}`);
21523
21551
  }
21524
21552
  // calculating the hash
21525
21553
  const preimageHash = this.blake2b.hashBytes(preimage).asOpaque();
21526
21554
  // checking service internal lookup
21527
21555
  const stateLookup = this.updatedState.getLookupHistory(this.currentTimeslot, serviceId, preimageHash, tryAsU64(preimage.length));
21528
21556
  if (stateLookup === null || !LookupHistoryItem.isRequested(stateLookup)) {
21529
- return Result.error(ProvidePreimageError.WasNotRequested);
21557
+ return Result.error(ProvidePreimageError.WasNotRequested, () => `Preimage was not requested: hash=${preimageHash}, service=${serviceId}`);
21530
21558
  }
21531
21559
  // checking already provided preimages
21532
21560
  const hasPreimage = this.updatedState.hasPreimage(serviceId, preimageHash);
21533
21561
  if (hasPreimage) {
21534
- return Result.error(ProvidePreimageError.AlreadyProvided);
21562
+ return Result.error(ProvidePreimageError.AlreadyProvided, () => `Preimage already provided: hash=${preimageHash}, service=${serviceId}`);
21535
21563
  }
21536
21564
  // setting up the new preimage
21537
- this.updatedState.updatePreimage(UpdatePreimage.provide({
21538
- serviceId,
21565
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.provide({
21539
21566
  preimage: PreimageItem.create({
21540
21567
  hash: preimageHash,
21541
21568
  blob: preimage,
@@ -21547,31 +21574,31 @@ class accumulate_externalities_AccumulateExternalities {
21547
21574
  eject(destination, previousCodeHash) {
21548
21575
  const service = this.getServiceInfo(destination);
21549
21576
  if (service === null || destination === null) {
21550
- return Result.error(EjectError.InvalidService, "Service missing");
21577
+ return Result.error(EjectError.InvalidService, () => "Service missing");
21551
21578
  }
21552
21579
  const currentService = this.getCurrentServiceInfo();
21553
21580
  // check if the service expects to be ejected by us:
21554
21581
  const expectedCodeHash = Bytes.zero(HASH_SIZE).asOpaque();
21555
21582
  writeServiceIdAsLeBytes(this.currentServiceId, expectedCodeHash.raw);
21556
21583
  if (!service.codeHash.isEqualTo(expectedCodeHash)) {
21557
- return Result.error(EjectError.InvalidService, "Invalid code hash");
21584
+ return Result.error(EjectError.InvalidService, () => "Invalid code hash");
21558
21585
  }
21559
21586
  // make sure the service only has required number of storage items?
21560
21587
  if (service.storageUtilisationCount !== REQUIRED_NUMBER_OF_STORAGE_ITEMS_FOR_EJECT) {
21561
- return Result.error(EjectError.InvalidPreimage, "Too many storage items");
21588
+ return Result.error(EjectError.InvalidPreimage, () => "Too many storage items");
21562
21589
  }
21563
21590
  // storage items length
21564
21591
  const l = tryAsU64(maxU64(service.storageUtilisationBytes, LOOKUP_HISTORY_ENTRY_BYTES) - LOOKUP_HISTORY_ENTRY_BYTES);
21565
21592
  // check if we have a preimage with the entire storage.
21566
21593
  const [isPreviousCodeExpired, errorReason] = this.isPreviousCodeExpired(destination, previousCodeHash, l);
21567
21594
  if (!isPreviousCodeExpired) {
21568
- return Result.error(EjectError.InvalidPreimage, `Previous code available: ${errorReason}`);
21595
+ return Result.error(EjectError.InvalidPreimage, () => `Previous code available: ${errorReason}`);
21569
21596
  }
21570
21597
  // compute new balance of the service.
21571
21598
  const newBalance = sumU64(currentService.balance, service.balance);
21572
21599
  // TODO [ToDr] what to do in case of overflow?
21573
21600
  if (newBalance.overflow) {
21574
- return Result.error(EjectError.InvalidService, "Balance overflow");
21601
+ return Result.error(EjectError.InvalidService, () => "Balance overflow");
21575
21602
  }
21576
21603
  // update current service.
21577
21604
  this.updatedState.updateServiceInfo(this.currentServiceId, ServiceAccountInfo.create({
@@ -21579,11 +21606,13 @@ class accumulate_externalities_AccumulateExternalities {
21579
21606
  balance: newBalance.value,
21580
21607
  }));
21581
21608
  // and finally add an ejected service.
21582
- this.updatedState.stateUpdate.services.servicesRemoved.push(destination);
21609
+ this.updatedState.stateUpdate.services.removed.push(destination);
21583
21610
  // take care of the code preimage and its lookup history
21584
21611
  // Safe, because we know the preimage is valid, and it's the code of the service, which is bounded by maximal service code size anyway (much smaller than 2**32 bytes).
21585
21612
  const preimageLength = tryAsU32(Number(l));
21586
- this.updatedState.stateUpdate.services.preimages.push(UpdatePreimage.remove({ serviceId: destination, hash: previousCodeHash, length: preimageLength }));
21613
+ const preimages = this.updatedState.stateUpdate.services.preimages.get(destination) ?? [];
21614
+ preimages.push(UpdatePreimage.remove({ hash: previousCodeHash, length: preimageLength }));
21615
+ this.updatedState.stateUpdate.services.preimages.set(destination, preimages);
21587
21616
  return Result.ok(OK);
21588
21617
  }
21589
21618
  read(serviceId, rawKey) {
@@ -21760,10 +21789,10 @@ class assurances_Assurances {
21760
21789
  for (const assurance of assurances) {
21761
21790
  const { anchor, validatorIndex, bitfield } = assurance;
21762
21791
  if (!anchor.isEqualTo(input.parentHash)) {
21763
- return Result.error(AssurancesError.InvalidAnchor, `anchor: expected: ${input.parentHash}, got ${anchor}`);
21792
+ return Result.error(AssurancesError.InvalidAnchor, () => `anchor: expected: ${input.parentHash}, got ${anchor}`);
21764
21793
  }
21765
21794
  if (prevValidatorIndex >= validatorIndex) {
21766
- return Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
21795
+ return Result.error(AssurancesError.InvalidOrder, () => `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
21767
21796
  }
21768
21797
  prevValidatorIndex = assurance.validatorIndex;
21769
21798
  check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
@@ -21786,7 +21815,7 @@ class assurances_Assurances {
21786
21815
  * https://graypaper.fluffylabs.dev/#/579bd12/14e90014ea00
21787
21816
  */
21788
21817
  if (noOfAssurances > 0 && !isReportPending) {
21789
- return Result.error(AssurancesError.NoReportPending, `no report pending for core ${c} yet we got an assurance`);
21818
+ return Result.error(AssurancesError.NoReportPending, () => `no report pending for core ${c} yet we got an assurance`);
21790
21819
  }
21791
21820
  /**
21792
21821
  * Remove work report if it became available or timed out.
@@ -21832,7 +21861,7 @@ class assurances_Assurances {
21832
21861
  const v = assurance.view();
21833
21862
  const key = validatorData[v.validatorIndex.materialize()];
21834
21863
  if (key === undefined) {
21835
- return Result.error(AssurancesError.InvalidValidatorIndex);
21864
+ return Result.error(AssurancesError.InvalidValidatorIndex, () => `Invalid validator index: ${v.validatorIndex.materialize()}`);
21836
21865
  }
21837
21866
  signatures.push({
21838
21867
  signature: v.signature.materialize(),
@@ -21844,7 +21873,7 @@ class assurances_Assurances {
21844
21873
  const isAllSignaturesValid = signaturesValid.every((x) => x);
21845
21874
  if (!isAllSignaturesValid) {
21846
21875
  const invalidIndices = signaturesValid.reduce((acc, isValid, idx) => (isValid ? acc : acc.concat(idx)), []);
21847
- return Result.error(AssurancesError.InvalidSignature, `invalid signatures at ${invalidIndices.join(", ")}`);
21876
+ return Result.error(AssurancesError.InvalidSignature, () => `invalid signatures at ${invalidIndices.join(", ")}`);
21848
21877
  }
21849
21878
  return Result.ok(OK);
21850
21879
  }
@@ -22455,7 +22484,7 @@ class host_call_memory_HostCallMemory {
22455
22484
  return Result.ok(OK);
22456
22485
  }
22457
22486
  if (address + tryAsU64(bytes.length) > MEMORY_SIZE) {
22458
- return Result.error(new OutOfBounds());
22487
+ return Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${address} + length ${bytes.length} exceeds memory size`);
22459
22488
  }
22460
22489
  return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
22461
22490
  }
@@ -22464,13 +22493,10 @@ class host_call_memory_HostCallMemory {
22464
22493
  return Result.ok(OK);
22465
22494
  }
22466
22495
  if (startAddress + tryAsU64(result.length) > MEMORY_SIZE) {
22467
- return Result.error(new OutOfBounds());
22496
+ return Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${startAddress} + length ${result.length} exceeds memory size`);
22468
22497
  }
22469
22498
  return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
22470
22499
  }
22471
- getMemory() {
22472
- return this.memory;
22473
- }
22474
22500
  }
22475
22501
 
22476
22502
  ;// CONCATENATED MODULE: ./packages/core/pvm-host-calls/host-call-registers.ts
@@ -24276,18 +24302,18 @@ class accumulate_Accumulate {
24276
24302
  const serviceInfo = updatedState.getServiceInfo(serviceId);
24277
24303
  if (serviceInfo === null) {
24278
24304
  accumulate_logger.log `Service with id ${serviceId} not found.`;
24279
- return Result.error(PvmInvocationError.NoService);
24305
+ return Result.error(PvmInvocationError.NoService, () => `Accumulate: service ${serviceId} not found`);
24280
24306
  }
24281
24307
  const codeHash = serviceInfo.codeHash;
24282
24308
  // TODO [ToDr] Should we check that the preimage is still available?
24283
24309
  const code = updatedState.getPreimage(serviceId, codeHash.asOpaque());
24284
24310
  if (code === null) {
24285
24311
  accumulate_logger.log `Code with hash ${codeHash} not found for service ${serviceId}.`;
24286
- return Result.error(PvmInvocationError.NoPreimage);
24312
+ return Result.error(PvmInvocationError.NoPreimage, () => `Accumulate: code with hash ${codeHash} not found for service ${serviceId}`);
24287
24313
  }
24288
24314
  if (code.length > W_C) {
24289
24315
  accumulate_logger.log `Code with hash ${codeHash} is too long for service ${serviceId}.`;
24290
- return Result.error(PvmInvocationError.PreimageTooLong);
24316
+ return Result.error(PvmInvocationError.PreimageTooLong, () => `Accumulate: code length ${code.length} exceeds max ${W_C} for service ${serviceId}`);
24291
24317
  }
24292
24318
  const nextServiceId = generateNextServiceId({ serviceId, entropy, timeslot: slot }, this.chainSpec, this.blake2b);
24293
24319
  const partialState = new AccumulateExternalities(this.chainSpec, this.blake2b, updatedState, serviceId, nextServiceId, slot);
@@ -24542,19 +24568,16 @@ class accumulate_Accumulate {
24542
24568
  const gasLimit = tryAsServiceGas(this.chainSpec.maxBlockGas > calculatedGasLimit ? this.chainSpec.maxBlockGas : calculatedGasLimit);
24543
24569
  return tryAsServiceGas(gasLimit);
24544
24570
  }
24545
- hasDuplicatedServicesCreated(updateServices) {
24546
- const createdServiceIds = new Set();
24547
- for (const update of updateServices) {
24548
- if (update.action.kind === UpdateServiceKind.Create) {
24549
- const serviceId = update.serviceId;
24550
- if (createdServiceIds.has(serviceId)) {
24551
- accumulate_logger.log `Duplicated Service creation detected ${serviceId}. Block is invalid.`;
24552
- return true;
24553
- }
24554
- createdServiceIds.add(serviceId);
24555
- }
24556
- }
24557
- return false;
24571
+ /**
24572
+ * Detects the very unlikely situation where multiple services are created with the same ID.
24573
+ *
24574
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/30f20330f403?v=0.7.2
24575
+ *
24576
+ * NOTE: This is public only for testing purposes and should not be used outside of accumulation.
24577
+ */
24578
+ hasDuplicatedServiceIdCreated(createdIds) {
24579
+ const uniqueIds = new Set(createdIds);
24580
+ return uniqueIds.size !== createdIds.length;
24558
24581
  }
24559
24582
  async transition({ reports, slot, entropy }) {
24560
24583
  const statistics = new Map();
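Note: the duplicate-creation check above now operates on the plain list of created service IDs instead of scanning every service update for `Create` actions. A short usage sketch with illustrative values:

    // A Set de-duplicates, so a size mismatch means at least one ID repeats.
    const created = [42, 7, 42];
    const hasDuplicate = new Set(created).size !== created.length; // true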
@@ -24576,8 +24599,9 @@ class accumulate_Accumulate {
24576
24599
  const accumulated = accumulatableReports.subview(0, accumulatedReports);
24577
24600
  const { services, yieldedRoots, transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
24578
24601
  assertEmpty(stateUpdateRest);
24579
- if (this.hasDuplicatedServicesCreated(services.servicesUpdates)) {
24580
- return Result.error(ACCUMULATION_ERROR);
24602
+ if (this.hasDuplicatedServiceIdCreated(services.created)) {
24603
+ accumulate_logger.trace `Duplicated Service creation detected. Block is invalid.`;
24604
+ return Result.error(ACCUMULATION_ERROR, () => "Accumulate: duplicate service created");
24581
24605
  }
24582
24606
  const accStateUpdate = this.getAccumulationStateUpdate(accumulated.toArray(), toAccumulateLater, slot, Array.from(statistics.keys()), services);
24583
24607
  const accumulationOutputUnsorted = Array.from(yieldedRoots.entries()).map(([serviceId, root]) => {
@@ -24658,13 +24682,13 @@ class deferred_transfers_DeferredTransfers {
24658
24682
  .toSorted((a, b) => a.source - b.source);
24659
24683
  const info = partiallyUpdatedState.getServiceInfo(serviceId);
24660
24684
  if (info === null) {
24661
- return Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist);
24685
+ return Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist, () => `Deferred transfers: service info not found for ${serviceId}`);
24662
24686
  }
24663
24687
  const codeHash = info.codeHash;
24664
24688
  const code = partiallyUpdatedState.getPreimage(serviceId, codeHash.asOpaque());
24665
24689
  const newBalance = sumU64(info.balance, ...transfers.map((item) => item.amount));
24666
24690
  if (newBalance.overflow) {
24667
- return Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow);
24691
+ return Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow, () => `Deferred transfers: balance overflow for service ${serviceId}`);
24668
24692
  }
24669
24693
  const newInfo = ServiceAccountInfo.create({ ...info, balance: newBalance.value });
24670
24694
  partiallyUpdatedState.updateServiceInfo(serviceId, newInfo);
@@ -25142,7 +25166,7 @@ function verify_basic_verifyReportsBasic(input) {
25142
25166
  const noOfPrerequisites = reportView.context.view().prerequisites.view().length;
25143
25167
  const noOfSegmentRootLookups = reportView.segmentRootLookup.view().length;
25144
25168
  if (noOfPrerequisites + noOfSegmentRootLookups > MAX_REPORT_DEPENDENCIES) {
25145
- return Result.error(ReportsError.TooManyDependencies, `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
25169
+ return Result.error(ReportsError.TooManyDependencies, () => `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
25146
25170
  }
25147
25171
  /**
25148
25172
  * In order to ensure fair use of a block’s extrinsic space,
@@ -25161,7 +25185,7 @@ function verify_basic_verifyReportsBasic(input) {
25161
25185
  totalOutputsSize += item.view().result.view().okBlob?.raw.length ?? 0;
25162
25186
  }
25163
25187
  if (authOutputSize + totalOutputsSize > MAX_WORK_REPORT_SIZE_BYTES) {
25164
- return Result.error(ReportsError.WorkReportTooBig, `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
25188
+ return Result.error(ReportsError.WorkReportTooBig, () => `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
25165
25189
  }
25166
25190
  }
25167
25191
  return Result.ok(OK);
@@ -25195,12 +25219,12 @@ function verify_contextual_verifyContextualValidity(input, state, headerChain, m
25195
25219
  for (const result of guarantee.report.results) {
25196
25220
  const service = state.getService(result.serviceId);
25197
25221
  if (service === null) {
25198
- return Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
25222
+ return Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
25199
25223
  }
25200
25224
  // check service code hash
25201
25225
  // https://graypaper.fluffylabs.dev/#/5f542d7/154b02154b02
25202
25226
  if (!result.codeHash.isEqualTo(service.getInfo().codeHash)) {
25203
- return Result.error(ReportsError.BadCodeHash, `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
25227
+ return Result.error(ReportsError.BadCodeHash, () => `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
25204
25228
  }
25205
25229
  }
25206
25230
  }
@@ -25211,7 +25235,7 @@ function verify_contextual_verifyContextualValidity(input, state, headerChain, m
25211
25235
  * https://graypaper.fluffylabs.dev/#/5f542d7/151f01152101
25212
25236
  */
25213
25237
  if (currentWorkPackages.size !== input.guarantees.length) {
25214
- return Result.error(ReportsError.DuplicatePackage, "Duplicate work package detected.");
25238
+ return Result.error(ReportsError.DuplicatePackage, () => "Duplicate work package detected.");
25215
25239
  }
25216
25240
  const minLookupSlot = Math.max(0, input.slot - maxLookupAnchorAge);
25217
25241
  const contextResult = verifyRefineContexts(minLookupSlot, contexts, input.recentBlocksPartialUpdate, headerChain);
@@ -25256,7 +25280,7 @@ function verify_contextual_verifyContextualValidity(input, state, headerChain, m
25256
25280
  : undefined;
25257
25281
  }
25258
25282
  if (root === undefined || !root.segmentTreeRoot.isEqualTo(lookup.segmentTreeRoot)) {
25259
- return Result.error(ReportsError.SegmentRootLookupInvalid, `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
25283
+ return Result.error(ReportsError.SegmentRootLookupInvalid, () => `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
25260
25284
  }
25261
25285
  }
25262
25286
  }
@@ -25279,16 +25303,16 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
25279
25303
  */
25280
25304
  const recentBlock = recentBlocks.get(context.anchor);
25281
25305
  if (recentBlock === undefined) {
25282
- return Result.error(ReportsError.AnchorNotRecent, `Anchor block ${context.anchor} not found in recent blocks.`);
25306
+ return Result.error(ReportsError.AnchorNotRecent, () => `Anchor block ${context.anchor} not found in recent blocks.`);
25283
25307
  }
25284
25308
  // check state root
25285
25309
  if (!recentBlock.postStateRoot.isEqualTo(context.stateRoot)) {
25286
- return Result.error(ReportsError.BadStateRoot, `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
25310
+ return Result.error(ReportsError.BadStateRoot, () => `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
25287
25311
  }
25288
25312
  // check beefy root
25289
25313
  const beefyRoot = recentBlock.accumulationResult;
25290
25314
  if (!beefyRoot.isEqualTo(context.beefyRoot)) {
25291
- return Result.error(ReportsError.BadBeefyMmrRoot, `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
25315
+ return Result.error(ReportsError.BadBeefyMmrRoot, () => `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
25292
25316
  }
25293
25317
  /**
25294
25318
  * We require that each lookup-anchor block be within the
@@ -25297,7 +25321,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
25297
25321
  * https://graypaper.fluffylabs.dev/#/5f542d7/154601154701
25298
25322
  */
25299
25323
  if (context.lookupAnchorSlot < minLookupSlot) {
25300
- return Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
25324
+ return Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
25301
25325
  }
25302
25326
  /**
25303
25327
  * We also require that we have a record of it; this is one of
@@ -25314,7 +25338,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
25314
25338
  verify_contextual_logger.warn `Lookup anchor check for ${context.lookupAnchor} would fail, but override is active.`;
25315
25339
  }
25316
25340
  else {
25317
- return Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
25341
+ return Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
25318
25342
  }
25319
25343
  }
25320
25344
  }
@@ -25337,7 +25361,7 @@ function verifyDependencies({ currentWorkPackages, recentlyReported, prerequisit
25337
25361
  if (recentlyReported.has(preReqHash)) {
25338
25362
  continue;
25339
25363
  }
25340
- return Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, `Missing work package ${preReqHash} in current extrinsic or recent history.`);
25364
+ return Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, () => `Missing work package ${preReqHash} in current extrinsic or recent history.`);
25341
25365
  }
25342
25366
  return Result.ok(OK);
25343
25367
  };
@@ -25385,7 +25409,7 @@ function verifyWorkPackagesUniqueness(workPackageHashes, state) {
25385
25409
  // let's check if any of our packages is in the pipeline
25386
25410
  const intersection = packagesInPipeline.intersection(workPackageHashes);
25387
25411
  for (const packageHash of intersection) {
25388
- return Result.error(ReportsError.DuplicatePackage, `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
25412
+ return Result.error(ReportsError.DuplicatePackage, () => `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
25389
25413
  }
25390
25414
  return Result.ok(OK);
25391
25415
  }
@@ -25424,7 +25448,7 @@ workReportHashes, slot, getGuarantorAssignment) {
25424
25448
  const credentialsView = guaranteeView.credentials.view();
25425
25449
  if (credentialsView.length < REQUIRED_CREDENTIALS_RANGE[0] ||
25426
25450
  credentialsView.length > REQUIRED_CREDENTIALS_RANGE[1]) {
25427
- return Result.error(ReportsError.InsufficientGuarantees, `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
25451
+ return Result.error(ReportsError.InsufficientGuarantees, () => `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
25428
25452
  }
25429
25453
  /** Retrieve current core assignment. */
25430
25454
  const timeSlot = guaranteeView.slot.materialize();
@@ -25439,20 +25463,20 @@ workReportHashes, slot, getGuarantorAssignment) {
25439
25463
  const credentialView = credential.view();
25440
25464
  const validatorIndex = credentialView.validatorIndex.materialize();
25441
25465
  if (lastValidatorIndex >= validatorIndex) {
25442
- return Result.error(ReportsError.NotSortedOrUniqueGuarantors, `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
25466
+ return Result.error(ReportsError.NotSortedOrUniqueGuarantors, () => `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
25443
25467
  }
25444
25468
  lastValidatorIndex = validatorIndex;
25445
25469
  const signature = credentialView.signature.materialize();
25446
25470
  const guarantorData = guarantorAssignments[validatorIndex];
25447
25471
  if (guarantorData === undefined) {
25448
- return Result.error(ReportsError.BadValidatorIndex, `Invalid validator index: ${validatorIndex}`);
25472
+ return Result.error(ReportsError.BadValidatorIndex, () => `Invalid validator index: ${validatorIndex}`);
25449
25473
  }
25450
25474
  /**
25451
25475
  * Verify core assignment.
25452
25476
  * https://graypaper.fluffylabs.dev/#/5f542d7/14e40214e602
25453
25477
  */
25454
25478
  if (guarantorData.core !== coreIndex) {
25455
- return Result.error(ReportsError.WrongAssignment, `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
25479
+ return Result.error(ReportsError.WrongAssignment, () => `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
25456
25480
  }
25457
25481
  signaturesToVerify.push({
25458
25482
  signature,
@@ -25490,10 +25514,10 @@ function verify_order_verifyReportsOrder(input, chainSpec) {
25490
25514
  const reportView = guarantee.view().report.view();
25491
25515
  const coreIndex = reportView.coreIndex.materialize();
25492
25516
  if (lastCoreIndex >= coreIndex) {
25493
- return Result.error(ReportsError.OutOfOrderGuarantee, `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
25517
+ return Result.error(ReportsError.OutOfOrderGuarantee, () => `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
25494
25518
  }
25495
25519
  if (coreIndex >= noOfCores) {
25496
- return Result.error(ReportsError.BadCoreIndex, `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
25520
+ return Result.error(ReportsError.BadCoreIndex, () => `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
25497
25521
  }
25498
25522
  lastCoreIndex = coreIndex;
25499
25523
  }
@@ -25518,7 +25542,7 @@ function verify_post_signature_verifyPostSignatureChecks(input, availabilityAssi
25518
25542
  * https://graypaper.fluffylabs.dev/#/5f542d7/15ea0015ea00
25519
25543
  */
25520
25544
  if (availabilityAssignment[coreIndex] !== null) {
25521
- return Result.error(ReportsError.CoreEngaged, `Report pending availability at core: ${coreIndex}`);
25545
+ return Result.error(ReportsError.CoreEngaged, () => `Report pending availability at core: ${coreIndex}`);
25522
25546
  }
25523
25547
  /**
25524
25548
  * A report is valid only if the authorizer hash is present
@@ -25531,7 +25555,7 @@ function verify_post_signature_verifyPostSignatureChecks(input, availabilityAssi
25531
25555
  const authorizerPool = authPools.get(coreIndex);
25532
25556
  const pool = authorizerPool?.materialize() ?? [];
25533
25557
  if (pool.find((hash) => hash.isEqualTo(authorizerHash)) === undefined) {
25534
- return Result.error(ReportsError.CoreUnauthorized, `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
25558
+ return Result.error(ReportsError.CoreUnauthorized, () => `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
25535
25559
  }
25536
25560
  /**
25537
25561
  * We require that the gas allotted for accumulation of each
@@ -25543,17 +25567,17 @@ function verify_post_signature_verifyPostSignatureChecks(input, availabilityAssi
25543
25567
  for (const result of report.results) {
25544
25568
  const service = services(result.serviceId);
25545
25569
  if (service === null) {
25546
- return Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
25570
+ return Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
25547
25571
  }
25548
25572
  const info = service.getInfo();
25549
25573
  // check minimal accumulation gas
25550
25574
  if (result.gas < info.accumulateMinGas) {
25551
- return Result.error(ReportsError.ServiceItemGasTooLow, `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
25575
+ return Result.error(ReportsError.ServiceItemGasTooLow, () => `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
25552
25576
  }
25553
25577
  }
25554
25578
  const totalGas = sumU64(...report.results.map((x) => x.gas));
25555
25579
  if (totalGas.overflow || totalGas.value > G_A) {
25556
- return Result.error(ReportsError.WorkReportGasTooHigh, `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
25580
+ return Result.error(ReportsError.WorkReportGasTooHigh, () => `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
25557
25581
  }
25558
25582
  }
25559
25583
  return Result.ok(OK);
@@ -25639,7 +25663,7 @@ class reports_Reports {
25639
25663
  }
25640
25664
  const reporters = SortedSet.fromArray(bytesBlobComparator, signaturesToVerify.ok.map((x) => x.key)).slice();
25641
25665
  if (hasAnyOffenders(reporters, input.offenders)) {
25642
- return Result.error(ReportsError.BannedValidator);
25666
+ return Result.error(ReportsError.BannedValidator, () => "One or more reporters are banned validators");
25643
25667
  }
25644
25668
  return Result.ok({
25645
25669
  stateUpdate: {
@@ -25679,7 +25703,7 @@ class reports_Reports {
25679
25703
  return signaturesToVerify[idx].key;
25680
25704
  })
25681
25705
  .filter((x) => x !== null);
25682
- return Result.error(ReportsError.BadSignature, `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
25706
+ return Result.error(ReportsError.BadSignature, () => `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
25683
25707
  }
25684
25708
  /**
25685
25709
  * Get the guarantor assignment (both core and validator data)
@@ -25695,10 +25719,10 @@ class reports_Reports {
25695
25719
  const minTimeSlot = Math.max(0, headerRotation - 1) * rotationPeriod;
25696
25720
  // https://graypaper.fluffylabs.dev/#/5f542d7/155e00156900
25697
25721
  if (guaranteeTimeSlot > headerTimeSlot) {
25698
- return Result.error(ReportsError.FutureReportSlot, `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
25722
+ return Result.error(ReportsError.FutureReportSlot, () => `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
25699
25723
  }
25700
25724
  if (guaranteeTimeSlot < minTimeSlot) {
25701
- return Result.error(ReportsError.ReportEpochBeforeLast, `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
25725
+ return Result.error(ReportsError.ReportEpochBeforeLast, () => `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
25702
25726
  }
25703
25727
  // TODO [ToDr] [opti] below code needs cache.
25704
25728
  // The `G` and `G*` sets should only be computed once per rotation.
@@ -26266,6 +26290,16 @@ class chain_stf_OnChain {
26266
26290
  });
26267
26291
  const { statistics, ...statisticsRest } = statisticsUpdate;
26268
26292
  assertEmpty(statisticsRest);
26293
+ // Concatenate accumulatePreimages updates into the preimages map
26294
+ for (const [serviceId, accPreimageUpdates] of accumulatePreimages.entries()) {
26295
+ const preimagesUpdates = preimages.get(serviceId);
26296
+ if (preimagesUpdates === undefined) {
26297
+ preimages.set(serviceId, accPreimageUpdates);
26298
+ }
26299
+ else {
26300
+ preimages.set(serviceId, preimagesUpdates.concat(accPreimageUpdates));
26301
+ }
26302
+ }
26269
26303
  return Result.ok({
26270
26304
  ...(maybeAuthorizationQueues !== undefined ? { authQueues: maybeAuthorizationQueues } : {}),
26271
26305
  ...(maybeDesignatedValidatorData !== undefined ? { designatedValidatorData: maybeDesignatedValidatorData } : {}),
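Note: the loop added in the hunk above folds the accumulation-phase preimage updates into the per-service `preimages` map, concatenating buckets for services that already have entries. An equivalent generic sketch (the helper name is illustrative):

    // Merge `extra` (Map<K, V[]>) into `target`, concatenating per-key arrays.
    function mergeMapOfArrays(target, extra) {
      for (const [key, items] of extra.entries()) {
        const existing = target.get(key);
        target.set(key, existing === undefined ? items : existing.concat(items));
      }
    }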
@@ -26287,7 +26321,7 @@ class chain_stf_OnChain {
26287
26321
  recentlyAccumulated,
26288
26322
  accumulationOutputLog,
26289
26323
  ...servicesUpdate,
26290
- preimages: preimages.concat(accumulatePreimages),
26324
+ preimages,
26291
26325
  });
26292
26326
  }
26293
26327
  getUsedAuthorizerHashes(guarantees) {
@@ -26304,11 +26338,11 @@ class chain_stf_OnChain {
26304
26338
  }
26305
26339
  function checkOffendersMatch(offendersMark, headerOffendersMark) {
26306
26340
  if (offendersMark.size !== headerOffendersMark.length) {
26307
- return Result.error(OFFENDERS_ERROR, `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
26341
+ return Result.error(OFFENDERS_ERROR, () => `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
26308
26342
  }
26309
26343
  for (const key of headerOffendersMark) {
26310
26344
  if (!offendersMark.has(key)) {
26311
- return Result.error(OFFENDERS_ERROR, `Missing key: ${key}`);
26345
+ return Result.error(OFFENDERS_ERROR, () => `Missing key: ${key}`);
26312
26346
  }
26313
26347
  }
26314
26348
  return Result.ok(OK);