@typeberry/jam 0.2.0-74f246e → 0.2.0-adde0dd

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -24603,7 +24603,7 @@ function resultToString(res) {
24603
24603
  if (res.isOk) {
24604
24604
  return `OK: ${typeof res.ok === "symbol" ? res.ok.toString() : res.ok}`;
24605
24605
  }
24606
- return `${res.details}\nError: ${maybeTaggedErrorToString(res.error)}`;
24606
+ return `${res.details()}\nError: ${maybeTaggedErrorToString(res.error)}`;
24607
24607
  }
24608
24608
  /** An indication of two possible outcomes returned from a function. */
24609
24609
  const result_Result = {
@@ -24617,7 +24617,7 @@ const result_Result = {
24617
24617
  };
24618
24618
  },
24619
24619
  /** Create new [`Result`] with `Error` status. */
24620
- error: (error, details = "") => {
24620
+ error: (error, details) => {
24621
24621
  debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
24622
24622
  return {
24623
24623
  isOk: false,
@@ -24736,7 +24736,7 @@ function deepEqual(actual, expected, { context = [], errorsCollector, ignore = [
24736
24736
  }
24737
24737
  if (actual.isError && expected.isError) {
24738
24738
  deepEqual(actual.error, expected.error, { context: ctx.concat(["error"]), errorsCollector: errors, ignore });
24739
- deepEqual(actual.details, expected.details, {
24739
+ deepEqual(actual.details(), expected.details(), {
24740
24740
  context: ctx.concat(["details"]),
24741
24741
  errorsCollector: errors,
24742
24742
  // display details when error does not match
@@ -32026,6 +32026,7 @@ function accumulationOutputComparator(a, b) {
32026
32026
 
32027
32027
  ;// CONCATENATED MODULE: ./packages/jam/block/gp-constants.ts
32028
32028
 
32029
+
32029
32030
  /**
32030
32031
  * This file lists all of the constants defined in the GrayPaper appendix.
32031
32032
  *
@@ -32036,7 +32037,7 @@ function accumulationOutputComparator(a, b) {
32036
32037
  * here are only temporarily for convenience. When we figure out better names
32037
32038
  * and places for these this file will be eradicated.
32038
32039
  *
32039
- * https://graypaper.fluffylabs.dev/#/579bd12/413000413000
32040
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/442300442300?v=0.7.2
32040
32041
  */
32041
32042
  /** `G_I`: The gas allocated to invoke a work-package’s Is-Authorized logic. */
32042
32043
  const G_I = 50_000_000;
@@ -32052,8 +32053,8 @@ const S = 1024;
32052
32053
  const T = 128;
32053
32054
  /** `W_A`: The maximum size of is-authorized code in octets. */
32054
32055
  const W_A = 64_000;
32055
- /** `W_B`: The maximum size of an encoded work-package with extrinsic data and imports. */
32056
- const W_B = 13_794_305;
32056
+ /** `W_B`: The maximum size of the concatenated variable-size blobs, extrinsics and imported segments of a work-package, in octets */
32057
+ const W_B = Compatibility.isGreaterOrEqual(GpVersion.V0_7_2) ? 13_791_360 : 13_794_305;
32057
32058
  /** `W_C`: The maximum size of service code in octets. */
32058
32059
  const W_C = 4_000_000;
32059
32060
  /** `W_M`: The maximum number of imports in a work-package. */
@@ -33152,31 +33153,29 @@ var UpdatePreimageKind;
33152
33153
  * 3. Update `LookupHistory` with given value.
33153
33154
  */
33154
33155
  class UpdatePreimage {
33155
- serviceId;
33156
33156
  action;
33157
- constructor(serviceId, action) {
33158
- this.serviceId = serviceId;
33157
+ constructor(action) {
33159
33158
  this.action = action;
33160
33159
  }
33161
33160
  /** A preimage is provided. We should update the lookup history and add the preimage to db. */
33162
- static provide({ serviceId, preimage, slot, }) {
33163
- return new UpdatePreimage(serviceId, {
33161
+ static provide({ preimage, slot }) {
33162
+ return new UpdatePreimage({
33164
33163
  kind: UpdatePreimageKind.Provide,
33165
33164
  preimage,
33166
33165
  slot,
33167
33166
  });
33168
33167
  }
33169
33168
  /** The preimage should be removed completely from the database. */
33170
- static remove({ serviceId, hash, length }) {
33171
- return new UpdatePreimage(serviceId, {
33169
+ static remove({ hash, length }) {
33170
+ return new UpdatePreimage({
33172
33171
  kind: UpdatePreimageKind.Remove,
33173
33172
  hash,
33174
33173
  length,
33175
33174
  });
33176
33175
  }
33177
33176
  /** Update the lookup history of some preimage or add a new one (request). */
33178
- static updateOrAdd({ serviceId, lookupHistory }) {
33179
- return new UpdatePreimage(serviceId, {
33177
+ static updateOrAdd({ lookupHistory }) {
33178
+ return new UpdatePreimage({
33180
33179
  kind: UpdatePreimageKind.UpdateOrAdd,
33181
33180
  item: lookupHistory,
33182
33181
  });
@@ -33213,23 +33212,21 @@ var UpdateServiceKind;
33213
33212
  UpdateServiceKind[UpdateServiceKind["Create"] = 1] = "Create";
33214
33213
  })(UpdateServiceKind || (UpdateServiceKind = {}));
33215
33214
  /**
33216
- * Update service info of a particular `ServiceId` or create a new one.
33215
+ * Update service info or create a new one.
33217
33216
  */
33218
33217
  class UpdateService {
33219
- serviceId;
33220
33218
  action;
33221
- constructor(serviceId, action) {
33222
- this.serviceId = serviceId;
33219
+ constructor(action) {
33223
33220
  this.action = action;
33224
33221
  }
33225
- static update({ serviceId, serviceInfo }) {
33226
- return new UpdateService(serviceId, {
33222
+ static update({ serviceInfo }) {
33223
+ return new UpdateService({
33227
33224
  kind: UpdateServiceKind.Update,
33228
33225
  account: serviceInfo,
33229
33226
  });
33230
33227
  }
33231
- static create({ serviceId, serviceInfo, lookupHistory, }) {
33232
- return new UpdateService(serviceId, {
33228
+ static create({ serviceInfo, lookupHistory, }) {
33229
+ return new UpdateService({
33233
33230
  kind: UpdateServiceKind.Create,
33234
33231
  account: serviceInfo,
33235
33232
  lookupHistory,
@@ -33250,17 +33247,15 @@ var UpdateStorageKind;
33250
33247
  * Can either create/modify an entry or remove it.
33251
33248
  */
33252
33249
  class UpdateStorage {
33253
- serviceId;
33254
33250
  action;
33255
- constructor(serviceId, action) {
33256
- this.serviceId = serviceId;
33251
+ constructor(action) {
33257
33252
  this.action = action;
33258
33253
  }
33259
- static set({ serviceId, storage }) {
33260
- return new UpdateStorage(serviceId, { kind: UpdateStorageKind.Set, storage });
33254
+ static set({ storage }) {
33255
+ return new UpdateStorage({ kind: UpdateStorageKind.Set, storage });
33261
33256
  }
33262
- static remove({ serviceId, key }) {
33263
- return new UpdateStorage(serviceId, { kind: UpdateStorageKind.Remove, key });
33257
+ static remove({ key }) {
33258
+ return new UpdateStorage({ kind: UpdateStorageKind.Remove, key });
33264
33259
  }
33265
33260
  get key() {
33266
33261
  if (this.action.kind === UpdateStorageKind.Remove) {
@@ -33469,12 +33464,12 @@ class in_memory_state_InMemoryState extends WithDebug {
33469
33464
  * Modify the state and apply a single state update.
33470
33465
  */
33471
33466
  applyUpdate(update) {
33472
- const { servicesRemoved, servicesUpdates, preimages, storage, ...rest } = update;
33467
+ const { removed, created: _, updated, preimages, storage, ...rest } = update;
33473
33468
  // just assign all other variables
33474
33469
  Object.assign(this, rest);
33475
33470
  // and update the services state
33476
33471
  let result;
33477
- result = this.updateServices(servicesUpdates);
33472
+ result = this.updateServices(updated);
33478
33473
  if (result.isError) {
33479
33474
  return result;
33480
33475
  }
@@ -33486,7 +33481,7 @@ class in_memory_state_InMemoryState extends WithDebug {
33486
33481
  if (result.isError) {
33487
33482
  return result;
33488
33483
  }
33489
- this.removeServices(servicesRemoved);
33484
+ this.removeServices(removed);
33490
33485
  return result_Result.ok(result_OK);
33491
33486
  }
33492
33487
  removeServices(servicesRemoved) {
@@ -33495,89 +33490,102 @@ class in_memory_state_InMemoryState extends WithDebug {
33495
33490
  this.services.delete(serviceId);
33496
33491
  }
33497
33492
  }
33498
- updateStorage(storage) {
33499
- for (const { serviceId, action } of storage ?? []) {
33500
- const { kind } = action;
33501
- const service = this.services.get(serviceId);
33502
- if (service === undefined) {
33503
- return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update storage of non-existing service: ${serviceId}`);
33504
- }
33505
- if (kind === UpdateStorageKind.Set) {
33506
- const { key, value } = action.storage;
33507
- service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
33508
- }
33509
- else if (kind === UpdateStorageKind.Remove) {
33510
- const { key } = action;
33511
- debug_check `
33493
+ updateStorage(storageUpdates) {
33494
+ if (storageUpdates === undefined) {
33495
+ return result_Result.ok(result_OK);
33496
+ }
33497
+ for (const [serviceId, updates] of storageUpdates.entries()) {
33498
+ for (const update of updates) {
33499
+ const { kind } = update.action;
33500
+ const service = this.services.get(serviceId);
33501
+ if (service === undefined) {
33502
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update storage of non-existing service: ${serviceId}`);
33503
+ }
33504
+ if (kind === UpdateStorageKind.Set) {
33505
+ const { key, value } = update.action.storage;
33506
+ service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
33507
+ }
33508
+ else if (kind === UpdateStorageKind.Remove) {
33509
+ const { key } = update.action;
33510
+ debug_check `
33512
33511
  ${service.data.storage.has(key.toString())}
33513
- Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
33512
+ Attempting to remove non-existing storage item at ${serviceId}: ${update.action.key}
33514
33513
  `;
33515
- service.data.storage.delete(key.toString());
33516
- }
33517
- else {
33518
- debug_assertNever(kind);
33514
+ service.data.storage.delete(key.toString());
33515
+ }
33516
+ else {
33517
+ debug_assertNever(kind);
33518
+ }
33519
33519
  }
33520
33520
  }
33521
33521
  return result_Result.ok(result_OK);
33522
33522
  }
33523
- updatePreimages(preimages) {
33524
- for (const { serviceId, action } of preimages ?? []) {
33523
+ updatePreimages(preimagesUpdates) {
33524
+ if (preimagesUpdates === undefined) {
33525
+ return result_Result.ok(result_OK);
33526
+ }
33527
+ for (const [serviceId, updates] of preimagesUpdates.entries()) {
33525
33528
  const service = this.services.get(serviceId);
33526
33529
  if (service === undefined) {
33527
- return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update preimage of non-existing service: ${serviceId}`);
33528
- }
33529
- const { kind } = action;
33530
- if (kind === UpdatePreimageKind.Provide) {
33531
- const { preimage, slot } = action;
33532
- if (service.data.preimages.has(preimage.hash)) {
33533
- return result_Result.error(in_memory_state_UpdateError.PreimageExists, `Overwriting existing preimage at ${serviceId}: ${preimage}`);
33534
- }
33535
- service.data.preimages.set(preimage.hash, preimage);
33536
- if (slot !== null) {
33537
- const lookupHistory = service.data.lookupHistory.get(preimage.hash);
33538
- const length = numbers_tryAsU32(preimage.blob.length);
33539
- const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
33540
- if (lookupHistory === undefined) {
33541
- // no lookup history for that preimage at all (edge case, should be requested)
33542
- service.data.lookupHistory.set(preimage.hash, [lookup]);
33530
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update preimage of non-existing service: ${serviceId}`);
33531
+ }
33532
+ for (const update of updates) {
33533
+ const { kind } = update.action;
33534
+ if (kind === UpdatePreimageKind.Provide) {
33535
+ const { preimage, slot } = update.action;
33536
+ if (service.data.preimages.has(preimage.hash)) {
33537
+ return result_Result.error(in_memory_state_UpdateError.PreimageExists, () => `Overwriting existing preimage at ${serviceId}: ${preimage}`);
33543
33538
  }
33544
- else {
33545
- // insert or replace existing entry
33546
- const index = lookupHistory.map((x) => x.length).indexOf(length);
33547
- lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
33539
+ service.data.preimages.set(preimage.hash, preimage);
33540
+ if (slot !== null) {
33541
+ const lookupHistory = service.data.lookupHistory.get(preimage.hash);
33542
+ const length = numbers_tryAsU32(preimage.blob.length);
33543
+ const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
33544
+ if (lookupHistory === undefined) {
33545
+ // no lookup history for that preimage at all (edge case, should be requested)
33546
+ service.data.lookupHistory.set(preimage.hash, [lookup]);
33547
+ }
33548
+ else {
33549
+ // insert or replace existing entry
33550
+ const index = lookupHistory.map((x) => x.length).indexOf(length);
33551
+ lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
33552
+ }
33548
33553
  }
33549
33554
  }
33550
- }
33551
- else if (kind === UpdatePreimageKind.Remove) {
33552
- const { hash, length } = action;
33553
- service.data.preimages.delete(hash);
33554
- const history = service.data.lookupHistory.get(hash) ?? [];
33555
- const idx = history.map((x) => x.length).indexOf(length);
33556
- if (idx !== -1) {
33557
- history.splice(idx, 1);
33555
+ else if (kind === UpdatePreimageKind.Remove) {
33556
+ const { hash, length } = update.action;
33557
+ service.data.preimages.delete(hash);
33558
+ const history = service.data.lookupHistory.get(hash) ?? [];
33559
+ const idx = history.map((x) => x.length).indexOf(length);
33560
+ if (idx !== -1) {
33561
+ history.splice(idx, 1);
33562
+ }
33563
+ }
33564
+ else if (kind === UpdatePreimageKind.UpdateOrAdd) {
33565
+ const { item } = update.action;
33566
+ const history = service.data.lookupHistory.get(item.hash) ?? [];
33567
+ const existingIdx = history.map((x) => x.length).indexOf(item.length);
33568
+ const removeCount = existingIdx === -1 ? 0 : 1;
33569
+ history.splice(existingIdx, removeCount, item);
33570
+ service.data.lookupHistory.set(item.hash, history);
33571
+ }
33572
+ else {
33573
+ debug_assertNever(kind);
33558
33574
  }
33559
- }
33560
- else if (kind === UpdatePreimageKind.UpdateOrAdd) {
33561
- const { item } = action;
33562
- const history = service.data.lookupHistory.get(item.hash) ?? [];
33563
- const existingIdx = history.map((x) => x.length).indexOf(item.length);
33564
- const removeCount = existingIdx === -1 ? 0 : 1;
33565
- history.splice(existingIdx, removeCount, item);
33566
- service.data.lookupHistory.set(item.hash, history);
33567
- }
33568
- else {
33569
- debug_assertNever(kind);
33570
33575
  }
33571
33576
  }
33572
33577
  return result_Result.ok(result_OK);
33573
33578
  }
33574
33579
  updateServices(servicesUpdates) {
33575
- for (const { serviceId, action } of servicesUpdates ?? []) {
33576
- const { kind, account } = action;
33580
+ if (servicesUpdates === undefined) {
33581
+ return result_Result.ok(result_OK);
33582
+ }
33583
+ for (const [serviceId, update] of servicesUpdates.entries()) {
33584
+ const { kind, account } = update.action;
33577
33585
  if (kind === UpdateServiceKind.Create) {
33578
- const { lookupHistory } = action;
33586
+ const { lookupHistory } = update.action;
33579
33587
  if (this.services.has(serviceId)) {
33580
- return result_Result.error(in_memory_state_UpdateError.DuplicateService, `${serviceId} already exists!`);
33588
+ return result_Result.error(in_memory_state_UpdateError.DuplicateService, () => `${serviceId} already exists!`);
33581
33589
  }
33582
33590
  this.services.set(serviceId, new InMemoryService(serviceId, {
33583
33591
  info: account,
@@ -33589,7 +33597,7 @@ class in_memory_state_InMemoryState extends WithDebug {
33589
33597
  else if (kind === UpdateServiceKind.Update) {
33590
33598
  const existingService = this.services.get(serviceId);
33591
33599
  if (existingService === undefined) {
33592
- return result_Result.error(in_memory_state_UpdateError.NoService, `Cannot update ${serviceId} because it does not exist.`);
33600
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Cannot update ${serviceId} because it does not exist.`);
33593
33601
  }
33594
33602
  existingService.data.info = account;
33595
33603
  }
@@ -34836,7 +34844,6 @@ function getKeccakTrieHasher(hasher) {
34836
34844
 
34837
34845
 
34838
34846
 
34839
-
34840
34847
  /** What should be done with that key? */
34841
34848
  var StateEntryUpdateAction;
34842
34849
  (function (StateEntryUpdateAction) {
@@ -34852,76 +34859,88 @@ function* serializeStateUpdate(spec, blake2b, update) {
34852
34859
  yield* serializeBasicKeys(spec, update);
34853
34860
  const encode = (codec, val) => encoder_Encoder.encodeObject(codec, val, spec);
34854
34861
  // then let's proceed with service updates
34855
- yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
34862
+ yield* serializeServiceUpdates(update.updated, encode, blake2b);
34856
34863
  yield* serializePreimages(update.preimages, encode, blake2b);
34857
34864
  yield* serializeStorage(update.storage, blake2b);
34858
- yield* serializeRemovedServices(update.servicesRemoved);
34865
+ yield* serializeRemovedServices(update.removed);
34859
34866
  }
34860
34867
  function* serializeRemovedServices(servicesRemoved) {
34861
- for (const serviceId of servicesRemoved ?? []) {
34868
+ if (servicesRemoved === undefined) {
34869
+ return;
34870
+ }
34871
+ for (const serviceId of servicesRemoved) {
34862
34872
  // TODO [ToDr] what about all data associated with a service?
34863
34873
  const codec = serialize_serialize.serviceData(serviceId);
34864
34874
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
34865
34875
  }
34866
34876
  }
34867
- function* serializeStorage(storage, blake2b) {
34868
- for (const { action, serviceId } of storage ?? []) {
34869
- switch (action.kind) {
34870
- case UpdateStorageKind.Set: {
34871
- const key = action.storage.key;
34872
- const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
34873
- yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
34874
- break;
34875
- }
34876
- case UpdateStorageKind.Remove: {
34877
- const key = action.key;
34878
- const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
34879
- yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
34880
- break;
34877
+ function* serializeStorage(storageUpdates, blake2b) {
34878
+ if (storageUpdates === undefined) {
34879
+ return;
34880
+ }
34881
+ for (const [serviceId, updates] of storageUpdates.entries()) {
34882
+ for (const { action } of updates) {
34883
+ switch (action.kind) {
34884
+ case UpdateStorageKind.Set: {
34885
+ const key = action.storage.key;
34886
+ const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
34887
+ yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
34888
+ break;
34889
+ }
34890
+ case UpdateStorageKind.Remove: {
34891
+ const key = action.key;
34892
+ const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
34893
+ yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
34894
+ break;
34895
+ }
34881
34896
  }
34882
- default:
34883
- debug_assertNever(action);
34884
34897
  }
34885
34898
  }
34886
34899
  }
34887
- function* serializePreimages(preimages, encode, blake2b) {
34888
- for (const { action, serviceId } of preimages ?? []) {
34889
- switch (action.kind) {
34890
- case UpdatePreimageKind.Provide: {
34891
- const { hash, blob } = action.preimage;
34892
- const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
34893
- yield [StateEntryUpdateAction.Insert, codec.key, blob];
34894
- if (action.slot !== null) {
34895
- const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, numbers_tryAsU32(blob.length));
34896
- yield [
34897
- StateEntryUpdateAction.Insert,
34898
- codec2.key,
34899
- encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
34900
- ];
34900
+ function* serializePreimages(preimagesUpdates, encode, blake2b) {
34901
+ if (preimagesUpdates === undefined) {
34902
+ return;
34903
+ }
34904
+ for (const [serviceId, updates] of preimagesUpdates.entries()) {
34905
+ for (const { action } of updates) {
34906
+ switch (action.kind) {
34907
+ case UpdatePreimageKind.Provide: {
34908
+ const { hash, blob } = action.preimage;
34909
+ const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
34910
+ yield [StateEntryUpdateAction.Insert, codec.key, blob];
34911
+ if (action.slot !== null) {
34912
+ const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, numbers_tryAsU32(blob.length));
34913
+ yield [
34914
+ StateEntryUpdateAction.Insert,
34915
+ codec2.key,
34916
+ encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
34917
+ ];
34918
+ }
34919
+ break;
34920
+ }
34921
+ case UpdatePreimageKind.UpdateOrAdd: {
34922
+ const { hash, length, slots } = action.item;
34923
+ const codec = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
34924
+ yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
34925
+ break;
34926
+ }
34927
+ case UpdatePreimageKind.Remove: {
34928
+ const { hash, length } = action;
34929
+ const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
34930
+ yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
34931
+ const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
34932
+ yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
34933
+ break;
34901
34934
  }
34902
- break;
34903
- }
34904
- case UpdatePreimageKind.UpdateOrAdd: {
34905
- const { hash, length, slots } = action.item;
34906
- const codec = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
34907
- yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
34908
- break;
34909
- }
34910
- case UpdatePreimageKind.Remove: {
34911
- const { hash, length } = action;
34912
- const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
34913
- yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
34914
- const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
34915
- yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
34916
- break;
34917
34935
  }
34918
- default:
34919
- debug_assertNever(action);
34920
34936
  }
34921
34937
  }
34922
34938
  }
34923
34939
  function* serializeServiceUpdates(servicesUpdates, encode, blake2b) {
34924
- for (const { action, serviceId } of servicesUpdates ?? []) {
34940
+ if (servicesUpdates === undefined) {
34941
+ return;
34942
+ }
34943
+ for (const [serviceId, { action }] of servicesUpdates.entries()) {
34925
34944
  // new service being created or updated
34926
34945
  const codec = serialize_serialize.serviceData(serviceId);
34927
34946
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, action.account)];
@@ -35208,13 +35227,13 @@ class LeafDb {
35208
35227
  */
35209
35228
  static fromLeavesBlob(blob, db) {
35210
35229
  if (blob.length % TRIE_NODE_BYTES !== 0) {
35211
- return result_Result.error(LeafDbError.InvalidLeafData, `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
35230
+ return result_Result.error(LeafDbError.InvalidLeafData, () => `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
35212
35231
  }
35213
35232
  const leaves = SortedSet.fromArray(leafComparator, []);
35214
35233
  for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
35215
35234
  const node = new TrieNode(nodeData.raw);
35216
35235
  if (node.getNodeType() === NodeType.Branch) {
35217
- return result_Result.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
35236
+ return result_Result.error(LeafDbError.InvalidLeafData, () => `Branch node detected: ${nodeData}`);
35218
35237
  }
35219
35238
  leaves.insert(node.asLeafNode());
35220
35239
  }
@@ -35607,7 +35626,7 @@ class LmdbStates {
35607
35626
  }
35608
35627
  catch (e) {
35609
35628
  states_logger.error `${e}`;
35610
- return result_Result.error(StateUpdateError.Commit);
35629
+ return result_Result.error(StateUpdateError.Commit, () => `Failed to commit state update: ${e}`);
35611
35630
  }
35612
35631
  return result_Result.ok(result_OK);
35613
35632
  }
@@ -36539,7 +36558,7 @@ const importBlockResultCodec = descriptors_codec.custom({
36539
36558
  }
36540
36559
  if (kind === 1) {
36541
36560
  const error = d.bytesBlob();
36542
- return result_Result.error(error.asText());
36561
+ return result_Result.error(error.asText(), () => error.asText());
36543
36562
  }
36544
36563
  throw new Error(`Invalid Result: ${kind}`);
36545
36564
  }, (s) => {
@@ -36590,7 +36609,7 @@ class MainReady extends State {
36590
36609
  if (res instanceof Uint8Array) {
36591
36610
  return decoder_Decoder.decodeObject(importBlockResultCodec, res);
36592
36611
  }
36593
- return result_Result.error("Invalid worker response.");
36612
+ return result_Result.error("Invalid worker response.", () => "Invalid worker response: expected Uint8Array");
36594
36613
  }
36595
36614
  async getStateEntries(port, hash) {
36596
36615
  const res = await port.sendRequest("getStateEntries", hash, [hash.buffer]);
@@ -36702,13 +36721,13 @@ class ImporterReady extends State {
36702
36721
  response = result_Result.ok(this.importer.getBestStateRootHash() ?? ZERO_HASH.asOpaque());
36703
36722
  }
36704
36723
  else {
36705
- response = result_Result.error(resultToString(res));
36724
+ response = result_Result.error(resultToString(res), () => resultToString(res));
36706
36725
  }
36707
36726
  }
36708
36727
  catch (e) {
36709
36728
  state_machine_logger.error `Failed to import block: ${e}`;
36710
36729
  state_machine_logger.error `${e instanceof Error ? e.stack : ""}`;
36711
- response = result_Result.error(`${e}`);
36730
+ response = result_Result.error(`${e}`, () => `${e}`);
36712
36731
  }
36713
36732
  const encoded = encoder_Encoder.encodeObject(importBlockResultCodec, response);
36714
36733
  return {
@@ -36986,32 +37005,33 @@ class Preimages {
36986
37005
  }
36987
37006
  if (prevPreimage.requester > currPreimage.requester ||
36988
37007
  currPreimage.blob.compare(prevPreimage.blob).isLessOrEqual()) {
36989
- return result_Result.error(PreimagesErrorCode.PreimagesNotSortedUnique);
37008
+ return result_Result.error(PreimagesErrorCode.PreimagesNotSortedUnique, () => `Preimages not sorted/unique at index ${i}`);
36990
37009
  }
36991
37010
  }
36992
37011
  const { preimages, slot } = input;
36993
- const pendingChanges = [];
37012
+ const pendingChanges = new Map();
36994
37013
  // select preimages for integration
36995
37014
  for (const preimage of preimages) {
36996
37015
  const { requester, blob } = preimage;
36997
37016
  const hash = this.blake2b.hashBytes(blob).asOpaque();
36998
37017
  const service = this.state.getService(requester);
36999
37018
  if (service === null) {
37000
- return result_Result.error(PreimagesErrorCode.AccountNotFound);
37019
+ return result_Result.error(PreimagesErrorCode.AccountNotFound, () => `Service not found: ${requester}`);
37001
37020
  }
37002
37021
  const hasPreimage = service.hasPreimage(hash);
37003
37022
  const slots = service.getLookupHistory(hash, numbers_tryAsU32(blob.length));
37004
37023
  // https://graypaper.fluffylabs.dev/#/5f542d7/181800181900
37005
37024
  // https://graypaper.fluffylabs.dev/#/5f542d7/116f0011a500
37006
37025
  if (hasPreimage || slots === null || !LookupHistoryItem.isRequested(slots)) {
37007
- return result_Result.error(PreimagesErrorCode.PreimageUnneeded);
37026
+ return result_Result.error(PreimagesErrorCode.PreimageUnneeded, () => `Preimage unneeded: requester=${requester}, hash=${hash}, hasPreimage=${hasPreimage}, isRequested=${slots !== null && LookupHistoryItem.isRequested(slots)}`);
37008
37027
  }
37009
37028
  // https://graypaper.fluffylabs.dev/#/5f542d7/18c00018f300
37010
- pendingChanges.push(UpdatePreimage.provide({
37011
- serviceId: requester,
37029
+ const updates = pendingChanges.get(requester) ?? [];
37030
+ updates.push(UpdatePreimage.provide({
37012
37031
  preimage: PreimageItem.create({ hash, blob }),
37013
37032
  slot,
37014
37033
  }));
37034
+ pendingChanges.set(requester, updates);
37015
37035
  }
37016
37036
  return result_Result.ok({
37017
37037
  preimages: pendingChanges,
@@ -38274,20 +38294,20 @@ async function verifyCertificate(certs) {
38274
38294
  // Must present exactly one cert
38275
38295
  if (certs.length !== 1) {
38276
38296
  certificate_logger.log `Rejecting peer: expected exactly one certificate, got: ${certs.length}`;
38277
- return result_Result.error(VerifyCertError.NoCertificate);
38297
+ return result_Result.error(VerifyCertError.NoCertificate, () => `Certificate validation failed: expected exactly one certificate, got ${certs.length}`);
38278
38298
  }
38279
38299
  // Parse with Node's X509Certificate (accepts PEM or DER)
38280
38300
  const xc = new (external_node_crypto_default()).X509Certificate(certs[0]);
38281
38301
  // Must be Ed25519 key
38282
38302
  if (xc.publicKey.asymmetricKeyType !== CURVE_NAME.toLowerCase()) {
38283
38303
  certificate_logger.log `Rejecting peer using non-ed25519 certificate: ${xc.publicKey.asymmetricKeyType}`;
38284
- return result_Result.error(VerifyCertError.NotEd25519);
38304
+ return result_Result.error(VerifyCertError.NotEd25519, () => `Certificate validation failed: expected Ed25519 key, got ${xc.publicKey.asymmetricKeyType}`);
38285
38305
  }
38286
38306
  // Extract raw public key via JWK export
38287
38307
  const jwk = xc.publicKey.export({ format: "jwk" });
38288
38308
  if (jwk.kty !== KEY_TYPE || jwk.crv !== CURVE_NAME) {
38289
38309
  certificate_logger.log `Public key type mismatch: ${jwk.kty}, ${jwk.crv}`;
38290
- return result_Result.error(VerifyCertError.PublicKeyTypeMismatch);
38310
+ return result_Result.error(VerifyCertError.PublicKeyTypeMismatch, () => `Certificate validation failed: public key type mismatch (kty: ${jwk.kty}, crv: ${jwk.crv})`);
38291
38311
  }
38292
38312
  // SAN must be exactly 'e'+base32(rawPub)
38293
38313
  const expectedSan = altNameJwk(jwk);
@@ -38295,11 +38315,11 @@ async function verifyCertificate(certs) {
38295
38315
  const m = sanField.match(/DNS:([^,]+)/);
38296
38316
  if (m === null || m[1] !== expectedSan) {
38297
38317
  certificate_logger.log `AltName mismatch. Expected: '${expectedSan}', got: '${m?.[1]}'`;
38298
- return result_Result.error(VerifyCertError.AltNameMismatch);
38318
+ return result_Result.error(VerifyCertError.AltNameMismatch, () => `Certificate validation failed: altName mismatch (expected: ${expectedSan}, got: ${m?.[1] ?? "none"})`);
38299
38319
  }
38300
38320
  const key = Buffer.from(jwk.x ?? "", "base64url");
38301
38321
  if (!xc.verify(xc.publicKey)) {
38302
- return result_Result.error(VerifyCertError.IncorrectSignature);
38322
+ return result_Result.error(VerifyCertError.IncorrectSignature, () => "Certificate validation failed: incorrect signature");
38303
38323
  }
38304
38324
  const publicKey = bytes_Bytes.fromBlob(new Uint8Array(key), ED25519_KEY_BYTES);
38305
38325
  return result_Result.ok({
@@ -39333,7 +39353,7 @@ function handleGetBlockSequence(chainSpec, blocks, startHash, direction, limit)
39333
39353
  };
39334
39354
  const startBlock = getBlockView(startHash);
39335
39355
  if (startBlock === null) {
39336
- return result_Result.error(BlockSequenceError.NoStartBlock);
39356
+ return result_Result.error(BlockSequenceError.NoStartBlock, () => `Block sequence error: start block ${startHash} not found`);
39337
39357
  }
39338
39358
  if (direction === Direction.AscExcl) {
39339
39359
  // Since we don't have an index of all blocks, we need to start from
@@ -39345,7 +39365,7 @@ function handleGetBlockSequence(chainSpec, blocks, startHash, direction, limit)
39345
39365
  const currentHeader = blocks.getHeader(currentHash);
39346
39366
  // some errornuous situation, we didn't really reach the block?
39347
39367
  if (currentHeader === null || currentHeader.timeSlotIndex.materialize() < startIndex) {
39348
- return result_Result.error(BlockSequenceError.BlockOnFork);
39368
+ return result_Result.error(BlockSequenceError.BlockOnFork, () => `Block sequence error: start block ${startHash} appears to be on a fork`);
39349
39369
  }
39350
39370
  // we have everything we need, let's return it now
39351
39371
  if (startHash.isEqualTo(currentHash)) {
@@ -40750,8 +40770,8 @@ class FuzzHandler {
40750
40770
  if (res.isOk) {
40751
40771
  return res;
40752
40772
  }
40753
- ipc_logger.log `Rejecting block with error: ${res.error}. ${res.details}`;
40754
- return result_Result.error(ErrorMessage.create({ message: res.error }));
40773
+ ipc_logger.log `Rejecting block with error: ${res.error}. ${res.details()}`;
40774
+ return result_Result.error(ErrorMessage.create({ message: res.error }), res.details);
40755
40775
  }
40756
40776
  async getPeerInfo(value) {
40757
40777
  ipc_logger.info `Fuzzer ${value} connected.`;
@@ -41006,7 +41026,7 @@ class BlockVerifier {
41006
41026
  const headerHash = this.hasher.header(headerView);
41007
41027
  // check if current block is already imported
41008
41028
  if (this.blocks.getHeader(headerHash.hash) !== null) {
41009
- return result_Result.error(BlockVerifierError.AlreadyImported, `Block ${headerHash.hash} is already imported.`);
41029
+ return result_Result.error(BlockVerifierError.AlreadyImported, () => `Block ${headerHash.hash} is already imported.`);
41010
41030
  }
41011
41031
  // Check if parent block exists.
41012
41032
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c82000c8200?v=0.6.5
@@ -41016,14 +41036,14 @@ class BlockVerifier {
41016
41036
  if (!parentHash.isEqualTo(block_verifier_ZERO_HASH)) {
41017
41037
  const parentBlock = this.blocks.getHeader(parentHash);
41018
41038
  if (parentBlock === null) {
41019
- return result_Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
41039
+ return result_Result.error(BlockVerifierError.ParentNotFound, () => `Parent ${parentHash.toString()} not found`);
41020
41040
  }
41021
41041
  // Check if the time slot index is consecutive and not from future.
41022
41042
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c02010c0201?v=0.6.5
41023
41043
  const timeslot = headerView.timeSlotIndex.materialize();
41024
41044
  const parentTimeslot = parentBlock.timeSlotIndex.materialize();
41025
41045
  if (timeslot <= parentTimeslot) {
41026
- return result_Result.error(BlockVerifierError.InvalidTimeSlot, `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
41046
+ return result_Result.error(BlockVerifierError.InvalidTimeSlot, () => `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
41027
41047
  }
41028
41048
  }
41029
41049
  // Check if extrinsic is valid.
@@ -41031,17 +41051,17 @@ class BlockVerifier {
41031
41051
  const extrinsicHash = headerView.extrinsicHash.materialize();
41032
41052
  const extrinsicMerkleCommitment = this.hasher.extrinsic(block.extrinsic.view());
41033
41053
  if (!extrinsicHash.isEqualTo(extrinsicMerkleCommitment.hash)) {
41034
- return result_Result.error(BlockVerifierError.InvalidExtrinsic, `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
41054
+ return result_Result.error(BlockVerifierError.InvalidExtrinsic, () => `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
41035
41055
  }
41036
41056
  // Check if the state root is valid.
41037
41057
  // https://graypaper.fluffylabs.dev/#/cc517d7/0c18010c1801?v=0.6.5
41038
41058
  const stateRoot = headerView.priorStateRoot.materialize();
41039
41059
  const posteriorStateRoot = this.blocks.getPostStateRoot(parentHash);
41040
41060
  if (posteriorStateRoot === null) {
41041
- return result_Result.error(BlockVerifierError.StateRootNotFound, `Posterior state root ${parentHash.toString()} not found`);
41061
+ return result_Result.error(BlockVerifierError.StateRootNotFound, () => `Posterior state root ${parentHash.toString()} not found`);
41042
41062
  }
41043
41063
  if (!stateRoot.isEqualTo(posteriorStateRoot)) {
41044
- return result_Result.error(BlockVerifierError.InvalidStateRoot, `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
41064
+ return result_Result.error(BlockVerifierError.InvalidStateRoot, () => `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
41045
41065
  }
41046
41066
  return result_Result.ok(headerHash.hash);
41047
41067
  }
@@ -41166,7 +41186,7 @@ class Disputes {
41166
41186
  // check if culprits are sorted by key
41167
41187
  // https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
41168
41188
  if (!isUniqueSortedBy(disputes.culprits, "key")) {
41169
- return result_Result.error(DisputesErrorCode.CulpritsNotSortedUnique);
41189
+ return result_Result.error(DisputesErrorCode.CulpritsNotSortedUnique, () => "Culprits are not uniquely sorted by key");
41170
41190
  }
41171
41191
  const culpritsLength = disputes.culprits.length;
41172
41192
  for (let i = 0; i < culpritsLength; i++) {
@@ -41175,24 +41195,24 @@ class Disputes {
41175
41195
  // https://graypaper.fluffylabs.dev/#/579bd12/125501125501
41176
41196
  const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
41177
41197
  if (isInPunishSet) {
41178
- return result_Result.error(DisputesErrorCode.OffenderAlreadyReported);
41198
+ return result_Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: culprit ${i}, key=${key}`);
41179
41199
  }
41180
41200
  // check if the guarantor key is correct
41181
41201
  // https://graypaper.fluffylabs.dev/#/85129da/125501125501?v=0.6.3
41182
41202
  if (!allValidatorKeys.has(key)) {
41183
- return result_Result.error(DisputesErrorCode.BadGuarantorKey);
41203
+ return result_Result.error(DisputesErrorCode.BadGuarantorKey, () => `Bad guarantor key: culprit ${i}, key=${key}`);
41184
41204
  }
41185
41205
  // verify if the culprit will be in new bad set
41186
41206
  // https://graypaper.fluffylabs.dev/#/579bd12/124601124601
41187
41207
  const isInNewBadSet = newItems.asDictionaries().badSet.has(workReportHash);
41188
41208
  if (!isInNewBadSet) {
41189
- return result_Result.error(DisputesErrorCode.CulpritsVerdictNotBad);
41209
+ return result_Result.error(DisputesErrorCode.CulpritsVerdictNotBad, () => `Culprit verdict not bad: culprit ${i}, work report=${workReportHash}`);
41190
41210
  }
41191
41211
  // verify culprit signature
41192
41212
  // https://graypaper.fluffylabs.dev/#/579bd12/125c01125c01
41193
41213
  const result = verificationResult.culprits[i];
41194
41214
  if (!result?.isValid) {
41195
- return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for culprit: ${i}`);
41215
+ return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for culprit: ${i}`);
41196
41216
  }
41197
41217
  }
41198
41218
  return result_Result.ok(null);
@@ -41201,7 +41221,7 @@ class Disputes {
41201
41221
  // check if faults are sorted by key
41202
41222
  // https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
41203
41223
  if (!isUniqueSortedBy(disputes.faults, "key")) {
41204
- return result_Result.error(DisputesErrorCode.FaultsNotSortedUnique);
41224
+ return result_Result.error(DisputesErrorCode.FaultsNotSortedUnique, () => "Faults are not uniquely sorted by key");
41205
41225
  }
41206
41226
  const faultsLength = disputes.faults.length;
41207
41227
  for (let i = 0; i < faultsLength; i++) {
@@ -41210,12 +41230,12 @@ class Disputes {
41210
41230
  // https://graypaper.fluffylabs.dev/#/579bd12/12a20112a201
41211
41231
  const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
41212
41232
  if (isInPunishSet) {
41213
- return result_Result.error(DisputesErrorCode.OffenderAlreadyReported);
41233
+ return result_Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: fault ${i}, key=${key}`);
41214
41234
  }
41215
41235
  // check if the auditor key is correct
41216
41236
  // https://graypaper.fluffylabs.dev/#/85129da/12a20112a201?v=0.6.3
41217
41237
  if (!allValidatorKeys.has(key)) {
41218
- return result_Result.error(DisputesErrorCode.BadAuditorKey);
41238
+ return result_Result.error(DisputesErrorCode.BadAuditorKey, () => `Bad auditor key: fault ${i}, key=${key}`);
41219
41239
  }
41220
41240
  // verify if the fault will be included in new good/bad set
41221
41241
  // it may be not correct as in GP there is "iff" what means it should be rather
@@ -41227,14 +41247,14 @@ class Disputes {
41227
41247
  const isInNewGoodSet = goodSet.has(workReportHash);
41228
41248
  const isInNewBadSet = badSet.has(workReportHash);
41229
41249
  if (isInNewGoodSet || !isInNewBadSet) {
41230
- return result_Result.error(DisputesErrorCode.FaultVerdictWrong);
41250
+ return result_Result.error(DisputesErrorCode.FaultVerdictWrong, () => `Fault verdict wrong: fault ${i}, work report=${workReportHash}, inGood=${isInNewGoodSet}, inBad=${isInNewBadSet}`);
41231
41251
  }
41232
41252
  }
41233
41253
  // verify fault signature. Verification was done earlier, here we only check the result.
41234
41254
  // https://graypaper.fluffylabs.dev/#/579bd12/12a90112a901
41235
41255
  const result = verificationResult.faults[i];
41236
41256
  if (!result.isValid) {
41237
- return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for fault: ${i}`);
41257
+ return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for fault: ${i}`);
41238
41258
  }
41239
41259
  }
41240
41260
  return result_Result.ok(null);
@@ -41243,32 +41263,32 @@ class Disputes {
41243
41263
  // check if verdicts are correctly sorted
41244
41264
  // https://graypaper.fluffylabs.dev/#/579bd12/12c40112c401
41245
41265
  if (!isUniqueSortedBy(disputes.verdicts, "workReportHash")) {
41246
- return result_Result.error(DisputesErrorCode.VerdictsNotSortedUnique);
41266
+ return result_Result.error(DisputesErrorCode.VerdictsNotSortedUnique, () => "Verdicts are not uniquely sorted by work report hash");
41247
41267
  }
41248
41268
  // check if judgement are correctly sorted
41249
41269
  // https://graypaper.fluffylabs.dev/#/579bd12/123702123802
41250
41270
  if (disputes.verdicts.some((verdict) => !isUniqueSortedByIndex(verdict.votes))) {
41251
- return result_Result.error(DisputesErrorCode.JudgementsNotSortedUnique);
41271
+ return result_Result.error(DisputesErrorCode.JudgementsNotSortedUnique, () => "Judgements are not uniquely sorted by index");
41252
41272
  }
41253
41273
  const currentEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
41254
41274
  let voteSignatureIndex = 0;
41255
41275
  for (const { votesEpoch, votes } of disputes.verdicts) {
41256
41276
  // https://graypaper.fluffylabs.dev/#/579bd12/12bb0012bc00
41257
41277
  if (votesEpoch !== currentEpoch && votesEpoch + 1 !== currentEpoch) {
41258
- return result_Result.error(DisputesErrorCode.BadJudgementAge);
41278
+ return result_Result.error(DisputesErrorCode.BadJudgementAge, () => `Bad judgement age: epoch=${votesEpoch}, current=${currentEpoch}`);
41259
41279
  }
41260
41280
  const k = votesEpoch === currentEpoch ? this.state.currentValidatorData : this.state.previousValidatorData;
41261
41281
  for (const { index } of votes) {
41262
41282
  const key = k[index]?.ed25519;
41263
41283
  // no particular GP fragment but I think we don't believe in ghosts
41264
41284
  if (key === undefined) {
41265
- return result_Result.error(DisputesErrorCode.BadValidatorIndex);
41285
+ return result_Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index: ${index} in epoch ${votesEpoch}`);
41266
41286
  }
41267
41287
  // verify vote signature. Verification was done earlier, here we only check the result.
41268
41288
  // https://graypaper.fluffylabs.dev/#/579bd12/12cd0012cd00
41269
41289
  const result = verificationResult.judgements[voteSignatureIndex];
41270
41290
  if (!result.isValid) {
41271
- return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for judgement: ${voteSignatureIndex}`);
41291
+ return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for judgement: ${voteSignatureIndex}`);
41272
41292
  }
41273
41293
  voteSignatureIndex += 1;
41274
41294
  }
@@ -41284,7 +41304,7 @@ class Disputes {
41284
41304
  const isInBadSet = badSet.has(verdict.workReportHash);
41285
41305
  const isInWonkySet = wonkySet.has(verdict.workReportHash);
41286
41306
  if (isInGoodSet || isInBadSet || isInWonkySet) {
41287
- return result_Result.error(DisputesErrorCode.AlreadyJudged);
41307
+ return result_Result.error(DisputesErrorCode.AlreadyJudged, () => `Work report already judged: ${verdict.workReportHash}`);
41288
41308
  }
41289
41309
  }
41290
41310
  return result_Result.ok(null);
@@ -41315,7 +41335,7 @@ class Disputes {
41315
41335
  // https://graypaper.fluffylabs.dev/#/579bd12/12f10212fc02
41316
41336
  const f = disputes.faults.find((x) => x.workReportHash.isEqualTo(r));
41317
41337
  if (f === undefined) {
41318
- return result_Result.error(DisputesErrorCode.NotEnoughFaults);
41338
+ return result_Result.error(DisputesErrorCode.NotEnoughFaults, () => `Not enough faults for work report: ${r}`);
41319
41339
  }
41320
41340
  }
41321
41341
  else if (sum === 0) {
@@ -41324,13 +41344,13 @@ class Disputes {
41324
41344
  const c1 = disputes.culprits.find((x) => x.workReportHash.isEqualTo(r));
41325
41345
  const c2 = disputes.culprits.findLast((x) => x.workReportHash.isEqualTo(r));
41326
41346
  if (c1 === c2) {
41327
- return result_Result.error(DisputesErrorCode.NotEnoughCulprits);
41347
+ return result_Result.error(DisputesErrorCode.NotEnoughCulprits, () => `Not enough culprits for work report: ${r}`);
41328
41348
  }
41329
41349
  }
41330
41350
  else if (sum !== this.chainSpec.thirdOfValidators) {
41331
41351
  // positive votes count is not correct
41332
41352
  // https://graypaper.fluffylabs.dev/#/579bd12/125002128102
41333
- return result_Result.error(DisputesErrorCode.BadVoteSplit);
41353
+ return result_Result.error(DisputesErrorCode.BadVoteSplit, () => `Bad vote split: sum=${sum}, expected=${this.chainSpec.thirdOfValidators} for work report ${r}`);
41334
41354
  }
41335
41355
  }
41336
41356
  return result_Result.ok(null);
@@ -41418,7 +41438,7 @@ class Disputes {
41418
41438
  const validator = k[j.index];
41419
41439
  // no particular GP fragment but I think we don't believe in ghosts
41420
41440
  if (validator === undefined) {
41421
- return result_Result.error(DisputesErrorCode.BadValidatorIndex);
41441
+ return result_Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index in signature verification: ${j.index}`);
41422
41442
  }
41423
41443
  const key = validator.ed25519;
41424
41444
  // verify vote signature
@@ -41526,7 +41546,7 @@ const ringCommitmentCache = [];
41526
41546
  async function verifySeal(bandersnatch, authorKey, signature, payload, encodedUnsealedHeader) {
41527
41547
  const sealResult = await bandersnatch.verifySeal(authorKey.raw, signature.raw, payload.raw, encodedUnsealedHeader.raw);
41528
41548
  if (sealResult[RESULT_INDEX] === ResultValues.Error) {
41529
- return result_Result.error(null);
41549
+ return result_Result.error(null, () => "Bandersnatch VRF seal verification failed");
41530
41550
  }
41531
41551
  return result_Result.ok(bytes_Bytes.fromBlob(sealResult.subarray(1), hash_HASH_SIZE).asOpaque());
41532
41552
  }
@@ -41552,7 +41572,7 @@ function getRingCommitment(bandersnatch, validators) {
41552
41572
  async function getRingCommitmentNoCache(bandersnatch, keys) {
41553
41573
  const commitmentResult = await bandersnatch.getRingCommitment(keys.raw);
41554
41574
  if (commitmentResult[RESULT_INDEX] === ResultValues.Error) {
41555
- return result_Result.error(null);
41575
+ return result_Result.error(null, () => "Bandersnatch ring commitment calculation failed");
41556
41576
  }
41557
41577
  return result_Result.ok(bytes_Bytes.fromBlob(commitmentResult.subarray(1), bandersnatch_BANDERSNATCH_RING_ROOT_BYTES).asOpaque());
41558
41578
  }
@@ -41727,7 +41747,7 @@ class Safrole {
41727
41747
  epochRoot: epochRootResult.ok,
41728
41748
  });
41729
41749
  }
41730
- return result_Result.error(SafroleErrorCode.IncorrectData);
41750
+ return result_Result.error(SafroleErrorCode.IncorrectData, () => "Safrole: failed to get epoch root for validator keys");
41731
41751
  }
41732
41752
  /**
41733
41753
  * Ticket sequencer that is used in standard mode
@@ -41818,10 +41838,10 @@ class Safrole {
41818
41838
  for (let i = 1; i < ticketsLength; i++) {
41819
41839
  const order = tickets[i - 1].id.compare(tickets[i].id);
41820
41840
  if (order.isEqual()) {
41821
- return result_Result.error(SafroleErrorCode.DuplicateTicket);
41841
+ return result_Result.error(SafroleErrorCode.DuplicateTicket, () => `Safrole: duplicate ticket found at index ${i}`);
41822
41842
  }
41823
41843
  if (order.isGreater()) {
41824
- return result_Result.error(SafroleErrorCode.BadTicketOrder);
41844
+ return result_Result.error(SafroleErrorCode.BadTicketOrder, () => `Safrole: bad ticket order at index ${i}`);
41825
41845
  }
41826
41846
  }
41827
41847
  return result_Result.ok(null);
@@ -41848,7 +41868,7 @@ class Safrole {
41848
41868
  attempt: ticket.attempt,
41849
41869
  }));
41850
41870
  if (!verificationResult.every((x) => x.isValid)) {
41851
- return result_Result.error(SafroleErrorCode.BadTicketProof);
41871
+ return result_Result.error(SafroleErrorCode.BadTicketProof, () => "Safrole: invalid ticket proof in extrinsic");
41852
41872
  }
41853
41873
  /**
41854
41874
  * Verify if tickets are sorted and unique
@@ -41857,7 +41877,7 @@ class Safrole {
41857
41877
  */
41858
41878
  const ticketsVerifcationResult = this.verifyTickets(tickets);
41859
41879
  if (ticketsVerifcationResult.isError) {
41860
- return result_Result.error(ticketsVerifcationResult.error);
41880
+ return result_Result.error(ticketsVerifcationResult.error, ticketsVerifcationResult.details);
41861
41881
  }
41862
41882
  if (this.isEpochChanged(timeslot)) {
41863
41883
  return result_Result.ok(tickets);
@@ -41866,7 +41886,7 @@ class Safrole {
41866
41886
  const ticketsFromExtrinsic = SortedSet.fromSortedArray(ticketComparator, tickets);
41867
41887
  const mergedTickets = SortedSet.fromTwoSortedCollections(ticketsFromState, ticketsFromExtrinsic);
41868
41888
  if (ticketsFromState.length + ticketsFromExtrinsic.length !== mergedTickets.length) {
41869
- return result_Result.error(SafroleErrorCode.DuplicateTicket);
41889
+ return result_Result.error(SafroleErrorCode.DuplicateTicket, () => "Safrole: duplicate ticket when merging state and extrinsic tickets");
41870
41890
  }
41871
41891
  /**
41872
41892
  * Remove tickets if size of accumulator exceeds E (epoch length).
@@ -41935,24 +41955,24 @@ class Safrole {
41935
41955
  }
41936
41956
  async transition(input) {
41937
41957
  if (this.state.timeslot >= input.slot) {
41938
- return result_Result.error(SafroleErrorCode.BadSlot);
41958
+ return result_Result.error(SafroleErrorCode.BadSlot, () => `Safrole: bad slot, state timeslot ${this.state.timeslot} >= input slot ${input.slot}`);
41939
41959
  }
41940
41960
  if (!this.isExtrinsicLengthValid(input.slot, input.extrinsic)) {
41941
- return result_Result.error(SafroleErrorCode.UnexpectedTicket);
41961
+ return result_Result.error(SafroleErrorCode.UnexpectedTicket, () => `Safrole: unexpected ticket, invalid extrinsic length ${input.extrinsic.length}`);
41942
41962
  }
41943
41963
  if (!this.areTicketAttemptsValid(input.extrinsic)) {
41944
- return result_Result.error(SafroleErrorCode.BadTicketAttempt);
41964
+ return result_Result.error(SafroleErrorCode.BadTicketAttempt, () => "Safrole: bad ticket attempt value in extrinsic");
41945
41965
  }
41946
41966
  const validatorKeysResult = await this.getValidatorKeys(input.slot, input.punishSet);
41947
41967
  if (validatorKeysResult.isError) {
41948
- return result_Result.error(validatorKeysResult.error);
41968
+ return result_Result.error(validatorKeysResult.error, validatorKeysResult.details);
41949
41969
  }
41950
41970
  const { nextValidatorData, currentValidatorData, previousValidatorData, epochRoot } = validatorKeysResult.ok;
41951
41971
  const entropy = this.getEntropy(input.slot, input.entropy);
41952
41972
  const sealingKeySeries = this.getSlotKeySequence(input.slot, currentValidatorData, entropy[2]);
41953
41973
  const newTicketsAccumulatorResult = await this.getNewTicketAccumulator(input.slot, input.extrinsic, this.state.nextValidatorData, epochRoot, entropy[2]);
41954
41974
  if (newTicketsAccumulatorResult.isError) {
41955
- return result_Result.error(newTicketsAccumulatorResult.error);
41975
+ return result_Result.error(newTicketsAccumulatorResult.error, newTicketsAccumulatorResult.details);
41956
41976
  }
41957
41977
  const stateUpdate = {
41958
41978
  nextValidatorData,
@@ -41986,14 +42006,14 @@ function compareWithEncoding(chainSpec, error, actual, expected, codec) {
41986
42006
  if (actual === null || expected === null) {
41987
42007
  // if one of them is `null`, both need to be.
41988
42008
  if (actual !== expected) {
41989
- return result_Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
42009
+ return result_Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
41990
42010
  }
41991
42011
  return result_Result.ok(result_OK);
41992
42012
  }
41993
42013
  // compare the literal encoding.
41994
42014
  const encoded = encoder_Encoder.encodeObject(codec, actual, chainSpec);
41995
42015
  if (!encoded.isEqualTo(expected.encoded())) {
41996
- return result_Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
42016
+ return result_Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
41997
42017
  }
41998
42018
  return result_Result.ok(result_OK);
41999
42019
  }
@@ -42036,7 +42056,7 @@ class SafroleSeal {
42036
42056
  const blockAuthorKey = state.currentValidatorData.at(blockAuthorIndex)?.bandersnatch;
42037
42057
  const entropySourceResult = await bandersnatch_vrf.verifySeal(await this.bandersnatch, blockAuthorKey ?? BANDERSNATCH_ZERO_KEY, headerView.entropySource.materialize(), payload, bytes_BytesBlob.blobFromNumbers([]));
42038
42058
  if (entropySourceResult.isError) {
42039
- return result_Result.error(SafroleSealError.IncorrectEntropySource);
42059
+ return result_Result.error(SafroleSealError.IncorrectEntropySource, () => "Safrole: incorrect entropy source in header seal");
42040
42060
  }
42041
42061
  return result_Result.ok(entropySourceResult.ok);
42042
42062
  }
@@ -42045,7 +42065,7 @@ class SafroleSeal {
42045
42065
  const validatorIndex = headerView.bandersnatchBlockAuthorIndex.materialize();
42046
42066
  const authorKeys = state.currentValidatorData.at(validatorIndex);
42047
42067
  if (authorKeys === undefined) {
42048
- return result_Result.error(SafroleSealError.InvalidValidatorIndex);
42068
+ return result_Result.error(SafroleSealError.InvalidValidatorIndex, () => `Safrole: invalid validator index ${validatorIndex}`);
42049
42069
  }
42050
42070
  const timeSlot = headerView.timeSlotIndex.materialize();
42051
42071
  const sealingKeys = state.sealingKeySeries;
@@ -42064,10 +42084,10 @@ class SafroleSeal {
42064
42084
  const authorKey = validatorData.bandersnatch;
42065
42085
  const result = await bandersnatch_vrf.verifySeal(await this.bandersnatch, authorKey ?? BANDERSNATCH_ZERO_KEY, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
42066
42086
  if (result.isError) {
42067
- return result_Result.error(SafroleSealError.IncorrectSeal);
42087
+ return result_Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with ticket");
42068
42088
  }
42069
42089
  if (ticket === undefined || !ticket.id.isEqualTo(result.ok)) {
42070
- return result_Result.error(SafroleSealError.InvalidTicket);
42090
+ return result_Result.error(SafroleSealError.InvalidTicket, () => `Safrole: invalid ticket, expected ${ticket?.id} got ${result.ok}`);
42071
42091
  }
42072
42092
  return result_Result.ok(result.ok);
42073
42093
  }
@@ -42077,13 +42097,13 @@ class SafroleSeal {
42077
42097
  const sealingKey = keys.at(index);
42078
42098
  const authorBandersnatchKey = authorKey.bandersnatch;
42079
42099
  if (sealingKey === undefined || !sealingKey.isEqualTo(authorBandersnatchKey)) {
42080
- return result_Result.error(SafroleSealError.InvalidValidator, `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
42100
+ return result_Result.error(SafroleSealError.InvalidValidator, () => `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
42081
42101
  }
42082
42102
  // verify seal correctness
42083
42103
  const payload = bytes_BytesBlob.blobFromParts(JAM_FALLBACK_SEAL, entropy.raw);
42084
42104
  const result = await bandersnatch_vrf.verifySeal(await this.bandersnatch, authorBandersnatchKey, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
42085
42105
  if (result.isError) {
42086
- return result_Result.error(SafroleSealError.IncorrectSeal);
42106
+ return result_Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with keys");
42087
42107
  }
42088
42108
  return result_Result.ok(result.ok);
42089
42109
  }
@@ -42126,6 +42146,14 @@ async function getRootHash(yieldedRoots) {
42126
42146
 
42127
42147
 
42128
42148
  const InsufficientFundsError = "insufficient funds";
42149
+ /** Deep clone of a map with array. */
42150
+ function deepCloneMapWithArray(map) {
42151
+ const cloned = [];
42152
+ for (const [k, v] of map.entries()) {
42153
+ cloned.push([k, v.slice()]);
42154
+ }
42155
+ return new Map(cloned);
42156
+ }
42129
42157
  /**
42130
42158
  * State updates that currently accumulating service produced.
42131
42159
  *
@@ -42155,10 +42183,11 @@ class AccumulationStateUpdate {
42155
42183
  /** Create new empty state update. */
42156
42184
  static empty() {
42157
42185
  return new AccumulationStateUpdate({
42158
- servicesUpdates: [],
42159
- servicesRemoved: [],
42160
- preimages: [],
42161
- storage: [],
42186
+ created: [],
42187
+ updated: new Map(),
42188
+ removed: [],
42189
+ preimages: new Map(),
42190
+ storage: new Map(),
42162
42191
  }, []);
42163
42192
  }
42164
42193
  /** Create a state update with some existing, yet uncommited services updates. */
@@ -42170,10 +42199,13 @@ class AccumulationStateUpdate {
42170
42199
  /** Create a copy of another `StateUpdate`. Used by checkpoints. */
42171
42200
  static copyFrom(from) {
42172
42201
  const serviceUpdates = {
42173
- servicesUpdates: [...from.services.servicesUpdates],
42174
- servicesRemoved: [...from.services.servicesRemoved],
42175
- preimages: [...from.services.preimages],
42176
- storage: [...from.services.storage],
42202
+ // shallow copy
42203
+ created: [...from.services.created],
42204
+ updated: new Map(from.services.updated),
42205
+ removed: [...from.services.removed],
42206
+ // deep copy
42207
+ preimages: deepCloneMapWithArray(from.services.preimages),
42208
+ storage: deepCloneMapWithArray(from.services.storage),
42177
42209
  };
42178
42210
  const transfers = [...from.transfers];
42179
42211
  const update = new AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
@@ -42221,9 +42253,9 @@ class PartiallyUpdatedState {
42221
42253
  if (destination === null) {
42222
42254
  return null;
42223
42255
  }
42224
- const maybeNewService = this.stateUpdate.services.servicesUpdates.find((update) => update.serviceId === destination);
42225
- if (maybeNewService !== undefined) {
42226
- return maybeNewService.action.account;
42256
+ const maybeUpdatedServiceInfo = this.stateUpdate.services.updated.get(destination);
42257
+ if (maybeUpdatedServiceInfo !== undefined) {
42258
+ return maybeUpdatedServiceInfo.action.account;
42227
42259
  }
42228
42260
  const maybeService = this.state.getService(destination);
42229
42261
  if (maybeService === null) {
@@ -42232,7 +42264,8 @@ class PartiallyUpdatedState {
42232
42264
  return maybeService.getInfo();
42233
42265
  }
42234
42266
  getStorage(serviceId, rawKey) {
42235
- const item = this.stateUpdate.services.storage.find((x) => x.serviceId === serviceId && x.key.isEqualTo(rawKey));
42267
+ const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
42268
+ const item = storages.find((x) => x.key.isEqualTo(rawKey));
42236
42269
  if (item !== undefined) {
42237
42270
  return item.value;
42238
42271
  }
@@ -42247,10 +42280,11 @@ class PartiallyUpdatedState {
42247
42280
  * the existence in `preimages` map.
42248
42281
  */
42249
42282
  hasPreimage(serviceId, hash) {
42250
- const providedPreimage = this.stateUpdate.services.preimages.find(
42283
+ const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
42284
+ const providedPreimage = preimages.find(
42251
42285
  // we ignore the action here, since if there is <any> update on that
42252
42286
  // hash it means it has to exist, right?
42253
- (p) => p.serviceId === serviceId && p.hash.isEqualTo(hash));
42287
+ (p) => p.hash.isEqualTo(hash));
42254
42288
  if (providedPreimage !== undefined) {
42255
42289
  return true;
42256
42290
  }
@@ -42263,7 +42297,8 @@ class PartiallyUpdatedState {
42263
42297
  }
42264
42298
  getPreimage(serviceId, hash) {
42265
42299
  // TODO [ToDr] Should we verify availability here?
42266
- const freshlyProvided = this.stateUpdate.services.preimages.find((x) => x.serviceId === serviceId && x.hash.isEqualTo(hash));
42300
+ const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
42301
+ const freshlyProvided = preimages.find((x) => x.hash.isEqualTo(hash));
42267
42302
  if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
42268
42303
  return freshlyProvided.action.preimage.blob;
42269
42304
  }
@@ -42272,10 +42307,11 @@ class PartiallyUpdatedState {
42272
42307
  }
42273
42308
  /** Get status of a preimage of current service taking into account any updates. */
42274
42309
  getLookupHistory(currentTimeslot, serviceId, hash, length) {
42310
+ const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
42275
42311
  // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
42276
42312
  // the same state update. We should however switch to proper "updated state"
42277
42313
  // representation soon.
42278
- const updatedPreimage = this.stateUpdate.services.preimages.findLast((update) => update.serviceId === serviceId && update.hash.isEqualTo(hash) && BigInt(update.length) === length);
42314
+ const updatedPreimage = preimages.findLast((update) => update.hash.isEqualTo(hash) && BigInt(update.length) === length);
42279
42315
  const stateFallback = () => {
42280
42316
  // fallback to state lookup
42281
42317
  const service = this.state.getService(serviceId);
@@ -42312,14 +42348,15 @@ class PartiallyUpdatedState {
42312
42348
  /* State update functions. */
42313
42349
  updateStorage(serviceId, key, value) {
42314
42350
  const update = value === null
42315
- ? UpdateStorage.remove({ serviceId, key })
42351
+ ? UpdateStorage.remove({ key })
42316
42352
  : UpdateStorage.set({
42317
- serviceId,
42318
42353
  storage: StorageItem.create({ key, value }),
42319
42354
  });
42320
- const index = this.stateUpdate.services.storage.findIndex((x) => x.serviceId === update.serviceId && x.key.isEqualTo(key));
42355
+ const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
42356
+ const index = storages.findIndex((x) => x.key.isEqualTo(key));
42321
42357
  const count = index === -1 ? 0 : 1;
42322
- this.stateUpdate.services.storage.splice(index, count, update);
42358
+ storages.splice(index, count, update);
42359
+ this.stateUpdate.services.storage.set(serviceId, storages);
42323
42360
  }
42324
42361
  /**
42325
42362
  * Update a preimage.
@@ -42327,8 +42364,10 @@ class PartiallyUpdatedState {
42327
42364
  * Note we store all previous entries as well, since there might be a sequence of:
42328
42365
  * `provide` -> `remove` and both should update the end state somehow.
42329
42366
  */
42330
- updatePreimage(newUpdate) {
42331
- this.stateUpdate.services.preimages.push(newUpdate);
42367
+ updatePreimage(serviceId, newUpdate) {
42368
+ const updatePreimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
42369
+ updatePreimages.push(newUpdate);
42370
+ this.stateUpdate.services.preimages.set(serviceId, updatePreimages);
42332
42371
  }
42333
42372
  updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
42334
42373
  debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
@@ -42337,11 +42376,11 @@ class PartiallyUpdatedState {
42337
42376
  const overflowBytes = !isU64(bytes);
42338
42377
  // TODO [ToDr] this is not specified in GP, but it seems sensible.
42339
42378
  if (overflowItems || overflowBytes) {
42340
- return result_Result.error(InsufficientFundsError);
42379
+ return result_Result.error(InsufficientFundsError, () => `Storage utilisation overflow: items=${overflowItems}, bytes=${overflowBytes}`);
42341
42380
  }
42342
42381
  const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
42343
42382
  if (serviceInfo.balance < thresholdBalance) {
42344
- return result_Result.error(InsufficientFundsError);
42383
+ return result_Result.error(InsufficientFundsError, () => `Service balance (${serviceInfo.balance}) below threshold (${thresholdBalance})`);
42345
42384
  }
42346
42385
  // Update service info with new details.
42347
42386
  this.updateServiceInfo(serviceId, ServiceAccountInfo.create({
@@ -42352,22 +42391,25 @@ class PartiallyUpdatedState {
42352
42391
  return result_Result.ok(result_OK);
42353
42392
  }
42354
42393
  updateServiceInfo(serviceId, newInfo) {
42355
- const idx = this.stateUpdate.services.servicesUpdates.findIndex((x) => x.serviceId === serviceId);
42356
- const toRemove = idx === -1 ? 0 : 1;
42357
- const existingItem = this.stateUpdate.services.servicesUpdates[idx];
42358
- if (existingItem?.action.kind === UpdateServiceKind.Create) {
42359
- this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.create({
42360
- serviceId,
42394
+ const existingUpdate = this.stateUpdate.services.updated.get(serviceId);
42395
+ if (existingUpdate?.action.kind === UpdateServiceKind.Create) {
42396
+ this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
42361
42397
  serviceInfo: newInfo,
42362
- lookupHistory: existingItem.action.lookupHistory,
42398
+ lookupHistory: existingUpdate.action.lookupHistory,
42363
42399
  }));
42364
42400
  return;
42365
42401
  }
42366
- this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.update({
42367
- serviceId,
42402
+ this.stateUpdate.services.updated.set(serviceId, UpdateService.update({
42368
42403
  serviceInfo: newInfo,
42369
42404
  }));
42370
42405
  }
42406
+ createService(serviceId, newInfo, newLookupHistory) {
42407
+ this.stateUpdate.services.created.push(serviceId);
42408
+ this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
42409
+ serviceInfo: newInfo,
42410
+ lookupHistory: newLookupHistory,
42411
+ }));
42412
+ }
42371
42413
  getPrivilegedServices() {
42372
42414
  if (this.stateUpdate.privilegedServices !== null) {
42373
42415
  return this.stateUpdate.privilegedServices;
@@ -43841,7 +43883,7 @@ class ReadablePage extends MemoryPage {
43841
43883
  loadInto(result, startIndex, length) {
43842
43884
  const endIndex = startIndex + length;
43843
43885
  if (endIndex > PAGE_SIZE) {
43844
- return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE));
43886
+ return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
43845
43887
  }
43846
43888
  const bytes = this.data.subarray(startIndex, endIndex);
43847
43889
  // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -43850,7 +43892,7 @@ class ReadablePage extends MemoryPage {
43850
43892
  return result_Result.ok(result_OK);
43851
43893
  }
43852
43894
  storeFrom(_address, _data) {
43853
- return result_Result.error(PageFault.fromMemoryIndex(this.start, true));
43895
+ return result_Result.error(PageFault.fromMemoryIndex(this.start, true), () => `Page fault: attempted to write to read-only page at ${this.start}`);
43854
43896
  }
43855
43897
  setData(pageIndex, data) {
43856
43898
  this.data.set(data, pageIndex);
@@ -43884,7 +43926,7 @@ class WriteablePage extends MemoryPage {
43884
43926
  loadInto(result, startIndex, length) {
43885
43927
  const endIndex = startIndex + length;
43886
43928
  if (endIndex > PAGE_SIZE) {
43887
- return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE));
43929
+ return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
43888
43930
  }
43889
43931
  const bytes = this.view.subarray(startIndex, endIndex);
43890
43932
  // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -43970,7 +44012,7 @@ class Memory {
43970
44012
  memory_logger.insane `MEM[${address}] <- ${bytes_BytesBlob.blobFrom(bytes)}`;
43971
44013
  const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
43972
44014
  if (pagesResult.isError) {
43973
- return result_Result.error(pagesResult.error);
44015
+ return result_Result.error(pagesResult.error, pagesResult.details);
43974
44016
  }
43975
44017
  const pages = pagesResult.ok;
43976
44018
  let currentPosition = address;
@@ -43995,14 +44037,14 @@ class Memory {
43995
44037
  const pages = [];
43996
44038
  for (const pageNumber of pageRange) {
43997
44039
  if (pageNumber < RESERVED_NUMBER_OF_PAGES) {
43998
- return result_Result.error(PageFault.fromPageNumber(pageNumber, true));
44040
+ return result_Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to access reserved page ${pageNumber}`);
43999
44041
  }
44000
44042
  const page = this.memory.get(pageNumber);
44001
44043
  if (page === undefined) {
44002
- return result_Result.error(PageFault.fromPageNumber(pageNumber));
44044
+ return result_Result.error(PageFault.fromPageNumber(pageNumber), () => `Page fault: page ${pageNumber} not allocated`);
44003
44045
  }
44004
44046
  if (accessType === AccessType.WRITE && !page.isWriteable()) {
44005
- return result_Result.error(PageFault.fromPageNumber(pageNumber, true));
44047
+ return result_Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to write to read-only page ${pageNumber}`);
44006
44048
  }
44007
44049
  pages.push(page);
44008
44050
  }
@@ -44020,7 +44062,7 @@ class Memory {
44020
44062
  }
44021
44063
  const pagesResult = this.getPages(startAddress, result.length, AccessType.READ);
44022
44064
  if (pagesResult.isError) {
44023
- return result_Result.error(pagesResult.error);
44065
+ return result_Result.error(pagesResult.error, pagesResult.details);
44024
44066
  }
44025
44067
  const pages = pagesResult.ok;
44026
44068
  let currentPosition = startAddress;
@@ -45959,7 +46001,7 @@ class ProgramDecoder {
45959
46001
  }
45960
46002
  catch (e) {
45961
46003
  program_decoder_logger.error `Invalid program: ${e}`;
45962
- return result_Result.error(ProgramDecoderError.InvalidProgramError);
46004
+ return result_Result.error(ProgramDecoderError.InvalidProgramError, () => `Program decoder error: ${e}`);
45963
46005
  }
45964
46006
  }
45965
46007
  }
@@ -46702,10 +46744,10 @@ class AccumulateExternalities {
46702
46744
  const len = existingPreimage.slots.length;
46703
46745
  // https://graypaper.fluffylabs.dev/#/9a08063/380901380901?v=0.6.6
46704
46746
  if (len === PreimageStatusKind.Requested) {
46705
- return result_Result.error(RequestPreimageError.AlreadyRequested);
46747
+ return result_Result.error(RequestPreimageError.AlreadyRequested, () => `Preimage already requested: hash=${hash}`);
46706
46748
  }
46707
46749
  if (len === PreimageStatusKind.Available || len === PreimageStatusKind.Reavailable) {
46708
- return result_Result.error(RequestPreimageError.AlreadyAvailable);
46750
+ return result_Result.error(RequestPreimageError.AlreadyAvailable, () => `Preimage already available: hash=${hash}`);
46709
46751
  }
46710
46752
  // TODO [ToDr] Not sure if we should update the service info in that case,
46711
46753
  // but for now we let that case fall-through.
@@ -46730,15 +46772,13 @@ class AccumulateExternalities {
46730
46772
  const clampedLength = clampU64ToU32(length);
46731
46773
  if (existingPreimage === null) {
46732
46774
  // https://graypaper.fluffylabs.dev/#/9a08063/38a60038a600?v=0.6.6
46733
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
46734
- serviceId: this.currentServiceId,
46775
+ this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
46735
46776
  lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([])),
46736
46777
  }));
46737
46778
  }
46738
46779
  else {
46739
46780
  /** https://graypaper.fluffylabs.dev/#/9a08063/38ca0038ca00?v=0.6.6 */
46740
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
46741
- serviceId: this.currentServiceId,
46781
+ this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
46742
46782
  lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([...existingPreimage.slots, this.currentTimeslot])),
46743
46783
  }));
46744
46784
  }
@@ -46748,7 +46788,7 @@ class AccumulateExternalities {
46748
46788
  const serviceId = this.currentServiceId;
46749
46789
  const status = this.updatedState.getLookupHistory(this.currentTimeslot, this.currentServiceId, hash, length);
46750
46790
  if (status === null) {
46751
- return result_Result.error(ForgetPreimageError.NotFound);
46791
+ return result_Result.error(ForgetPreimageError.NotFound, () => `Preimage not found: hash=${hash}, length=${length}`);
46752
46792
  }
46753
46793
  const s = slotsToPreimageStatus(status.slots);
46754
46794
  const updateStorageUtilisation = () => {
@@ -46761,10 +46801,9 @@ class AccumulateExternalities {
46761
46801
  if (s.status === PreimageStatusKind.Requested) {
46762
46802
  const res = updateStorageUtilisation();
46763
46803
  if (res.isError) {
46764
- return result_Result.error(ForgetPreimageError.StorageUtilisationError);
46804
+ return result_Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
46765
46805
  }
46766
- this.updatedState.updatePreimage(UpdatePreimage.remove({
46767
- serviceId,
46806
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
46768
46807
  hash: status.hash,
46769
46808
  length: status.length,
46770
46809
  }));
@@ -46777,21 +46816,19 @@ class AccumulateExternalities {
46777
46816
  if (y < t - this.chainSpec.preimageExpungePeriod) {
46778
46817
  const res = updateStorageUtilisation();
46779
46818
  if (res.isError) {
46780
- return result_Result.error(ForgetPreimageError.StorageUtilisationError);
46819
+ return result_Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
46781
46820
  }
46782
- this.updatedState.updatePreimage(UpdatePreimage.remove({
46783
- serviceId,
46821
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
46784
46822
  hash: status.hash,
46785
46823
  length: status.length,
46786
46824
  }));
46787
46825
  return result_Result.ok(result_OK);
46788
46826
  }
46789
- return result_Result.error(ForgetPreimageError.NotExpired);
46827
+ return result_Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
46790
46828
  }
46791
46829
  // https://graypaper.fluffylabs.dev/#/9a08063/38c80138c801?v=0.6.6
46792
46830
  if (s.status === PreimageStatusKind.Available) {
46793
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
46794
- serviceId,
46831
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
46795
46832
  lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[0], t])),
46796
46833
  }));
46797
46834
  return result_Result.ok(result_OK);
@@ -46800,13 +46837,12 @@ class AccumulateExternalities {
46800
46837
  if (s.status === PreimageStatusKind.Reavailable) {
46801
46838
  const y = s.data[1];
46802
46839
  if (y < t - this.chainSpec.preimageExpungePeriod) {
46803
- this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
46804
- serviceId,
46840
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
46805
46841
  lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[2], t])),
46806
46842
  }));
46807
46843
  return result_Result.ok(result_OK);
46808
46844
  }
46809
- return result_Result.error(ForgetPreimageError.NotExpired);
46845
+ return result_Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
46810
46846
  }
46811
46847
  debug_assertNever(s);
46812
46848
  }
@@ -46815,17 +46851,17 @@ class AccumulateExternalities {
46815
46851
  const destination = this.getServiceInfo(destinationId);
46816
46852
  /** https://graypaper.fluffylabs.dev/#/9a08063/370401370401?v=0.6.6 */
46817
46853
  if (destination === null || destinationId === null) {
46818
- return result_Result.error(TransferError.DestinationNotFound);
46854
+ return result_Result.error(TransferError.DestinationNotFound, () => `Destination service not found: ${destinationId}`);
46819
46855
  }
46820
46856
  /** https://graypaper.fluffylabs.dev/#/9a08063/371301371301?v=0.6.6 */
46821
46857
  if (gas < destination.onTransferMinGas) {
46822
- return result_Result.error(TransferError.GasTooLow);
46858
+ return result_Result.error(TransferError.GasTooLow, () => `Gas ${gas} below minimum ${destination.onTransferMinGas}`);
46823
46859
  }
46824
46860
  /** https://graypaper.fluffylabs.dev/#/9a08063/371b01371b01?v=0.6.6 */
46825
46861
  const newBalance = source.balance - amount;
46826
46862
  const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(source.storageUtilisationCount, source.storageUtilisationBytes, source.gratisStorage);
46827
46863
  if (newBalance < thresholdBalance) {
46828
- return result_Result.error(TransferError.BalanceBelowThreshold);
46864
+ return result_Result.error(TransferError.BalanceBelowThreshold, () => `Balance ${newBalance} below threshold ${thresholdBalance}`);
46829
46865
  }
46830
46866
  // outgoing transfer
46831
46867
  this.updatedState.stateUpdate.transfers.push(PendingTransfer.create({
@@ -46852,7 +46888,7 @@ class AccumulateExternalities {
46852
46888
  // check if we are priviledged to set gratis storage
46853
46889
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/369203369603?v=0.6.7
46854
46890
  if (gratisStorage !== numbers_tryAsU64(0) && this.currentServiceId !== this.updatedState.getPrivilegedServices().manager) {
46855
- return result_Result.error(NewServiceError.UnprivilegedService);
46891
+ return result_Result.error(NewServiceError.UnprivilegedService, () => `Service ${this.currentServiceId} not privileged to set gratis storage`);
46856
46892
  }
46857
46893
  // check if we have enough balance
46858
46894
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/369e0336a303?v=0.6.7
@@ -46861,7 +46897,7 @@ class AccumulateExternalities {
46861
46897
  const thresholdForCurrent = ServiceAccountInfo.calculateThresholdBalance(currentService.storageUtilisationCount, currentService.storageUtilisationBytes, currentService.gratisStorage);
46862
46898
  const balanceLeftForCurrent = currentService.balance - thresholdForNew;
46863
46899
  if (balanceLeftForCurrent < thresholdForCurrent || bytes.overflow) {
46864
- return result_Result.error(NewServiceError.InsufficientFunds);
46900
+ return result_Result.error(NewServiceError.InsufficientFunds, () => `Insufficient funds: balance=${currentService.balance}, required=${thresholdForNew}, overflow=${bytes.overflow}`);
46865
46901
  }
46866
46902
  // `a`: https://graypaper.fluffylabs.dev/#/ab2cdbd/366b02366d02?v=0.7.2
46867
46903
  const newAccount = ServiceAccountInfo.create({
@@ -46888,15 +46924,11 @@ class AccumulateExternalities {
46888
46924
  // NOTE: It's safe to cast to `Number` here, bcs here service ID cannot be bigger than 2**16
46889
46925
  const newServiceId = tryAsServiceId(Number(wantedServiceId));
46890
46926
  if (this.getServiceInfo(newServiceId) !== null) {
46891
- return result_Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken);
46927
+ return result_Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken, () => `Service ID ${newServiceId} already taken`);
46892
46928
  }
46893
46929
  // add the new service with selected ID
46894
46930
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/36be0336c003?v=0.7.2
46895
- this.updatedState.stateUpdate.services.servicesUpdates.push(UpdateService.create({
46896
- serviceId: newServiceId,
46897
- serviceInfo: newAccount,
46898
- lookupHistory: newLookupItem,
46899
- }));
46931
+ this.updatedState.createService(newServiceId, newAccount, newLookupItem);
46900
46932
  // update the balance of current service
46901
46933
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/36c20336c403?v=0.7.2
46902
46934
  this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -46907,12 +46939,8 @@ class AccumulateExternalities {
46907
46939
  }
46908
46940
  const newServiceId = this.nextNewServiceId;
46909
46941
  // add the new service
46910
- // https://graypaper.fluffylabs.dev/#/ab2cdbd/36e70336e903?v=0.7.2
46911
- this.updatedState.stateUpdate.services.servicesUpdates.push(UpdateService.create({
46912
- serviceId: newServiceId,
46913
- serviceInfo: newAccount,
46914
- lookupHistory: newLookupItem,
46915
- }));
46942
+ // https://graypaper.fluffylabs.dev/#/7e6ff6a/36cb0236cb02?v=0.6.7
46943
+ this.updatedState.createService(newServiceId, newAccount, newLookupItem);
46916
46944
  // update the balance of current service
46917
46945
  // https://graypaper.fluffylabs.dev/#/ab2cdbd/36ec0336ee03?v=0.7.2
46918
46946
  this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -46936,7 +46964,7 @@ class AccumulateExternalities {
46936
46964
  const currentDelegator = this.updatedState.getPrivilegedServices().delegator;
46937
46965
  if (currentDelegator !== this.currentServiceId) {
46938
46966
  accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not a validators manager. (expected: ${currentDelegator}) and cannot update validators data. Ignoring`;
46939
- return result_Result.error(UnprivilegedError);
46967
+ return result_Result.error(UnprivilegedError, () => `Service ${this.currentServiceId} is not delegator (expected: ${currentDelegator})`);
46940
46968
  }
46941
46969
  this.updatedState.stateUpdate.validatorsData = validatorsData;
46942
46970
  return result_Result.ok(result_OK);
@@ -46951,11 +46979,11 @@ class AccumulateExternalities {
46951
46979
  const currentAssigners = this.updatedState.getPrivilegedServices().assigners[coreIndex];
46952
46980
  if (currentAssigners !== this.currentServiceId) {
46953
46981
  accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not an auth manager of core ${coreIndex} (expected: ${currentAssigners}) and cannot update authorization queue.`;
46954
- return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
46982
+ return result_Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} not assigner for core ${coreIndex} (expected: ${currentAssigners})`);
46955
46983
  }
46956
46984
  if (assigners === null && Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)) {
46957
46985
  accumulate_externalities_logger.trace `The new auth manager is not a valid service id.`;
46958
- return result_Result.error(UpdatePrivilegesError.InvalidServiceId);
46986
+ return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => `New auth manager is null for core ${coreIndex}`);
46959
46987
  }
46960
46988
  this.updatedState.stateUpdate.authorizationQueues.set(coreIndex, authQueue);
46961
46989
  return result_Result.ok(result_OK);
@@ -46988,10 +47016,10 @@ class AccumulateExternalities {
46988
47016
  const isManager = current.manager === this.currentServiceId;
46989
47017
  if (Compatibility.isLessThan(GpVersion.V0_7_1)) {
46990
47018
  if (!isManager) {
46991
- return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
47019
+ return result_Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} is not manager`);
46992
47020
  }
46993
47021
  if (manager === null || delegator === null) {
46994
- return result_Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator is not a valid service id.");
47022
+ return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator is not a valid service id.");
46995
47023
  }
46996
47024
  this.updatedState.stateUpdate.privilegedServices = PrivilegedServices.create({
46997
47025
  manager,
@@ -47004,7 +47032,7 @@ class AccumulateExternalities {
47004
47032
  }
47005
47033
  const original = this.updatedState.state.privilegedServices;
47006
47034
  if (manager === null || delegator === null || registrar === null) {
47007
- return result_Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator or registrar is not a valid service id.");
47035
+ return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator or registrar is not a valid service id.");
47008
47036
  }
47009
47037
  const newDelegator = this.updatePrivilegedServiceId(delegator, current.delegator, {
47010
47038
  isManager,
@@ -47044,23 +47072,22 @@ class AccumulateExternalities {
47044
47072
  // TODO [ToDr] what about newly created services?
47045
47073
  const service = serviceId === null ? null : this.updatedState.state.getService(serviceId);
47046
47074
  if (service === null || serviceId === null) {
47047
- return result_Result.error(ProvidePreimageError.ServiceNotFound);
47075
+ return result_Result.error(ProvidePreimageError.ServiceNotFound, () => `Service not found: ${serviceId}`);
47048
47076
  }
47049
47077
  // calculating the hash
47050
47078
  const preimageHash = this.blake2b.hashBytes(preimage).asOpaque();
47051
47079
  // checking service internal lookup
47052
47080
  const stateLookup = this.updatedState.getLookupHistory(this.currentTimeslot, serviceId, preimageHash, numbers_tryAsU64(preimage.length));
47053
47081
  if (stateLookup === null || !LookupHistoryItem.isRequested(stateLookup)) {
47054
- return result_Result.error(ProvidePreimageError.WasNotRequested);
47082
+ return result_Result.error(ProvidePreimageError.WasNotRequested, () => `Preimage was not requested: hash=${preimageHash}, service=${serviceId}`);
47055
47083
  }
47056
47084
  // checking already provided preimages
47057
47085
  const hasPreimage = this.updatedState.hasPreimage(serviceId, preimageHash);
47058
47086
  if (hasPreimage) {
47059
- return result_Result.error(ProvidePreimageError.AlreadyProvided);
47087
+ return result_Result.error(ProvidePreimageError.AlreadyProvided, () => `Preimage already provided: hash=${preimageHash}, service=${serviceId}`);
47060
47088
  }
47061
47089
  // setting up the new preimage
47062
- this.updatedState.updatePreimage(UpdatePreimage.provide({
47063
- serviceId,
47090
+ this.updatedState.updatePreimage(serviceId, UpdatePreimage.provide({
47064
47091
  preimage: PreimageItem.create({
47065
47092
  hash: preimageHash,
47066
47093
  blob: preimage,
@@ -47072,31 +47099,31 @@ class AccumulateExternalities {
47072
47099
  eject(destination, previousCodeHash) {
47073
47100
  const service = this.getServiceInfo(destination);
47074
47101
  if (service === null || destination === null) {
47075
- return result_Result.error(EjectError.InvalidService, "Service missing");
47102
+ return result_Result.error(EjectError.InvalidService, () => "Service missing");
47076
47103
  }
47077
47104
  const currentService = this.getCurrentServiceInfo();
47078
47105
  // check if the service expects to be ejected by us:
47079
47106
  const expectedCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
47080
47107
  writeServiceIdAsLeBytes(this.currentServiceId, expectedCodeHash.raw);
47081
47108
  if (!service.codeHash.isEqualTo(expectedCodeHash)) {
47082
- return result_Result.error(EjectError.InvalidService, "Invalid code hash");
47109
+ return result_Result.error(EjectError.InvalidService, () => "Invalid code hash");
47083
47110
  }
47084
47111
  // make sure the service only has required number of storage items?
47085
47112
  if (service.storageUtilisationCount !== REQUIRED_NUMBER_OF_STORAGE_ITEMS_FOR_EJECT) {
47086
- return result_Result.error(EjectError.InvalidPreimage, "Too many storage items");
47113
+ return result_Result.error(EjectError.InvalidPreimage, () => "Too many storage items");
47087
47114
  }
47088
47115
  // storage items length
47089
47116
  const l = numbers_tryAsU64(maxU64(service.storageUtilisationBytes, LOOKUP_HISTORY_ENTRY_BYTES) - LOOKUP_HISTORY_ENTRY_BYTES);
47090
47117
  // check if we have a preimage with the entire storage.
47091
47118
  const [isPreviousCodeExpired, errorReason] = this.isPreviousCodeExpired(destination, previousCodeHash, l);
47092
47119
  if (!isPreviousCodeExpired) {
47093
- return result_Result.error(EjectError.InvalidPreimage, `Previous code available: ${errorReason}`);
47120
+ return result_Result.error(EjectError.InvalidPreimage, () => `Previous code available: ${errorReason}`);
47094
47121
  }
47095
47122
  // compute new balance of the service.
47096
47123
  const newBalance = sumU64(currentService.balance, service.balance);
47097
47124
  // TODO [ToDr] what to do in case of overflow?
47098
47125
  if (newBalance.overflow) {
47099
- return result_Result.error(EjectError.InvalidService, "Balance overflow");
47126
+ return result_Result.error(EjectError.InvalidService, () => "Balance overflow");
47100
47127
  }
47101
47128
  // update current service.
47102
47129
  this.updatedState.updateServiceInfo(this.currentServiceId, ServiceAccountInfo.create({
@@ -47104,11 +47131,13 @@ class AccumulateExternalities {
47104
47131
  balance: newBalance.value,
47105
47132
  }));
47106
47133
  // and finally add an ejected service.
47107
- this.updatedState.stateUpdate.services.servicesRemoved.push(destination);
47134
+ this.updatedState.stateUpdate.services.removed.push(destination);
47108
47135
  // take care of the code preimage and its lookup history
47109
47136
  // Safe, because we know the preimage is valid, and it's the code of the service, which is bounded by maximal service code size anyway (much smaller than 2**32 bytes).
47110
47137
  const preimageLength = numbers_tryAsU32(Number(l));
47111
- this.updatedState.stateUpdate.services.preimages.push(UpdatePreimage.remove({ serviceId: destination, hash: previousCodeHash, length: preimageLength }));
47138
+ const preimages = this.updatedState.stateUpdate.services.preimages.get(destination) ?? [];
47139
+ preimages.push(UpdatePreimage.remove({ hash: previousCodeHash, length: preimageLength }));
47140
+ this.updatedState.stateUpdate.services.preimages.set(destination, preimages);
47112
47141
  return result_Result.ok(result_OK);
47113
47142
  }
47114
47143
  read(serviceId, rawKey) {
@@ -47289,10 +47318,10 @@ class Assurances {
47289
47318
  for (const assurance of assurances) {
47290
47319
  const { anchor, validatorIndex, bitfield } = assurance;
47291
47320
  if (!anchor.isEqualTo(input.parentHash)) {
47292
- return result_Result.error(AssurancesError.InvalidAnchor, `anchor: expected: ${input.parentHash}, got ${anchor}`);
47321
+ return result_Result.error(AssurancesError.InvalidAnchor, () => `anchor: expected: ${input.parentHash}, got ${anchor}`);
47293
47322
  }
47294
47323
  if (prevValidatorIndex >= validatorIndex) {
47295
- return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
47324
+ return result_Result.error(AssurancesError.InvalidOrder, () => `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
47296
47325
  }
47297
47326
  prevValidatorIndex = assurance.validatorIndex;
47298
47327
  debug_check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
@@ -47315,7 +47344,7 @@ class Assurances {
47315
47344
  * https://graypaper.fluffylabs.dev/#/579bd12/14e90014ea00
47316
47345
  */
47317
47346
  if (noOfAssurances > 0 && !isReportPending) {
47318
- return result_Result.error(AssurancesError.NoReportPending, `no report pending for core ${c} yet we got an assurance`);
47347
+ return result_Result.error(AssurancesError.NoReportPending, () => `no report pending for core ${c} yet we got an assurance`);
47319
47348
  }
47320
47349
  /**
47321
47350
  * Remove work report if it's became available or timed out.
@@ -47361,7 +47390,7 @@ class Assurances {
47361
47390
  const v = assurance.view();
47362
47391
  const key = validatorData[v.validatorIndex.materialize()];
47363
47392
  if (key === undefined) {
47364
- return result_Result.error(AssurancesError.InvalidValidatorIndex);
47393
+ return result_Result.error(AssurancesError.InvalidValidatorIndex, () => `Invalid validator index: ${v.validatorIndex.materialize()}`);
47365
47394
  }
47366
47395
  signatures.push({
47367
47396
  signature: v.signature.materialize(),
@@ -47373,7 +47402,7 @@ class Assurances {
47373
47402
  const isAllSignaturesValid = signaturesValid.every((x) => x);
47374
47403
  if (!isAllSignaturesValid) {
47375
47404
  const invalidIndices = signaturesValid.reduce((acc, isValid, idx) => (isValid ? acc : acc.concat(idx)), []);
47376
- return result_Result.error(AssurancesError.InvalidSignature, `invalid signatures at ${invalidIndices.join(", ")}`);
47405
+ return result_Result.error(AssurancesError.InvalidSignature, () => `invalid signatures at ${invalidIndices.join(", ")}`);
47377
47406
  }
47378
47407
  return result_Result.ok(result_OK);
47379
47408
  }
@@ -47984,7 +48013,7 @@ class HostCallMemory {
47984
48013
  return result_Result.ok(result_OK);
47985
48014
  }
47986
48015
  if (address + numbers_tryAsU64(bytes.length) > MEMORY_SIZE) {
47987
- return result_Result.error(new OutOfBounds());
48016
+ return result_Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${address} + length ${bytes.length} exceeds memory size`);
47988
48017
  }
47989
48018
  return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
47990
48019
  }
@@ -47993,13 +48022,10 @@ class HostCallMemory {
47993
48022
  return result_Result.ok(result_OK);
47994
48023
  }
47995
48024
  if (startAddress + numbers_tryAsU64(result.length) > MEMORY_SIZE) {
47996
- return result_Result.error(new OutOfBounds());
48025
+ return result_Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${startAddress} + length ${result.length} exceeds memory size`);
47997
48026
  }
47998
48027
  return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
47999
48028
  }
48000
- getMemory() {
48001
- return this.memory;
48002
- }
48003
48029
  }
48004
48030
 
48005
48031
  ;// CONCATENATED MODULE: ./packages/core/pvm-host-calls/host-call-registers.ts
@@ -49939,18 +49965,18 @@ class Accumulate {
49939
49965
  const serviceInfo = updatedState.getServiceInfo(serviceId);
49940
49966
  if (serviceInfo === null) {
49941
49967
  accumulate_logger.log `Service with id ${serviceId} not found.`;
49942
- return result_Result.error(PvmInvocationError.NoService);
49968
+ return result_Result.error(PvmInvocationError.NoService, () => `Accumulate: service ${serviceId} not found`);
49943
49969
  }
49944
49970
  const codeHash = serviceInfo.codeHash;
49945
49971
  // TODO [ToDr] Should we check that the preimage is still available?
49946
49972
  const code = updatedState.getPreimage(serviceId, codeHash.asOpaque());
49947
49973
  if (code === null) {
49948
49974
  accumulate_logger.log `Code with hash ${codeHash} not found for service ${serviceId}.`;
49949
- return result_Result.error(PvmInvocationError.NoPreimage);
49975
+ return result_Result.error(PvmInvocationError.NoPreimage, () => `Accumulate: code with hash ${codeHash} not found for service ${serviceId}`);
49950
49976
  }
49951
49977
  if (code.length > W_C) {
49952
49978
  accumulate_logger.log `Code with hash ${codeHash} is too long for service ${serviceId}.`;
49953
- return result_Result.error(PvmInvocationError.PreimageTooLong);
49979
+ return result_Result.error(PvmInvocationError.PreimageTooLong, () => `Accumulate: code length ${code.length} exceeds max ${W_C} for service ${serviceId}`);
49954
49980
  }
49955
49981
  const nextServiceId = generateNextServiceId({ serviceId, entropy, timeslot: slot }, this.chainSpec, this.blake2b);
49956
49982
  const partialState = new AccumulateExternalities(this.chainSpec, this.blake2b, updatedState, serviceId, nextServiceId, slot);
@@ -50205,19 +50231,16 @@ class Accumulate {
50205
50231
  const gasLimit = tryAsServiceGas(this.chainSpec.maxBlockGas > calculatedGasLimit ? this.chainSpec.maxBlockGas : calculatedGasLimit);
50206
50232
  return tryAsServiceGas(gasLimit);
50207
50233
  }
50208
- hasDuplicatedServicesCreated(updateServices) {
50209
- const createdServiceIds = new Set();
50210
- for (const update of updateServices) {
50211
- if (update.action.kind === UpdateServiceKind.Create) {
50212
- const serviceId = update.serviceId;
50213
- if (createdServiceIds.has(serviceId)) {
50214
- accumulate_logger.log `Duplicated Service creation detected ${serviceId}. Block is invalid.`;
50215
- return true;
50216
- }
50217
- createdServiceIds.add(serviceId);
50218
- }
50219
- }
50220
- return false;
50234
+ /**
50235
+ * Detects the very unlikely situation where multiple services are created with the same ID.
50236
+ *
50237
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/30f20330f403?v=0.7.2
50238
+ *
50239
+ * NOTE: This is public only for testing purposes and should not be used outside of accumulation.
50240
+ */
50241
+ hasDuplicatedServiceIdCreated(createdIds) {
50242
+ const uniqueIds = new Set(createdIds);
50243
+ return uniqueIds.size !== createdIds.length;
50221
50244
  }
50222
50245
  async transition({ reports, slot, entropy }) {
50223
50246
  const statistics = new Map();
@@ -50239,8 +50262,9 @@ class Accumulate {
50239
50262
  const accumulated = accumulatableReports.subview(0, accumulatedReports);
50240
50263
  const { services, yieldedRoots, transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
50241
50264
  assertEmpty(stateUpdateRest);
50242
- if (this.hasDuplicatedServicesCreated(services.servicesUpdates)) {
50243
- return result_Result.error(ACCUMULATION_ERROR);
50265
+ if (this.hasDuplicatedServiceIdCreated(services.created)) {
50266
+ accumulate_logger.trace `Duplicated Service creation detected. Block is invalid.`;
50267
+ return result_Result.error(ACCUMULATION_ERROR, () => "Accumulate: duplicate service created");
50244
50268
  }
50245
50269
  const accStateUpdate = this.getAccumulationStateUpdate(accumulated.toArray(), toAccumulateLater, slot, Array.from(statistics.keys()), services);
50246
50270
  const accumulationOutputUnsorted = Array.from(yieldedRoots.entries()).map(([serviceId, root]) => {
@@ -50321,13 +50345,13 @@ class DeferredTransfers {
50321
50345
  .toSorted((a, b) => a.source - b.source);
50322
50346
  const info = partiallyUpdatedState.getServiceInfo(serviceId);
50323
50347
  if (info === null) {
50324
- return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist);
50348
+ return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist, () => `Deferred transfers: service info not found for ${serviceId}`);
50325
50349
  }
50326
50350
  const codeHash = info.codeHash;
50327
50351
  const code = partiallyUpdatedState.getPreimage(serviceId, codeHash.asOpaque());
50328
50352
  const newBalance = sumU64(info.balance, ...transfers.map((item) => item.amount));
50329
50353
  if (newBalance.overflow) {
50330
- return result_Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow);
50354
+ return result_Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow, () => `Deferred transfers: balance overflow for service ${serviceId}`);
50331
50355
  }
50332
50356
  const newInfo = ServiceAccountInfo.create({ ...info, balance: newBalance.value });
50333
50357
  partiallyUpdatedState.updateServiceInfo(serviceId, newInfo);
@@ -50805,7 +50829,7 @@ function verifyReportsBasic(input) {
50805
50829
  const noOfPrerequisites = reportView.context.view().prerequisites.view().length;
50806
50830
  const noOfSegmentRootLookups = reportView.segmentRootLookup.view().length;
50807
50831
  if (noOfPrerequisites + noOfSegmentRootLookups > MAX_REPORT_DEPENDENCIES) {
50808
- return result_Result.error(ReportsError.TooManyDependencies, `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
50832
+ return result_Result.error(ReportsError.TooManyDependencies, () => `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
50809
50833
  }
50810
50834
  /**
50811
50835
  * In order to ensure fair use of a block’s extrinsic space,
@@ -50824,7 +50848,7 @@ function verifyReportsBasic(input) {
50824
50848
  totalOutputsSize += item.view().result.view().okBlob?.raw.length ?? 0;
50825
50849
  }
50826
50850
  if (authOutputSize + totalOutputsSize > MAX_WORK_REPORT_SIZE_BYTES) {
50827
- return result_Result.error(ReportsError.WorkReportTooBig, `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
50851
+ return result_Result.error(ReportsError.WorkReportTooBig, () => `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
50828
50852
  }
50829
50853
  }
50830
50854
  return result_Result.ok(result_OK);
@@ -50858,12 +50882,12 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
50858
50882
  for (const result of guarantee.report.results) {
50859
50883
  const service = state.getService(result.serviceId);
50860
50884
  if (service === null) {
50861
- return result_Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
50885
+ return result_Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
50862
50886
  }
50863
50887
  // check service code hash
50864
50888
  // https://graypaper.fluffylabs.dev/#/5f542d7/154b02154b02
50865
50889
  if (!result.codeHash.isEqualTo(service.getInfo().codeHash)) {
50866
- return result_Result.error(ReportsError.BadCodeHash, `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
50890
+ return result_Result.error(ReportsError.BadCodeHash, () => `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
50867
50891
  }
50868
50892
  }
50869
50893
  }
@@ -50874,7 +50898,7 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
50874
50898
  * https://graypaper.fluffylabs.dev/#/5f542d7/151f01152101
50875
50899
  */
50876
50900
  if (currentWorkPackages.size !== input.guarantees.length) {
50877
- return result_Result.error(ReportsError.DuplicatePackage, "Duplicate work package detected.");
50901
+ return result_Result.error(ReportsError.DuplicatePackage, () => "Duplicate work package detected.");
50878
50902
  }
50879
50903
  const minLookupSlot = Math.max(0, input.slot - maxLookupAnchorAge);
50880
50904
  const contextResult = verifyRefineContexts(minLookupSlot, contexts, input.recentBlocksPartialUpdate, headerChain);
@@ -50919,7 +50943,7 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
50919
50943
  : undefined;
50920
50944
  }
50921
50945
  if (root === undefined || !root.segmentTreeRoot.isEqualTo(lookup.segmentTreeRoot)) {
50922
- return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
50946
+ return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
50923
50947
  }
50924
50948
  }
50925
50949
  }
@@ -50942,16 +50966,16 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
50942
50966
  */
50943
50967
  const recentBlock = recentBlocks.get(context.anchor);
50944
50968
  if (recentBlock === undefined) {
50945
- return result_Result.error(ReportsError.AnchorNotRecent, `Anchor block ${context.anchor} not found in recent blocks.`);
50969
+ return result_Result.error(ReportsError.AnchorNotRecent, () => `Anchor block ${context.anchor} not found in recent blocks.`);
50946
50970
  }
50947
50971
  // check state root
50948
50972
  if (!recentBlock.postStateRoot.isEqualTo(context.stateRoot)) {
50949
- return result_Result.error(ReportsError.BadStateRoot, `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
50973
+ return result_Result.error(ReportsError.BadStateRoot, () => `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
50950
50974
  }
50951
50975
  // check beefy root
50952
50976
  const beefyRoot = recentBlock.accumulationResult;
50953
50977
  if (!beefyRoot.isEqualTo(context.beefyRoot)) {
50954
- return result_Result.error(ReportsError.BadBeefyMmrRoot, `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
50978
+ return result_Result.error(ReportsError.BadBeefyMmrRoot, () => `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
50955
50979
  }
50956
50980
  /**
50957
50981
  * We require that each lookup-anchor block be within the
@@ -50960,7 +50984,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
50960
50984
  * https://graypaper.fluffylabs.dev/#/5f542d7/154601154701
50961
50985
  */
50962
50986
  if (context.lookupAnchorSlot < minLookupSlot) {
50963
- return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
50987
+ return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
50964
50988
  }
50965
50989
  /**
50966
50990
  * We also require that we have a record of it; this is one of
@@ -50977,7 +51001,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
50977
51001
  verify_contextual_logger.warn `Lookup anchor check for ${context.lookupAnchor} would fail, but override is active.`;
50978
51002
  }
50979
51003
  else {
50980
- return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
51004
+ return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
50981
51005
  }
50982
51006
  }
50983
51007
  }
@@ -51000,7 +51024,7 @@ function verifyDependencies({ currentWorkPackages, recentlyReported, prerequisit
51000
51024
  if (recentlyReported.has(preReqHash)) {
51001
51025
  continue;
51002
51026
  }
51003
- return result_Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, `Missing work package ${preReqHash} in current extrinsic or recent history.`);
51027
+ return result_Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, () => `Missing work package ${preReqHash} in current extrinsic or recent history.`);
51004
51028
  }
51005
51029
  return result_Result.ok(result_OK);
51006
51030
  };
@@ -51048,7 +51072,7 @@ function verifyWorkPackagesUniqueness(workPackageHashes, state) {
51048
51072
  // let's check if any of our packages is in the pipeline
51049
51073
  const intersection = packagesInPipeline.intersection(workPackageHashes);
51050
51074
  for (const packageHash of intersection) {
51051
- return result_Result.error(ReportsError.DuplicatePackage, `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
51075
+ return result_Result.error(ReportsError.DuplicatePackage, () => `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
51052
51076
  }
51053
51077
  return result_Result.ok(result_OK);
51054
51078
  }
@@ -51087,7 +51111,7 @@ workReportHashes, slot, getGuarantorAssignment) {
51087
51111
  const credentialsView = guaranteeView.credentials.view();
51088
51112
  if (credentialsView.length < REQUIRED_CREDENTIALS_RANGE[0] ||
51089
51113
  credentialsView.length > REQUIRED_CREDENTIALS_RANGE[1]) {
51090
- return result_Result.error(ReportsError.InsufficientGuarantees, `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
51114
+ return result_Result.error(ReportsError.InsufficientGuarantees, () => `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
51091
51115
  }
51092
51116
  /** Retrieve current core assignment. */
51093
51117
  const timeSlot = guaranteeView.slot.materialize();
@@ -51102,20 +51126,20 @@ workReportHashes, slot, getGuarantorAssignment) {
51102
51126
  const credentialView = credential.view();
51103
51127
  const validatorIndex = credentialView.validatorIndex.materialize();
51104
51128
  if (lastValidatorIndex >= validatorIndex) {
51105
- return result_Result.error(ReportsError.NotSortedOrUniqueGuarantors, `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
51129
+ return result_Result.error(ReportsError.NotSortedOrUniqueGuarantors, () => `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
51106
51130
  }
51107
51131
  lastValidatorIndex = validatorIndex;
51108
51132
  const signature = credentialView.signature.materialize();
51109
51133
  const guarantorData = guarantorAssignments[validatorIndex];
51110
51134
  if (guarantorData === undefined) {
51111
- return result_Result.error(ReportsError.BadValidatorIndex, `Invalid validator index: ${validatorIndex}`);
51135
+ return result_Result.error(ReportsError.BadValidatorIndex, () => `Invalid validator index: ${validatorIndex}`);
51112
51136
  }
51113
51137
  /**
51114
51138
  * Verify core assignment.
51115
51139
  * https://graypaper.fluffylabs.dev/#/5f542d7/14e40214e602
51116
51140
  */
51117
51141
  if (guarantorData.core !== coreIndex) {
51118
- return result_Result.error(ReportsError.WrongAssignment, `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
51142
+ return result_Result.error(ReportsError.WrongAssignment, () => `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
51119
51143
  }
51120
51144
  signaturesToVerify.push({
51121
51145
  signature,
@@ -51153,10 +51177,10 @@ function verifyReportsOrder(input, chainSpec) {
51153
51177
  const reportView = guarantee.view().report.view();
51154
51178
  const coreIndex = reportView.coreIndex.materialize();
51155
51179
  if (lastCoreIndex >= coreIndex) {
51156
- return result_Result.error(ReportsError.OutOfOrderGuarantee, `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
51180
+ return result_Result.error(ReportsError.OutOfOrderGuarantee, () => `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
51157
51181
  }
51158
51182
  if (coreIndex >= noOfCores) {
51159
- return result_Result.error(ReportsError.BadCoreIndex, `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
51183
+ return result_Result.error(ReportsError.BadCoreIndex, () => `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
51160
51184
  }
51161
51185
  lastCoreIndex = coreIndex;
51162
51186
  }
@@ -51181,7 +51205,7 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
51181
51205
  * https://graypaper.fluffylabs.dev/#/5f542d7/15ea0015ea00
51182
51206
  */
51183
51207
  if (availabilityAssignment[coreIndex] !== null) {
51184
- return result_Result.error(ReportsError.CoreEngaged, `Report pending availability at core: ${coreIndex}`);
51208
+ return result_Result.error(ReportsError.CoreEngaged, () => `Report pending availability at core: ${coreIndex}`);
51185
51209
  }
51186
51210
  /**
51187
51211
  * A report is valid only if the authorizer hash is present
@@ -51194,7 +51218,7 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
51194
51218
  const authorizerPool = authPools.get(coreIndex);
51195
51219
  const pool = authorizerPool?.materialize() ?? [];
51196
51220
  if (pool.find((hash) => hash.isEqualTo(authorizerHash)) === undefined) {
51197
- return result_Result.error(ReportsError.CoreUnauthorized, `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
51221
+ return result_Result.error(ReportsError.CoreUnauthorized, () => `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
51198
51222
  }
51199
51223
  /**
51200
51224
  * We require that the gas allotted for accumulation of each
@@ -51206,17 +51230,17 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
51206
51230
  for (const result of report.results) {
51207
51231
  const service = services(result.serviceId);
51208
51232
  if (service === null) {
51209
- return result_Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
51233
+ return result_Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
51210
51234
  }
51211
51235
  const info = service.getInfo();
51212
51236
  // check minimal accumulation gas
51213
51237
  if (result.gas < info.accumulateMinGas) {
51214
- return result_Result.error(ReportsError.ServiceItemGasTooLow, `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
51238
+ return result_Result.error(ReportsError.ServiceItemGasTooLow, () => `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
51215
51239
  }
51216
51240
  }
51217
51241
  const totalGas = sumU64(...report.results.map((x) => x.gas));
51218
51242
  if (totalGas.overflow || totalGas.value > G_A) {
51219
- return result_Result.error(ReportsError.WorkReportGasTooHigh, `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
51243
+ return result_Result.error(ReportsError.WorkReportGasTooHigh, () => `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
51220
51244
  }
51221
51245
  }
51222
51246
  return result_Result.ok(result_OK);
@@ -51302,7 +51326,7 @@ class Reports {
51302
51326
  }
51303
51327
  const reporters = SortedSet.fromArray(bytesBlobComparator, signaturesToVerify.ok.map((x) => x.key)).slice();
51304
51328
  if (hasAnyOffenders(reporters, input.offenders)) {
51305
- return result_Result.error(ReportsError.BannedValidator);
51329
+ return result_Result.error(ReportsError.BannedValidator, () => "One or more reporters are banned validators");
51306
51330
  }
51307
51331
  return result_Result.ok({
51308
51332
  stateUpdate: {
@@ -51342,7 +51366,7 @@ class Reports {
51342
51366
  return signaturesToVerify[idx].key;
51343
51367
  })
51344
51368
  .filter((x) => x !== null);
51345
- return result_Result.error(ReportsError.BadSignature, `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
51369
+ return result_Result.error(ReportsError.BadSignature, () => `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
51346
51370
  }
51347
51371
  /**
51348
51372
  * Get the guarantor assignment (both core and validator data)
@@ -51358,10 +51382,10 @@ class Reports {
51358
51382
  const minTimeSlot = Math.max(0, headerRotation - 1) * rotationPeriod;
51359
51383
  // https://graypaper.fluffylabs.dev/#/5f542d7/155e00156900
51360
51384
  if (guaranteeTimeSlot > headerTimeSlot) {
51361
- return result_Result.error(ReportsError.FutureReportSlot, `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
51385
+ return result_Result.error(ReportsError.FutureReportSlot, () => `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
51362
51386
  }
51363
51387
  if (guaranteeTimeSlot < minTimeSlot) {
51364
- return result_Result.error(ReportsError.ReportEpochBeforeLast, `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
51388
+ return result_Result.error(ReportsError.ReportEpochBeforeLast, () => `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
51365
51389
  }
51366
51390
  // TODO [ToDr] [opti] below code needs cache.
51367
51391
  // The `G` and `G*` sets should only be computed once per rotation.
@@ -51929,6 +51953,16 @@ class OnChain {
51929
51953
  });
51930
51954
  const { statistics, ...statisticsRest } = statisticsUpdate;
51931
51955
  assertEmpty(statisticsRest);
51956
+ // Concat accumulatePreimages updates with preimages
51957
+ for (const [serviceId, accPreimageUpdates] of accumulatePreimages.entries()) {
51958
+ const preimagesUpdates = preimages.get(serviceId);
51959
+ if (preimagesUpdates === undefined) {
51960
+ preimages.set(serviceId, accPreimageUpdates);
51961
+ }
51962
+ else {
51963
+ preimages.set(serviceId, preimagesUpdates.concat(accPreimageUpdates));
51964
+ }
51965
+ }
51932
51966
  return result_Result.ok({
51933
51967
  ...(maybeAuthorizationQueues !== undefined ? { authQueues: maybeAuthorizationQueues } : {}),
51934
51968
  ...(maybeDesignatedValidatorData !== undefined ? { designatedValidatorData: maybeDesignatedValidatorData } : {}),
@@ -51950,7 +51984,7 @@ class OnChain {
51950
51984
  recentlyAccumulated,
51951
51985
  accumulationOutputLog,
51952
51986
  ...servicesUpdate,
51953
- preimages: preimages.concat(accumulatePreimages),
51987
+ preimages,
51954
51988
  });
51955
51989
  }
51956
51990
  getUsedAuthorizerHashes(guarantees) {
@@ -51967,11 +52001,11 @@ class OnChain {
51967
52001
  }
51968
52002
  function checkOffendersMatch(offendersMark, headerOffendersMark) {
51969
52003
  if (offendersMark.size !== headerOffendersMark.length) {
51970
- return result_Result.error(OFFENDERS_ERROR, `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
52004
+ return result_Result.error(OFFENDERS_ERROR, () => `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
51971
52005
  }
51972
52006
  for (const key of headerOffendersMark) {
51973
52007
  if (!offendersMark.has(key)) {
51974
- return result_Result.error(OFFENDERS_ERROR, `Missing key: ${key}`);
52008
+ return result_Result.error(OFFENDERS_ERROR, () => `Missing key: ${key}`);
51975
52009
  }
51976
52010
  }
51977
52011
  return result_Result.ok(result_OK);
@@ -52053,7 +52087,7 @@ class Importer {
52053
52087
  if (!this.currentHash.isEqualTo(parentHash)) {
52054
52088
  const state = this.states.getState(parentHash);
52055
52089
  if (state === null) {
52056
- const e = result_Result.error(BlockVerifierError.StateRootNotFound);
52090
+ const e = result_Result.error(BlockVerifierError.StateRootNotFound, () => `State not found for parent block ${parentHash}`);
52057
52091
  if (!e.isError) {
52058
52092
  throw new Error("unreachable, just adding to make compiler happy");
52059
52093
  }
@@ -52233,7 +52267,7 @@ async function mainImporter(config, withRelPath) {
52233
52267
  if (res !== null && res !== undefined) {
52234
52268
  return decoder_Decoder.decodeObject(importBlockResultCodec, res);
52235
52269
  }
52236
- return result_Result.error("");
52270
+ return result_Result.error("invalid response", () => "Importer: import block response was null or undefined");
52237
52271
  },
52238
52272
  async getStateEntries(hash) {
52239
52273
  return importer.getStateEntries(hash);
@@ -52282,7 +52316,7 @@ async function mainFuzz(fuzzConfig, withRelPath) {
52282
52316
  chainSpec,
52283
52317
  importBlock: async (blockView) => {
52284
52318
  if (runningNode === null) {
52285
- return result_Result.error("node not running");
52319
+ return result_Result.error("node not running", () => "Fuzzer: node not running when importing block");
52286
52320
  }
52287
52321
  const importResult = await runningNode.importBlock(blockView);
52288
52322
  return importResult;