@typeberry/jam 0.2.0-74f246e → 0.2.0-adde0dd
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bootstrap-generator.mjs +179 -159
- package/bootstrap-generator.mjs.map +1 -1
- package/bootstrap-importer.mjs +402 -368
- package/bootstrap-importer.mjs.map +1 -1
- package/bootstrap-network.mjs +179 -160
- package/bootstrap-network.mjs.map +1 -1
- package/index.js +413 -379
- package/index.js.map +1 -1
- package/package.json +1 -1
package/bootstrap-importer.mjs
CHANGED
|
@@ -3840,7 +3840,7 @@ function resultToString(res) {
|
|
|
3840
3840
|
if (res.isOk) {
|
|
3841
3841
|
return `OK: ${typeof res.ok === "symbol" ? res.ok.toString() : res.ok}`;
|
|
3842
3842
|
}
|
|
3843
|
-
return `${res.details}\nError: ${maybeTaggedErrorToString(res.error)}`;
|
|
3843
|
+
return `${res.details()}\nError: ${maybeTaggedErrorToString(res.error)}`;
|
|
3844
3844
|
}
|
|
3845
3845
|
/** An indication of two possible outcomes returned from a function. */
|
|
3846
3846
|
const result_Result = {
|
|
@@ -3854,7 +3854,7 @@ const result_Result = {
|
|
|
3854
3854
|
};
|
|
3855
3855
|
},
|
|
3856
3856
|
/** Create new [`Result`] with `Error` status. */
|
|
3857
|
-
error: (error, details
|
|
3857
|
+
error: (error, details) => {
|
|
3858
3858
|
debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
|
|
3859
3859
|
return {
|
|
3860
3860
|
isOk: false,
|
|
@@ -3973,7 +3973,7 @@ function deepEqual(actual, expected, { context = [], errorsCollector, ignore = [
|
|
|
3973
3973
|
}
|
|
3974
3974
|
if (actual.isError && expected.isError) {
|
|
3975
3975
|
deepEqual(actual.error, expected.error, { context: ctx.concat(["error"]), errorsCollector: errors, ignore });
|
|
3976
|
-
deepEqual(actual.details, expected.details, {
|
|
3976
|
+
deepEqual(actual.details(), expected.details(), {
|
|
3977
3977
|
context: ctx.concat(["details"]),
|
|
3978
3978
|
errorsCollector: errors,
|
|
3979
3979
|
// display details when error does not match
|
|
@@ -9470,6 +9470,7 @@ function accumulationOutputComparator(a, b) {
|
|
|
9470
9470
|
|
|
9471
9471
|
;// CONCATENATED MODULE: ./packages/jam/block/gp-constants.ts
|
|
9472
9472
|
|
|
9473
|
+
|
|
9473
9474
|
/**
|
|
9474
9475
|
* This file lists all of the constants defined in the GrayPaper appendix.
|
|
9475
9476
|
*
|
|
@@ -9480,7 +9481,7 @@ function accumulationOutputComparator(a, b) {
|
|
|
9480
9481
|
* here are only temporarily for convenience. When we figure out better names
|
|
9481
9482
|
* and places for these this file will be eradicated.
|
|
9482
9483
|
*
|
|
9483
|
-
* https://graypaper.fluffylabs.dev/#/
|
|
9484
|
+
* https://graypaper.fluffylabs.dev/#/ab2cdbd/442300442300?v=0.7.2
|
|
9484
9485
|
*/
|
|
9485
9486
|
/** `G_I`: The gas allocated to invoke a work-package’s Is-Authorized logic. */
|
|
9486
9487
|
const G_I = 50_000_000;
|
|
@@ -9496,8 +9497,8 @@ const S = 1024;
|
|
|
9496
9497
|
const T = 128;
|
|
9497
9498
|
/** `W_A`: The maximum size of is-authorized code in octets. */
|
|
9498
9499
|
const W_A = 64_000;
|
|
9499
|
-
/** `W_B`: The maximum size of
|
|
9500
|
-
const W_B = 13_794_305;
|
|
9500
|
+
/** `W_B`: The maximum size of the concatenated variable-size blobs, extrinsics and imported segments of a work-package, in octets */
|
|
9501
|
+
const W_B = Compatibility.isGreaterOrEqual(GpVersion.V0_7_2) ? 13_791_360 : 13_794_305;
|
|
9501
9502
|
/** `W_C`: The maximum size of service code in octets. */
|
|
9502
9503
|
const W_C = 4_000_000;
|
|
9503
9504
|
/** `W_M`: The maximum number of imports in a work-package. */
|
|
@@ -10596,31 +10597,29 @@ var UpdatePreimageKind;
|
|
|
10596
10597
|
* 3. Update `LookupHistory` with given value.
|
|
10597
10598
|
*/
|
|
10598
10599
|
class UpdatePreimage {
|
|
10599
|
-
serviceId;
|
|
10600
10600
|
action;
|
|
10601
|
-
constructor(
|
|
10602
|
-
this.serviceId = serviceId;
|
|
10601
|
+
constructor(action) {
|
|
10603
10602
|
this.action = action;
|
|
10604
10603
|
}
|
|
10605
10604
|
/** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
|
|
10606
|
-
static provide({
|
|
10607
|
-
return new UpdatePreimage(
|
|
10605
|
+
static provide({ preimage, slot }) {
|
|
10606
|
+
return new UpdatePreimage({
|
|
10608
10607
|
kind: UpdatePreimageKind.Provide,
|
|
10609
10608
|
preimage,
|
|
10610
10609
|
slot,
|
|
10611
10610
|
});
|
|
10612
10611
|
}
|
|
10613
10612
|
/** The preimage should be removed completely from the database. */
|
|
10614
|
-
static remove({
|
|
10615
|
-
return new UpdatePreimage(
|
|
10613
|
+
static remove({ hash, length }) {
|
|
10614
|
+
return new UpdatePreimage({
|
|
10616
10615
|
kind: UpdatePreimageKind.Remove,
|
|
10617
10616
|
hash,
|
|
10618
10617
|
length,
|
|
10619
10618
|
});
|
|
10620
10619
|
}
|
|
10621
10620
|
/** Update the lookup history of some preimage or add a new one (request). */
|
|
10622
|
-
static updateOrAdd({
|
|
10623
|
-
return new UpdatePreimage(
|
|
10621
|
+
static updateOrAdd({ lookupHistory }) {
|
|
10622
|
+
return new UpdatePreimage({
|
|
10624
10623
|
kind: UpdatePreimageKind.UpdateOrAdd,
|
|
10625
10624
|
item: lookupHistory,
|
|
10626
10625
|
});
|
|
@@ -10657,23 +10656,21 @@ var UpdateServiceKind;
|
|
|
10657
10656
|
UpdateServiceKind[UpdateServiceKind["Create"] = 1] = "Create";
|
|
10658
10657
|
})(UpdateServiceKind || (UpdateServiceKind = {}));
|
|
10659
10658
|
/**
|
|
10660
|
-
* Update service info
|
|
10659
|
+
* Update service info or create a new one.
|
|
10661
10660
|
*/
|
|
10662
10661
|
class UpdateService {
|
|
10663
|
-
serviceId;
|
|
10664
10662
|
action;
|
|
10665
|
-
constructor(
|
|
10666
|
-
this.serviceId = serviceId;
|
|
10663
|
+
constructor(action) {
|
|
10667
10664
|
this.action = action;
|
|
10668
10665
|
}
|
|
10669
|
-
static update({
|
|
10670
|
-
return new UpdateService(
|
|
10666
|
+
static update({ serviceInfo }) {
|
|
10667
|
+
return new UpdateService({
|
|
10671
10668
|
kind: UpdateServiceKind.Update,
|
|
10672
10669
|
account: serviceInfo,
|
|
10673
10670
|
});
|
|
10674
10671
|
}
|
|
10675
|
-
static create({
|
|
10676
|
-
return new UpdateService(
|
|
10672
|
+
static create({ serviceInfo, lookupHistory, }) {
|
|
10673
|
+
return new UpdateService({
|
|
10677
10674
|
kind: UpdateServiceKind.Create,
|
|
10678
10675
|
account: serviceInfo,
|
|
10679
10676
|
lookupHistory,
|
|
@@ -10694,17 +10691,15 @@ var UpdateStorageKind;
|
|
|
10694
10691
|
* Can either create/modify an entry or remove it.
|
|
10695
10692
|
*/
|
|
10696
10693
|
class UpdateStorage {
|
|
10697
|
-
serviceId;
|
|
10698
10694
|
action;
|
|
10699
|
-
constructor(
|
|
10700
|
-
this.serviceId = serviceId;
|
|
10695
|
+
constructor(action) {
|
|
10701
10696
|
this.action = action;
|
|
10702
10697
|
}
|
|
10703
|
-
static set({
|
|
10704
|
-
return new UpdateStorage(
|
|
10698
|
+
static set({ storage }) {
|
|
10699
|
+
return new UpdateStorage({ kind: UpdateStorageKind.Set, storage });
|
|
10705
10700
|
}
|
|
10706
|
-
static remove({
|
|
10707
|
-
return new UpdateStorage(
|
|
10701
|
+
static remove({ key }) {
|
|
10702
|
+
return new UpdateStorage({ kind: UpdateStorageKind.Remove, key });
|
|
10708
10703
|
}
|
|
10709
10704
|
get key() {
|
|
10710
10705
|
if (this.action.kind === UpdateStorageKind.Remove) {
|
|
@@ -10913,12 +10908,12 @@ class in_memory_state_InMemoryState extends WithDebug {
|
|
|
10913
10908
|
* Modify the state and apply a single state update.
|
|
10914
10909
|
*/
|
|
10915
10910
|
applyUpdate(update) {
|
|
10916
|
-
const {
|
|
10911
|
+
const { removed, created: _, updated, preimages, storage, ...rest } = update;
|
|
10917
10912
|
// just assign all other variables
|
|
10918
10913
|
Object.assign(this, rest);
|
|
10919
10914
|
// and update the services state
|
|
10920
10915
|
let result;
|
|
10921
|
-
result = this.updateServices(
|
|
10916
|
+
result = this.updateServices(updated);
|
|
10922
10917
|
if (result.isError) {
|
|
10923
10918
|
return result;
|
|
10924
10919
|
}
|
|
@@ -10930,7 +10925,7 @@ class in_memory_state_InMemoryState extends WithDebug {
|
|
|
10930
10925
|
if (result.isError) {
|
|
10931
10926
|
return result;
|
|
10932
10927
|
}
|
|
10933
|
-
this.removeServices(
|
|
10928
|
+
this.removeServices(removed);
|
|
10934
10929
|
return result_Result.ok(result_OK);
|
|
10935
10930
|
}
|
|
10936
10931
|
removeServices(servicesRemoved) {
|
|
@@ -10939,89 +10934,102 @@ class in_memory_state_InMemoryState extends WithDebug {
|
|
|
10939
10934
|
this.services.delete(serviceId);
|
|
10940
10935
|
}
|
|
10941
10936
|
}
|
|
10942
|
-
updateStorage(
|
|
10943
|
-
|
|
10944
|
-
|
|
10945
|
-
|
|
10946
|
-
|
|
10947
|
-
|
|
10948
|
-
|
|
10949
|
-
|
|
10950
|
-
|
|
10951
|
-
|
|
10952
|
-
|
|
10953
|
-
|
|
10954
|
-
|
|
10955
|
-
|
|
10937
|
+
updateStorage(storageUpdates) {
|
|
10938
|
+
if (storageUpdates === undefined) {
|
|
10939
|
+
return result_Result.ok(result_OK);
|
|
10940
|
+
}
|
|
10941
|
+
for (const [serviceId, updates] of storageUpdates.entries()) {
|
|
10942
|
+
for (const update of updates) {
|
|
10943
|
+
const { kind } = update.action;
|
|
10944
|
+
const service = this.services.get(serviceId);
|
|
10945
|
+
if (service === undefined) {
|
|
10946
|
+
return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update storage of non-existing service: ${serviceId}`);
|
|
10947
|
+
}
|
|
10948
|
+
if (kind === UpdateStorageKind.Set) {
|
|
10949
|
+
const { key, value } = update.action.storage;
|
|
10950
|
+
service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
|
|
10951
|
+
}
|
|
10952
|
+
else if (kind === UpdateStorageKind.Remove) {
|
|
10953
|
+
const { key } = update.action;
|
|
10954
|
+
debug_check `
|
|
10956
10955
|
${service.data.storage.has(key.toString())}
|
|
10957
|
-
Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
|
|
10956
|
+
Attempting to remove non-existing storage item at ${serviceId}: ${update.action.key}
|
|
10958
10957
|
`;
|
|
10959
|
-
|
|
10960
|
-
|
|
10961
|
-
|
|
10962
|
-
|
|
10958
|
+
service.data.storage.delete(key.toString());
|
|
10959
|
+
}
|
|
10960
|
+
else {
|
|
10961
|
+
debug_assertNever(kind);
|
|
10962
|
+
}
|
|
10963
10963
|
}
|
|
10964
10964
|
}
|
|
10965
10965
|
return result_Result.ok(result_OK);
|
|
10966
10966
|
}
|
|
10967
|
-
updatePreimages(
|
|
10968
|
-
|
|
10967
|
+
updatePreimages(preimagesUpdates) {
|
|
10968
|
+
if (preimagesUpdates === undefined) {
|
|
10969
|
+
return result_Result.ok(result_OK);
|
|
10970
|
+
}
|
|
10971
|
+
for (const [serviceId, updates] of preimagesUpdates.entries()) {
|
|
10969
10972
|
const service = this.services.get(serviceId);
|
|
10970
10973
|
if (service === undefined) {
|
|
10971
|
-
return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update preimage of non-existing service: ${serviceId}`);
|
|
10972
|
-
}
|
|
10973
|
-
const
|
|
10974
|
-
|
|
10975
|
-
|
|
10976
|
-
|
|
10977
|
-
|
|
10978
|
-
|
|
10979
|
-
service.data.preimages.set(preimage.hash, preimage);
|
|
10980
|
-
if (slot !== null) {
|
|
10981
|
-
const lookupHistory = service.data.lookupHistory.get(preimage.hash);
|
|
10982
|
-
const length = numbers_tryAsU32(preimage.blob.length);
|
|
10983
|
-
const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
|
|
10984
|
-
if (lookupHistory === undefined) {
|
|
10985
|
-
// no lookup history for that preimage at all (edge case, should be requested)
|
|
10986
|
-
service.data.lookupHistory.set(preimage.hash, [lookup]);
|
|
10974
|
+
return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update preimage of non-existing service: ${serviceId}`);
|
|
10975
|
+
}
|
|
10976
|
+
for (const update of updates) {
|
|
10977
|
+
const { kind } = update.action;
|
|
10978
|
+
if (kind === UpdatePreimageKind.Provide) {
|
|
10979
|
+
const { preimage, slot } = update.action;
|
|
10980
|
+
if (service.data.preimages.has(preimage.hash)) {
|
|
10981
|
+
return result_Result.error(in_memory_state_UpdateError.PreimageExists, () => `Overwriting existing preimage at ${serviceId}: ${preimage}`);
|
|
10987
10982
|
}
|
|
10988
|
-
|
|
10989
|
-
|
|
10990
|
-
const
|
|
10991
|
-
|
|
10983
|
+
service.data.preimages.set(preimage.hash, preimage);
|
|
10984
|
+
if (slot !== null) {
|
|
10985
|
+
const lookupHistory = service.data.lookupHistory.get(preimage.hash);
|
|
10986
|
+
const length = numbers_tryAsU32(preimage.blob.length);
|
|
10987
|
+
const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
|
|
10988
|
+
if (lookupHistory === undefined) {
|
|
10989
|
+
// no lookup history for that preimage at all (edge case, should be requested)
|
|
10990
|
+
service.data.lookupHistory.set(preimage.hash, [lookup]);
|
|
10991
|
+
}
|
|
10992
|
+
else {
|
|
10993
|
+
// insert or replace exiting entry
|
|
10994
|
+
const index = lookupHistory.map((x) => x.length).indexOf(length);
|
|
10995
|
+
lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
|
|
10996
|
+
}
|
|
10992
10997
|
}
|
|
10993
10998
|
}
|
|
10994
|
-
|
|
10995
|
-
|
|
10996
|
-
|
|
10997
|
-
|
|
10998
|
-
|
|
10999
|
-
|
|
11000
|
-
|
|
11001
|
-
|
|
10999
|
+
else if (kind === UpdatePreimageKind.Remove) {
|
|
11000
|
+
const { hash, length } = update.action;
|
|
11001
|
+
service.data.preimages.delete(hash);
|
|
11002
|
+
const history = service.data.lookupHistory.get(hash) ?? [];
|
|
11003
|
+
const idx = history.map((x) => x.length).indexOf(length);
|
|
11004
|
+
if (idx !== -1) {
|
|
11005
|
+
history.splice(idx, 1);
|
|
11006
|
+
}
|
|
11007
|
+
}
|
|
11008
|
+
else if (kind === UpdatePreimageKind.UpdateOrAdd) {
|
|
11009
|
+
const { item } = update.action;
|
|
11010
|
+
const history = service.data.lookupHistory.get(item.hash) ?? [];
|
|
11011
|
+
const existingIdx = history.map((x) => x.length).indexOf(item.length);
|
|
11012
|
+
const removeCount = existingIdx === -1 ? 0 : 1;
|
|
11013
|
+
history.splice(existingIdx, removeCount, item);
|
|
11014
|
+
service.data.lookupHistory.set(item.hash, history);
|
|
11015
|
+
}
|
|
11016
|
+
else {
|
|
11017
|
+
debug_assertNever(kind);
|
|
11002
11018
|
}
|
|
11003
|
-
}
|
|
11004
|
-
else if (kind === UpdatePreimageKind.UpdateOrAdd) {
|
|
11005
|
-
const { item } = action;
|
|
11006
|
-
const history = service.data.lookupHistory.get(item.hash) ?? [];
|
|
11007
|
-
const existingIdx = history.map((x) => x.length).indexOf(item.length);
|
|
11008
|
-
const removeCount = existingIdx === -1 ? 0 : 1;
|
|
11009
|
-
history.splice(existingIdx, removeCount, item);
|
|
11010
|
-
service.data.lookupHistory.set(item.hash, history);
|
|
11011
|
-
}
|
|
11012
|
-
else {
|
|
11013
|
-
debug_assertNever(kind);
|
|
11014
11019
|
}
|
|
11015
11020
|
}
|
|
11016
11021
|
return result_Result.ok(result_OK);
|
|
11017
11022
|
}
|
|
11018
11023
|
updateServices(servicesUpdates) {
|
|
11019
|
-
|
|
11020
|
-
|
|
11024
|
+
if (servicesUpdates === undefined) {
|
|
11025
|
+
return result_Result.ok(result_OK);
|
|
11026
|
+
}
|
|
11027
|
+
for (const [serviceId, update] of servicesUpdates.entries()) {
|
|
11028
|
+
const { kind, account } = update.action;
|
|
11021
11029
|
if (kind === UpdateServiceKind.Create) {
|
|
11022
|
-
const { lookupHistory } = action;
|
|
11030
|
+
const { lookupHistory } = update.action;
|
|
11023
11031
|
if (this.services.has(serviceId)) {
|
|
11024
|
-
return result_Result.error(in_memory_state_UpdateError.DuplicateService, `${serviceId} already exists!`);
|
|
11032
|
+
return result_Result.error(in_memory_state_UpdateError.DuplicateService, () => `${serviceId} already exists!`);
|
|
11025
11033
|
}
|
|
11026
11034
|
this.services.set(serviceId, new InMemoryService(serviceId, {
|
|
11027
11035
|
info: account,
|
|
@@ -11033,7 +11041,7 @@ class in_memory_state_InMemoryState extends WithDebug {
|
|
|
11033
11041
|
else if (kind === UpdateServiceKind.Update) {
|
|
11034
11042
|
const existingService = this.services.get(serviceId);
|
|
11035
11043
|
if (existingService === undefined) {
|
|
11036
|
-
return result_Result.error(in_memory_state_UpdateError.NoService, `Cannot update ${serviceId} because it does not exist.`);
|
|
11044
|
+
return result_Result.error(in_memory_state_UpdateError.NoService, () => `Cannot update ${serviceId} because it does not exist.`);
|
|
11037
11045
|
}
|
|
11038
11046
|
existingService.data.info = account;
|
|
11039
11047
|
}
|
|
@@ -12280,7 +12288,6 @@ function getKeccakTrieHasher(hasher) {
|
|
|
12280
12288
|
|
|
12281
12289
|
|
|
12282
12290
|
|
|
12283
|
-
|
|
12284
12291
|
/** What should be done with that key? */
|
|
12285
12292
|
var StateEntryUpdateAction;
|
|
12286
12293
|
(function (StateEntryUpdateAction) {
|
|
@@ -12296,76 +12303,88 @@ function* serializeStateUpdate(spec, blake2b, update) {
|
|
|
12296
12303
|
yield* serializeBasicKeys(spec, update);
|
|
12297
12304
|
const encode = (codec, val) => encoder_Encoder.encodeObject(codec, val, spec);
|
|
12298
12305
|
// then let's proceed with service updates
|
|
12299
|
-
yield* serializeServiceUpdates(update.
|
|
12306
|
+
yield* serializeServiceUpdates(update.updated, encode, blake2b);
|
|
12300
12307
|
yield* serializePreimages(update.preimages, encode, blake2b);
|
|
12301
12308
|
yield* serializeStorage(update.storage, blake2b);
|
|
12302
|
-
yield* serializeRemovedServices(update.
|
|
12309
|
+
yield* serializeRemovedServices(update.removed);
|
|
12303
12310
|
}
|
|
12304
12311
|
function* serializeRemovedServices(servicesRemoved) {
|
|
12305
|
-
|
|
12312
|
+
if (servicesRemoved === undefined) {
|
|
12313
|
+
return;
|
|
12314
|
+
}
|
|
12315
|
+
for (const serviceId of servicesRemoved) {
|
|
12306
12316
|
// TODO [ToDr] what about all data associated with a service?
|
|
12307
12317
|
const codec = serialize_serialize.serviceData(serviceId);
|
|
12308
12318
|
yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
|
|
12309
12319
|
}
|
|
12310
12320
|
}
|
|
12311
|
-
function* serializeStorage(
|
|
12312
|
-
|
|
12313
|
-
|
|
12314
|
-
|
|
12315
|
-
|
|
12316
|
-
|
|
12317
|
-
|
|
12318
|
-
|
|
12319
|
-
|
|
12320
|
-
|
|
12321
|
-
|
|
12322
|
-
|
|
12323
|
-
|
|
12324
|
-
|
|
12321
|
+
function* serializeStorage(storageUpdates, blake2b) {
|
|
12322
|
+
if (storageUpdates === undefined) {
|
|
12323
|
+
return;
|
|
12324
|
+
}
|
|
12325
|
+
for (const [serviceId, updates] of storageUpdates.entries()) {
|
|
12326
|
+
for (const { action } of updates) {
|
|
12327
|
+
switch (action.kind) {
|
|
12328
|
+
case UpdateStorageKind.Set: {
|
|
12329
|
+
const key = action.storage.key;
|
|
12330
|
+
const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
|
|
12331
|
+
yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
|
|
12332
|
+
break;
|
|
12333
|
+
}
|
|
12334
|
+
case UpdateStorageKind.Remove: {
|
|
12335
|
+
const key = action.key;
|
|
12336
|
+
const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
|
|
12337
|
+
yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
|
|
12338
|
+
break;
|
|
12339
|
+
}
|
|
12325
12340
|
}
|
|
12326
|
-
default:
|
|
12327
|
-
debug_assertNever(action);
|
|
12328
12341
|
}
|
|
12329
12342
|
}
|
|
12330
12343
|
}
|
|
12331
|
-
function* serializePreimages(
|
|
12332
|
-
|
|
12333
|
-
|
|
12334
|
-
|
|
12335
|
-
|
|
12336
|
-
|
|
12337
|
-
|
|
12338
|
-
|
|
12339
|
-
const
|
|
12340
|
-
|
|
12341
|
-
|
|
12342
|
-
|
|
12343
|
-
|
|
12344
|
-
|
|
12344
|
+
function* serializePreimages(preimagesUpdates, encode, blake2b) {
|
|
12345
|
+
if (preimagesUpdates === undefined) {
|
|
12346
|
+
return;
|
|
12347
|
+
}
|
|
12348
|
+
for (const [serviceId, updates] of preimagesUpdates.entries()) {
|
|
12349
|
+
for (const { action } of updates) {
|
|
12350
|
+
switch (action.kind) {
|
|
12351
|
+
case UpdatePreimageKind.Provide: {
|
|
12352
|
+
const { hash, blob } = action.preimage;
|
|
12353
|
+
const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
|
|
12354
|
+
yield [StateEntryUpdateAction.Insert, codec.key, blob];
|
|
12355
|
+
if (action.slot !== null) {
|
|
12356
|
+
const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, numbers_tryAsU32(blob.length));
|
|
12357
|
+
yield [
|
|
12358
|
+
StateEntryUpdateAction.Insert,
|
|
12359
|
+
codec2.key,
|
|
12360
|
+
encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
|
|
12361
|
+
];
|
|
12362
|
+
}
|
|
12363
|
+
break;
|
|
12364
|
+
}
|
|
12365
|
+
case UpdatePreimageKind.UpdateOrAdd: {
|
|
12366
|
+
const { hash, length, slots } = action.item;
|
|
12367
|
+
const codec = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
|
|
12368
|
+
yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
|
|
12369
|
+
break;
|
|
12370
|
+
}
|
|
12371
|
+
case UpdatePreimageKind.Remove: {
|
|
12372
|
+
const { hash, length } = action;
|
|
12373
|
+
const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
|
|
12374
|
+
yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
|
|
12375
|
+
const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
|
|
12376
|
+
yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
|
|
12377
|
+
break;
|
|
12345
12378
|
}
|
|
12346
|
-
break;
|
|
12347
|
-
}
|
|
12348
|
-
case UpdatePreimageKind.UpdateOrAdd: {
|
|
12349
|
-
const { hash, length, slots } = action.item;
|
|
12350
|
-
const codec = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
|
|
12351
|
-
yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
|
|
12352
|
-
break;
|
|
12353
|
-
}
|
|
12354
|
-
case UpdatePreimageKind.Remove: {
|
|
12355
|
-
const { hash, length } = action;
|
|
12356
|
-
const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
|
|
12357
|
-
yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
|
|
12358
|
-
const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
|
|
12359
|
-
yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
|
|
12360
|
-
break;
|
|
12361
12379
|
}
|
|
12362
|
-
default:
|
|
12363
|
-
debug_assertNever(action);
|
|
12364
12380
|
}
|
|
12365
12381
|
}
|
|
12366
12382
|
}
|
|
12367
12383
|
function* serializeServiceUpdates(servicesUpdates, encode, blake2b) {
|
|
12368
|
-
|
|
12384
|
+
if (servicesUpdates === undefined) {
|
|
12385
|
+
return;
|
|
12386
|
+
}
|
|
12387
|
+
for (const [serviceId, { action }] of servicesUpdates.entries()) {
|
|
12369
12388
|
// new service being created or updated
|
|
12370
12389
|
const codec = serialize_serialize.serviceData(serviceId);
|
|
12371
12390
|
yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, action.account)];
|
|
@@ -12652,13 +12671,13 @@ class LeafDb {
|
|
|
12652
12671
|
*/
|
|
12653
12672
|
static fromLeavesBlob(blob, db) {
|
|
12654
12673
|
if (blob.length % TRIE_NODE_BYTES !== 0) {
|
|
12655
|
-
return result_Result.error(LeafDbError.InvalidLeafData, `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
|
|
12674
|
+
return result_Result.error(LeafDbError.InvalidLeafData, () => `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
|
|
12656
12675
|
}
|
|
12657
12676
|
const leaves = SortedSet.fromArray(leafComparator, []);
|
|
12658
12677
|
for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
|
|
12659
12678
|
const node = new TrieNode(nodeData.raw);
|
|
12660
12679
|
if (node.getNodeType() === NodeType.Branch) {
|
|
12661
|
-
return result_Result.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
|
|
12680
|
+
return result_Result.error(LeafDbError.InvalidLeafData, () => `Branch node detected: ${nodeData}`);
|
|
12662
12681
|
}
|
|
12663
12682
|
leaves.insert(node.asLeafNode());
|
|
12664
12683
|
}
|
|
@@ -13377,7 +13396,7 @@ class LmdbStates {
|
|
|
13377
13396
|
}
|
|
13378
13397
|
catch (e) {
|
|
13379
13398
|
logger.error `${e}`;
|
|
13380
|
-
return result_Result.error(StateUpdateError.Commit);
|
|
13399
|
+
return result_Result.error(StateUpdateError.Commit, () => `Failed to commit state update: ${e}`);
|
|
13381
13400
|
}
|
|
13382
13401
|
return result_Result.ok(result_OK);
|
|
13383
13402
|
}
|
|
@@ -14097,32 +14116,33 @@ class Preimages {
|
|
|
14097
14116
|
}
|
|
14098
14117
|
if (prevPreimage.requester > currPreimage.requester ||
|
|
14099
14118
|
currPreimage.blob.compare(prevPreimage.blob).isLessOrEqual()) {
|
|
14100
|
-
return result_Result.error(PreimagesErrorCode.PreimagesNotSortedUnique);
|
|
14119
|
+
return result_Result.error(PreimagesErrorCode.PreimagesNotSortedUnique, () => `Preimages not sorted/unique at index ${i}`);
|
|
14101
14120
|
}
|
|
14102
14121
|
}
|
|
14103
14122
|
const { preimages, slot } = input;
|
|
14104
|
-
const pendingChanges =
|
|
14123
|
+
const pendingChanges = new Map();
|
|
14105
14124
|
// select preimages for integration
|
|
14106
14125
|
for (const preimage of preimages) {
|
|
14107
14126
|
const { requester, blob } = preimage;
|
|
14108
14127
|
const hash = this.blake2b.hashBytes(blob).asOpaque();
|
|
14109
14128
|
const service = this.state.getService(requester);
|
|
14110
14129
|
if (service === null) {
|
|
14111
|
-
return result_Result.error(PreimagesErrorCode.AccountNotFound);
|
|
14130
|
+
return result_Result.error(PreimagesErrorCode.AccountNotFound, () => `Service not found: ${requester}`);
|
|
14112
14131
|
}
|
|
14113
14132
|
const hasPreimage = service.hasPreimage(hash);
|
|
14114
14133
|
const slots = service.getLookupHistory(hash, numbers_tryAsU32(blob.length));
|
|
14115
14134
|
// https://graypaper.fluffylabs.dev/#/5f542d7/181800181900
|
|
14116
14135
|
// https://graypaper.fluffylabs.dev/#/5f542d7/116f0011a500
|
|
14117
14136
|
if (hasPreimage || slots === null || !LookupHistoryItem.isRequested(slots)) {
|
|
14118
|
-
return result_Result.error(PreimagesErrorCode.PreimageUnneeded);
|
|
14137
|
+
return result_Result.error(PreimagesErrorCode.PreimageUnneeded, () => `Preimage unneeded: requester=${requester}, hash=${hash}, hasPreimage=${hasPreimage}, isRequested=${slots !== null && LookupHistoryItem.isRequested(slots)}`);
|
|
14119
14138
|
}
|
|
14120
14139
|
// https://graypaper.fluffylabs.dev/#/5f542d7/18c00018f300
|
|
14121
|
-
pendingChanges.
|
|
14122
|
-
|
|
14140
|
+
const updates = pendingChanges.get(requester) ?? [];
|
|
14141
|
+
updates.push(UpdatePreimage.provide({
|
|
14123
14142
|
preimage: PreimageItem.create({ hash, blob }),
|
|
14124
14143
|
slot,
|
|
14125
14144
|
}));
|
|
14145
|
+
pendingChanges.set(requester, updates);
|
|
14126
14146
|
}
|
|
14127
14147
|
return result_Result.ok({
|
|
14128
14148
|
preimages: pendingChanges,
|
|
@@ -14160,7 +14180,7 @@ class BlockVerifier {
|
|
|
14160
14180
|
const headerHash = this.hasher.header(headerView);
|
|
14161
14181
|
// check if current block is already imported
|
|
14162
14182
|
if (this.blocks.getHeader(headerHash.hash) !== null) {
|
|
14163
|
-
return result_Result.error(BlockVerifierError.AlreadyImported, `Block ${headerHash.hash} is already imported.`);
|
|
14183
|
+
return result_Result.error(BlockVerifierError.AlreadyImported, () => `Block ${headerHash.hash} is already imported.`);
|
|
14164
14184
|
}
|
|
14165
14185
|
// Check if parent block exists.
|
|
14166
14186
|
// https://graypaper.fluffylabs.dev/#/cc517d7/0c82000c8200?v=0.6.5
|
|
@@ -14170,14 +14190,14 @@ class BlockVerifier {
|
|
|
14170
14190
|
if (!parentHash.isEqualTo(block_verifier_ZERO_HASH)) {
|
|
14171
14191
|
const parentBlock = this.blocks.getHeader(parentHash);
|
|
14172
14192
|
if (parentBlock === null) {
|
|
14173
|
-
return result_Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
|
|
14193
|
+
return result_Result.error(BlockVerifierError.ParentNotFound, () => `Parent ${parentHash.toString()} not found`);
|
|
14174
14194
|
}
|
|
14175
14195
|
// Check if the time slot index is consecutive and not from future.
|
|
14176
14196
|
// https://graypaper.fluffylabs.dev/#/cc517d7/0c02010c0201?v=0.6.5
|
|
14177
14197
|
const timeslot = headerView.timeSlotIndex.materialize();
|
|
14178
14198
|
const parentTimeslot = parentBlock.timeSlotIndex.materialize();
|
|
14179
14199
|
if (timeslot <= parentTimeslot) {
|
|
14180
|
-
return result_Result.error(BlockVerifierError.InvalidTimeSlot, `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
|
|
14200
|
+
return result_Result.error(BlockVerifierError.InvalidTimeSlot, () => `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
|
|
14181
14201
|
}
|
|
14182
14202
|
}
|
|
14183
14203
|
// Check if extrinsic is valid.
|
|
@@ -14185,17 +14205,17 @@ class BlockVerifier {
|
|
|
14185
14205
|
const extrinsicHash = headerView.extrinsicHash.materialize();
|
|
14186
14206
|
const extrinsicMerkleCommitment = this.hasher.extrinsic(block.extrinsic.view());
|
|
14187
14207
|
if (!extrinsicHash.isEqualTo(extrinsicMerkleCommitment.hash)) {
|
|
14188
|
-
return result_Result.error(BlockVerifierError.InvalidExtrinsic, `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
|
|
14208
|
+
return result_Result.error(BlockVerifierError.InvalidExtrinsic, () => `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
|
|
14189
14209
|
}
|
|
14190
14210
|
// Check if the state root is valid.
|
|
14191
14211
|
// https://graypaper.fluffylabs.dev/#/cc517d7/0c18010c1801?v=0.6.5
|
|
14192
14212
|
const stateRoot = headerView.priorStateRoot.materialize();
|
|
14193
14213
|
const posteriorStateRoot = this.blocks.getPostStateRoot(parentHash);
|
|
14194
14214
|
if (posteriorStateRoot === null) {
|
|
14195
|
-
return result_Result.error(BlockVerifierError.StateRootNotFound, `Posterior state root ${parentHash.toString()} not found`);
|
|
14215
|
+
return result_Result.error(BlockVerifierError.StateRootNotFound, () => `Posterior state root ${parentHash.toString()} not found`);
|
|
14196
14216
|
}
|
|
14197
14217
|
if (!stateRoot.isEqualTo(posteriorStateRoot)) {
|
|
14198
|
-
return result_Result.error(BlockVerifierError.InvalidStateRoot, `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
|
|
14218
|
+
return result_Result.error(BlockVerifierError.InvalidStateRoot, () => `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
|
|
14199
14219
|
}
|
|
14200
14220
|
return result_Result.ok(headerHash.hash);
|
|
14201
14221
|
}
|
|
@@ -14320,7 +14340,7 @@ class Disputes {
|
|
|
14320
14340
|
// check if culprits are sorted by key
|
|
14321
14341
|
// https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
|
|
14322
14342
|
if (!isUniqueSortedBy(disputes.culprits, "key")) {
|
|
14323
|
-
return result_Result.error(DisputesErrorCode.CulpritsNotSortedUnique);
|
|
14343
|
+
return result_Result.error(DisputesErrorCode.CulpritsNotSortedUnique, () => "Culprits are not uniquely sorted by key");
|
|
14324
14344
|
}
|
|
14325
14345
|
const culpritsLength = disputes.culprits.length;
|
|
14326
14346
|
for (let i = 0; i < culpritsLength; i++) {
|
|
@@ -14329,24 +14349,24 @@ class Disputes {
|
|
|
14329
14349
|
// https://graypaper.fluffylabs.dev/#/579bd12/125501125501
|
|
14330
14350
|
const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
|
|
14331
14351
|
if (isInPunishSet) {
|
|
14332
|
-
return result_Result.error(DisputesErrorCode.OffenderAlreadyReported);
|
|
14352
|
+
return result_Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: culprit ${i}, key=${key}`);
|
|
14333
14353
|
}
|
|
14334
14354
|
// check if the guarantor key is correct
|
|
14335
14355
|
// https://graypaper.fluffylabs.dev/#/85129da/125501125501?v=0.6.3
|
|
14336
14356
|
if (!allValidatorKeys.has(key)) {
|
|
14337
|
-
return result_Result.error(DisputesErrorCode.BadGuarantorKey);
|
|
14357
|
+
return result_Result.error(DisputesErrorCode.BadGuarantorKey, () => `Bad guarantor key: culprit ${i}, key=${key}`);
|
|
14338
14358
|
}
|
|
14339
14359
|
// verify if the culprit will be in new bad set
|
|
14340
14360
|
// https://graypaper.fluffylabs.dev/#/579bd12/124601124601
|
|
14341
14361
|
const isInNewBadSet = newItems.asDictionaries().badSet.has(workReportHash);
|
|
14342
14362
|
if (!isInNewBadSet) {
|
|
14343
|
-
return result_Result.error(DisputesErrorCode.CulpritsVerdictNotBad);
|
|
14363
|
+
return result_Result.error(DisputesErrorCode.CulpritsVerdictNotBad, () => `Culprit verdict not bad: culprit ${i}, work report=${workReportHash}`);
|
|
14344
14364
|
}
|
|
14345
14365
|
// verify culprit signature
|
|
14346
14366
|
// https://graypaper.fluffylabs.dev/#/579bd12/125c01125c01
|
|
14347
14367
|
const result = verificationResult.culprits[i];
|
|
14348
14368
|
if (!result?.isValid) {
|
|
14349
|
-
return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for culprit: ${i}`);
|
|
14369
|
+
return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for culprit: ${i}`);
|
|
14350
14370
|
}
|
|
14351
14371
|
}
|
|
14352
14372
|
return result_Result.ok(null);
|
|
@@ -14355,7 +14375,7 @@ class Disputes {
|
|
|
14355
14375
|
// check if faults are sorted by key
|
|
14356
14376
|
// https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
|
|
14357
14377
|
if (!isUniqueSortedBy(disputes.faults, "key")) {
|
|
14358
|
-
return result_Result.error(DisputesErrorCode.FaultsNotSortedUnique);
|
|
14378
|
+
return result_Result.error(DisputesErrorCode.FaultsNotSortedUnique, () => "Faults are not uniquely sorted by key");
|
|
14359
14379
|
}
|
|
14360
14380
|
const faultsLength = disputes.faults.length;
|
|
14361
14381
|
for (let i = 0; i < faultsLength; i++) {
|
|
@@ -14364,12 +14384,12 @@ class Disputes {
|
|
|
14364
14384
|
// https://graypaper.fluffylabs.dev/#/579bd12/12a20112a201
|
|
14365
14385
|
const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
|
|
14366
14386
|
if (isInPunishSet) {
|
|
14367
|
-
return result_Result.error(DisputesErrorCode.OffenderAlreadyReported);
|
|
14387
|
+
return result_Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: fault ${i}, key=${key}`);
|
|
14368
14388
|
}
|
|
14369
14389
|
// check if the auditor key is correct
|
|
14370
14390
|
// https://graypaper.fluffylabs.dev/#/85129da/12a20112a201?v=0.6.3
|
|
14371
14391
|
if (!allValidatorKeys.has(key)) {
|
|
14372
|
-
return result_Result.error(DisputesErrorCode.BadAuditorKey);
|
|
14392
|
+
return result_Result.error(DisputesErrorCode.BadAuditorKey, () => `Bad auditor key: fault ${i}, key=${key}`);
|
|
14373
14393
|
}
|
|
14374
14394
|
// verify if the fault will be included in new good/bad set
|
|
14375
14395
|
// it may be not correct as in GP there is "iff" what means it should be rather
|
|
@@ -14381,14 +14401,14 @@ class Disputes {
|
|
|
14381
14401
|
const isInNewGoodSet = goodSet.has(workReportHash);
|
|
14382
14402
|
const isInNewBadSet = badSet.has(workReportHash);
|
|
14383
14403
|
if (isInNewGoodSet || !isInNewBadSet) {
|
|
14384
|
-
return result_Result.error(DisputesErrorCode.FaultVerdictWrong);
|
|
14404
|
+
return result_Result.error(DisputesErrorCode.FaultVerdictWrong, () => `Fault verdict wrong: fault ${i}, work report=${workReportHash}, inGood=${isInNewGoodSet}, inBad=${isInNewBadSet}`);
|
|
14385
14405
|
}
|
|
14386
14406
|
}
|
|
14387
14407
|
// verify fault signature. Verification was done earlier, here we only check the result.
|
|
14388
14408
|
// https://graypaper.fluffylabs.dev/#/579bd12/12a90112a901
|
|
14389
14409
|
const result = verificationResult.faults[i];
|
|
14390
14410
|
if (!result.isValid) {
|
|
14391
|
-
return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for fault: ${i}`);
|
|
14411
|
+
return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for fault: ${i}`);
|
|
14392
14412
|
}
|
|
14393
14413
|
}
|
|
14394
14414
|
return result_Result.ok(null);
|
|
@@ -14397,32 +14417,32 @@ class Disputes {
|
|
|
14397
14417
|
// check if verdicts are correctly sorted
|
|
14398
14418
|
// https://graypaper.fluffylabs.dev/#/579bd12/12c40112c401
|
|
14399
14419
|
if (!isUniqueSortedBy(disputes.verdicts, "workReportHash")) {
|
|
14400
|
-
return result_Result.error(DisputesErrorCode.VerdictsNotSortedUnique);
|
|
14420
|
+
return result_Result.error(DisputesErrorCode.VerdictsNotSortedUnique, () => "Verdicts are not uniquely sorted by work report hash");
|
|
14401
14421
|
}
|
|
14402
14422
|
// check if judgement are correctly sorted
|
|
14403
14423
|
// https://graypaper.fluffylabs.dev/#/579bd12/123702123802
|
|
14404
14424
|
if (disputes.verdicts.some((verdict) => !isUniqueSortedByIndex(verdict.votes))) {
|
|
14405
|
-
return result_Result.error(DisputesErrorCode.JudgementsNotSortedUnique);
|
|
14425
|
+
return result_Result.error(DisputesErrorCode.JudgementsNotSortedUnique, () => "Judgements are not uniquely sorted by index");
|
|
14406
14426
|
}
|
|
14407
14427
|
const currentEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
|
|
14408
14428
|
let voteSignatureIndex = 0;
|
|
14409
14429
|
for (const { votesEpoch, votes } of disputes.verdicts) {
|
|
14410
14430
|
// https://graypaper.fluffylabs.dev/#/579bd12/12bb0012bc00
|
|
14411
14431
|
if (votesEpoch !== currentEpoch && votesEpoch + 1 !== currentEpoch) {
|
|
14412
|
-
return result_Result.error(DisputesErrorCode.BadJudgementAge);
|
|
14432
|
+
return result_Result.error(DisputesErrorCode.BadJudgementAge, () => `Bad judgement age: epoch=${votesEpoch}, current=${currentEpoch}`);
|
|
14413
14433
|
}
|
|
14414
14434
|
const k = votesEpoch === currentEpoch ? this.state.currentValidatorData : this.state.previousValidatorData;
|
|
14415
14435
|
for (const { index } of votes) {
|
|
14416
14436
|
const key = k[index]?.ed25519;
|
|
14417
14437
|
// no particular GP fragment but I think we don't believe in ghosts
|
|
14418
14438
|
if (key === undefined) {
|
|
14419
|
-
return result_Result.error(DisputesErrorCode.BadValidatorIndex);
|
|
14439
|
+
return result_Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index: ${index} in epoch ${votesEpoch}`);
|
|
14420
14440
|
}
|
|
14421
14441
|
// verify vote signature. Verification was done earlier, here we only check the result.
|
|
14422
14442
|
// https://graypaper.fluffylabs.dev/#/579bd12/12cd0012cd00
|
|
14423
14443
|
const result = verificationResult.judgements[voteSignatureIndex];
|
|
14424
14444
|
if (!result.isValid) {
|
|
14425
|
-
return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for judgement: ${voteSignatureIndex}`);
|
|
14445
|
+
return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for judgement: ${voteSignatureIndex}`);
|
|
14426
14446
|
}
|
|
14427
14447
|
voteSignatureIndex += 1;
|
|
14428
14448
|
}
|
|
@@ -14438,7 +14458,7 @@ class Disputes {
|
|
|
14438
14458
|
const isInBadSet = badSet.has(verdict.workReportHash);
|
|
14439
14459
|
const isInWonkySet = wonkySet.has(verdict.workReportHash);
|
|
14440
14460
|
if (isInGoodSet || isInBadSet || isInWonkySet) {
|
|
14441
|
-
return result_Result.error(DisputesErrorCode.AlreadyJudged);
|
|
14461
|
+
return result_Result.error(DisputesErrorCode.AlreadyJudged, () => `Work report already judged: ${verdict.workReportHash}`);
|
|
14442
14462
|
}
|
|
14443
14463
|
}
|
|
14444
14464
|
return result_Result.ok(null);
|
|
@@ -14469,7 +14489,7 @@ class Disputes {
|
|
|
14469
14489
|
// https://graypaper.fluffylabs.dev/#/579bd12/12f10212fc02
|
|
14470
14490
|
const f = disputes.faults.find((x) => x.workReportHash.isEqualTo(r));
|
|
14471
14491
|
if (f === undefined) {
|
|
14472
|
-
return result_Result.error(DisputesErrorCode.NotEnoughFaults);
|
|
14492
|
+
return result_Result.error(DisputesErrorCode.NotEnoughFaults, () => `Not enough faults for work report: ${r}`);
|
|
14473
14493
|
}
|
|
14474
14494
|
}
|
|
14475
14495
|
else if (sum === 0) {
|
|
@@ -14478,13 +14498,13 @@ class Disputes {
|
|
|
14478
14498
|
const c1 = disputes.culprits.find((x) => x.workReportHash.isEqualTo(r));
|
|
14479
14499
|
const c2 = disputes.culprits.findLast((x) => x.workReportHash.isEqualTo(r));
|
|
14480
14500
|
if (c1 === c2) {
|
|
14481
|
-
return result_Result.error(DisputesErrorCode.NotEnoughCulprits);
|
|
14501
|
+
return result_Result.error(DisputesErrorCode.NotEnoughCulprits, () => `Not enough culprits for work report: ${r}`);
|
|
14482
14502
|
}
|
|
14483
14503
|
}
|
|
14484
14504
|
else if (sum !== this.chainSpec.thirdOfValidators) {
|
|
14485
14505
|
// positive votes count is not correct
|
|
14486
14506
|
// https://graypaper.fluffylabs.dev/#/579bd12/125002128102
|
|
14487
|
-
return result_Result.error(DisputesErrorCode.BadVoteSplit);
|
|
14507
|
+
return result_Result.error(DisputesErrorCode.BadVoteSplit, () => `Bad vote split: sum=${sum}, expected=${this.chainSpec.thirdOfValidators} for work report ${r}`);
|
|
14488
14508
|
}
|
|
14489
14509
|
}
|
|
14490
14510
|
return result_Result.ok(null);
|
|
@@ -14572,7 +14592,7 @@ class Disputes {
|
|
|
14572
14592
|
const validator = k[j.index];
|
|
14573
14593
|
// no particular GP fragment but I think we don't believe in ghosts
|
|
14574
14594
|
if (validator === undefined) {
|
|
14575
|
-
return result_Result.error(DisputesErrorCode.BadValidatorIndex);
|
|
14595
|
+
return result_Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index in signature verification: ${j.index}`);
|
|
14576
14596
|
}
|
|
14577
14597
|
const key = validator.ed25519;
|
|
14578
14598
|
// verify vote signature
|
|
@@ -14680,7 +14700,7 @@ const ringCommitmentCache = [];
|
|
|
14680
14700
|
async function verifySeal(bandersnatch, authorKey, signature, payload, encodedUnsealedHeader) {
|
|
14681
14701
|
const sealResult = await bandersnatch.verifySeal(authorKey.raw, signature.raw, payload.raw, encodedUnsealedHeader.raw);
|
|
14682
14702
|
if (sealResult[RESULT_INDEX] === ResultValues.Error) {
|
|
14683
|
-
return result_Result.error(null);
|
|
14703
|
+
return result_Result.error(null, () => "Bandersnatch VRF seal verification failed");
|
|
14684
14704
|
}
|
|
14685
14705
|
return result_Result.ok(bytes_Bytes.fromBlob(sealResult.subarray(1), hash_HASH_SIZE).asOpaque());
|
|
14686
14706
|
}
|
|
@@ -14706,7 +14726,7 @@ function getRingCommitment(bandersnatch, validators) {
|
|
|
14706
14726
|
async function getRingCommitmentNoCache(bandersnatch, keys) {
|
|
14707
14727
|
const commitmentResult = await bandersnatch.getRingCommitment(keys.raw);
|
|
14708
14728
|
if (commitmentResult[RESULT_INDEX] === ResultValues.Error) {
|
|
14709
|
-
return result_Result.error(null);
|
|
14729
|
+
return result_Result.error(null, () => "Bandersnatch ring commitment calculation failed");
|
|
14710
14730
|
}
|
|
14711
14731
|
return result_Result.ok(bytes_Bytes.fromBlob(commitmentResult.subarray(1), bandersnatch_BANDERSNATCH_RING_ROOT_BYTES).asOpaque());
|
|
14712
14732
|
}
|
|
@@ -14881,7 +14901,7 @@ class Safrole {
|
|
|
14881
14901
|
epochRoot: epochRootResult.ok,
|
|
14882
14902
|
});
|
|
14883
14903
|
}
|
|
14884
|
-
return result_Result.error(SafroleErrorCode.IncorrectData);
|
|
14904
|
+
return result_Result.error(SafroleErrorCode.IncorrectData, () => "Safrole: failed to get epoch root for validator keys");
|
|
14885
14905
|
}
|
|
14886
14906
|
/**
|
|
14887
14907
|
* Ticket sequencer that is used in standard mode
|
|
@@ -14972,10 +14992,10 @@ class Safrole {
|
|
|
14972
14992
|
for (let i = 1; i < ticketsLength; i++) {
|
|
14973
14993
|
const order = tickets[i - 1].id.compare(tickets[i].id);
|
|
14974
14994
|
if (order.isEqual()) {
|
|
14975
|
-
return result_Result.error(SafroleErrorCode.DuplicateTicket);
|
|
14995
|
+
return result_Result.error(SafroleErrorCode.DuplicateTicket, () => `Safrole: duplicate ticket found at index ${i}`);
|
|
14976
14996
|
}
|
|
14977
14997
|
if (order.isGreater()) {
|
|
14978
|
-
return result_Result.error(SafroleErrorCode.BadTicketOrder);
|
|
14998
|
+
return result_Result.error(SafroleErrorCode.BadTicketOrder, () => `Safrole: bad ticket order at index ${i}`);
|
|
14979
14999
|
}
|
|
14980
15000
|
}
|
|
14981
15001
|
return result_Result.ok(null);
|
|
@@ -15002,7 +15022,7 @@ class Safrole {
|
|
|
15002
15022
|
attempt: ticket.attempt,
|
|
15003
15023
|
}));
|
|
15004
15024
|
if (!verificationResult.every((x) => x.isValid)) {
|
|
15005
|
-
return result_Result.error(SafroleErrorCode.BadTicketProof);
|
|
15025
|
+
return result_Result.error(SafroleErrorCode.BadTicketProof, () => "Safrole: invalid ticket proof in extrinsic");
|
|
15006
15026
|
}
|
|
15007
15027
|
/**
|
|
15008
15028
|
* Verify if tickets are sorted and unique
|
|
@@ -15011,7 +15031,7 @@ class Safrole {
|
|
|
15011
15031
|
*/
|
|
15012
15032
|
const ticketsVerifcationResult = this.verifyTickets(tickets);
|
|
15013
15033
|
if (ticketsVerifcationResult.isError) {
|
|
15014
|
-
return result_Result.error(ticketsVerifcationResult.error);
|
|
15034
|
+
return result_Result.error(ticketsVerifcationResult.error, ticketsVerifcationResult.details);
|
|
15015
15035
|
}
|
|
15016
15036
|
if (this.isEpochChanged(timeslot)) {
|
|
15017
15037
|
return result_Result.ok(tickets);
|
|
@@ -15020,7 +15040,7 @@ class Safrole {
|
|
|
15020
15040
|
const ticketsFromExtrinsic = SortedSet.fromSortedArray(ticketComparator, tickets);
|
|
15021
15041
|
const mergedTickets = SortedSet.fromTwoSortedCollections(ticketsFromState, ticketsFromExtrinsic);
|
|
15022
15042
|
if (ticketsFromState.length + ticketsFromExtrinsic.length !== mergedTickets.length) {
|
|
15023
|
-
return result_Result.error(SafroleErrorCode.DuplicateTicket);
|
|
15043
|
+
return result_Result.error(SafroleErrorCode.DuplicateTicket, () => "Safrole: duplicate ticket when merging state and extrinsic tickets");
|
|
15024
15044
|
}
|
|
15025
15045
|
/**
|
|
15026
15046
|
* Remove tickets if size of accumulator exceeds E (epoch length).
|
|
@@ -15089,24 +15109,24 @@ class Safrole {
|
|
|
15089
15109
|
}
|
|
15090
15110
|
async transition(input) {
|
|
15091
15111
|
if (this.state.timeslot >= input.slot) {
|
|
15092
|
-
return result_Result.error(SafroleErrorCode.BadSlot);
|
|
15112
|
+
return result_Result.error(SafroleErrorCode.BadSlot, () => `Safrole: bad slot, state timeslot ${this.state.timeslot} >= input slot ${input.slot}`);
|
|
15093
15113
|
}
|
|
15094
15114
|
if (!this.isExtrinsicLengthValid(input.slot, input.extrinsic)) {
|
|
15095
|
-
return result_Result.error(SafroleErrorCode.UnexpectedTicket);
|
|
15115
|
+
return result_Result.error(SafroleErrorCode.UnexpectedTicket, () => `Safrole: unexpected ticket, invalid extrinsic length ${input.extrinsic.length}`);
|
|
15096
15116
|
}
|
|
15097
15117
|
if (!this.areTicketAttemptsValid(input.extrinsic)) {
|
|
15098
|
-
return result_Result.error(SafroleErrorCode.BadTicketAttempt);
|
|
15118
|
+
return result_Result.error(SafroleErrorCode.BadTicketAttempt, () => "Safrole: bad ticket attempt value in extrinsic");
|
|
15099
15119
|
}
|
|
15100
15120
|
const validatorKeysResult = await this.getValidatorKeys(input.slot, input.punishSet);
|
|
15101
15121
|
if (validatorKeysResult.isError) {
|
|
15102
|
-
return result_Result.error(validatorKeysResult.error);
|
|
15122
|
+
return result_Result.error(validatorKeysResult.error, validatorKeysResult.details);
|
|
15103
15123
|
}
|
|
15104
15124
|
const { nextValidatorData, currentValidatorData, previousValidatorData, epochRoot } = validatorKeysResult.ok;
|
|
15105
15125
|
const entropy = this.getEntropy(input.slot, input.entropy);
|
|
15106
15126
|
const sealingKeySeries = this.getSlotKeySequence(input.slot, currentValidatorData, entropy[2]);
|
|
15107
15127
|
const newTicketsAccumulatorResult = await this.getNewTicketAccumulator(input.slot, input.extrinsic, this.state.nextValidatorData, epochRoot, entropy[2]);
|
|
15108
15128
|
if (newTicketsAccumulatorResult.isError) {
|
|
15109
|
-
return result_Result.error(newTicketsAccumulatorResult.error);
|
|
15129
|
+
return result_Result.error(newTicketsAccumulatorResult.error, newTicketsAccumulatorResult.details);
|
|
15110
15130
|
}
|
|
15111
15131
|
const stateUpdate = {
|
|
15112
15132
|
nextValidatorData,
|
|
@@ -15140,14 +15160,14 @@ function compareWithEncoding(chainSpec, error, actual, expected, codec) {
|
|
|
15140
15160
|
if (actual === null || expected === null) {
|
|
15141
15161
|
// if one of them is `null`, both need to be.
|
|
15142
15162
|
if (actual !== expected) {
|
|
15143
|
-
return result_Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
|
|
15163
|
+
return result_Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
|
|
15144
15164
|
}
|
|
15145
15165
|
return result_Result.ok(result_OK);
|
|
15146
15166
|
}
|
|
15147
15167
|
// compare the literal encoding.
|
|
15148
15168
|
const encoded = encoder_Encoder.encodeObject(codec, actual, chainSpec);
|
|
15149
15169
|
if (!encoded.isEqualTo(expected.encoded())) {
|
|
15150
|
-
return result_Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
|
|
15170
|
+
return result_Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
|
|
15151
15171
|
}
|
|
15152
15172
|
return result_Result.ok(result_OK);
|
|
15153
15173
|
}
|
|
@@ -15190,7 +15210,7 @@ class SafroleSeal {
|
|
|
15190
15210
|
const blockAuthorKey = state.currentValidatorData.at(blockAuthorIndex)?.bandersnatch;
|
|
15191
15211
|
const entropySourceResult = await bandersnatch_vrf.verifySeal(await this.bandersnatch, blockAuthorKey ?? BANDERSNATCH_ZERO_KEY, headerView.entropySource.materialize(), payload, bytes_BytesBlob.blobFromNumbers([]));
|
|
15192
15212
|
if (entropySourceResult.isError) {
|
|
15193
|
-
return result_Result.error(SafroleSealError.IncorrectEntropySource);
|
|
15213
|
+
return result_Result.error(SafroleSealError.IncorrectEntropySource, () => "Safrole: incorrect entropy source in header seal");
|
|
15194
15214
|
}
|
|
15195
15215
|
return result_Result.ok(entropySourceResult.ok);
|
|
15196
15216
|
}
|
|
@@ -15199,7 +15219,7 @@ class SafroleSeal {
|
|
|
15199
15219
|
const validatorIndex = headerView.bandersnatchBlockAuthorIndex.materialize();
|
|
15200
15220
|
const authorKeys = state.currentValidatorData.at(validatorIndex);
|
|
15201
15221
|
if (authorKeys === undefined) {
|
|
15202
|
-
return result_Result.error(SafroleSealError.InvalidValidatorIndex);
|
|
15222
|
+
return result_Result.error(SafroleSealError.InvalidValidatorIndex, () => `Safrole: invalid validator index ${validatorIndex}`);
|
|
15203
15223
|
}
|
|
15204
15224
|
const timeSlot = headerView.timeSlotIndex.materialize();
|
|
15205
15225
|
const sealingKeys = state.sealingKeySeries;
|
|
@@ -15218,10 +15238,10 @@ class SafroleSeal {
|
|
|
15218
15238
|
const authorKey = validatorData.bandersnatch;
|
|
15219
15239
|
const result = await bandersnatch_vrf.verifySeal(await this.bandersnatch, authorKey ?? BANDERSNATCH_ZERO_KEY, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
|
|
15220
15240
|
if (result.isError) {
|
|
15221
|
-
return result_Result.error(SafroleSealError.IncorrectSeal);
|
|
15241
|
+
return result_Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with ticket");
|
|
15222
15242
|
}
|
|
15223
15243
|
if (ticket === undefined || !ticket.id.isEqualTo(result.ok)) {
|
|
15224
|
-
return result_Result.error(SafroleSealError.InvalidTicket);
|
|
15244
|
+
return result_Result.error(SafroleSealError.InvalidTicket, () => `Safrole: invalid ticket, expected ${ticket?.id} got ${result.ok}`);
|
|
15225
15245
|
}
|
|
15226
15246
|
return result_Result.ok(result.ok);
|
|
15227
15247
|
}
|
|
@@ -15231,13 +15251,13 @@ class SafroleSeal {
|
|
|
15231
15251
|
const sealingKey = keys.at(index);
|
|
15232
15252
|
const authorBandersnatchKey = authorKey.bandersnatch;
|
|
15233
15253
|
if (sealingKey === undefined || !sealingKey.isEqualTo(authorBandersnatchKey)) {
|
|
15234
|
-
return result_Result.error(SafroleSealError.InvalidValidator, `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
|
|
15254
|
+
return result_Result.error(SafroleSealError.InvalidValidator, () => `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
|
|
15235
15255
|
}
|
|
15236
15256
|
// verify seal correctness
|
|
15237
15257
|
const payload = bytes_BytesBlob.blobFromParts(JAM_FALLBACK_SEAL, entropy.raw);
|
|
15238
15258
|
const result = await bandersnatch_vrf.verifySeal(await this.bandersnatch, authorBandersnatchKey, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
|
|
15239
15259
|
if (result.isError) {
|
|
15240
|
-
return result_Result.error(SafroleSealError.IncorrectSeal);
|
|
15260
|
+
return result_Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with keys");
|
|
15241
15261
|
}
|
|
15242
15262
|
return result_Result.ok(result.ok);
|
|
15243
15263
|
}
|
|
@@ -15280,6 +15300,14 @@ async function getRootHash(yieldedRoots) {
|
|
|
15280
15300
|
|
|
15281
15301
|
|
|
15282
15302
|
const InsufficientFundsError = "insufficient funds";
|
|
15303
|
+
/** Deep clone of a map with array. */
|
|
15304
|
+
function deepCloneMapWithArray(map) {
|
|
15305
|
+
const cloned = [];
|
|
15306
|
+
for (const [k, v] of map.entries()) {
|
|
15307
|
+
cloned.push([k, v.slice()]);
|
|
15308
|
+
}
|
|
15309
|
+
return new Map(cloned);
|
|
15310
|
+
}
|
|
15283
15311
|
/**
|
|
15284
15312
|
* State updates that currently accumulating service produced.
|
|
15285
15313
|
*
|
|
@@ -15309,10 +15337,11 @@ class AccumulationStateUpdate {
|
|
|
15309
15337
|
/** Create new empty state update. */
|
|
15310
15338
|
static empty() {
|
|
15311
15339
|
return new AccumulationStateUpdate({
|
|
15312
|
-
|
|
15313
|
-
|
|
15314
|
-
|
|
15315
|
-
|
|
15340
|
+
created: [],
|
|
15341
|
+
updated: new Map(),
|
|
15342
|
+
removed: [],
|
|
15343
|
+
preimages: new Map(),
|
|
15344
|
+
storage: new Map(),
|
|
15316
15345
|
}, []);
|
|
15317
15346
|
}
|
|
15318
15347
|
/** Create a state update with some existing, yet uncommited services updates. */
|
|
@@ -15324,10 +15353,13 @@ class AccumulationStateUpdate {
|
|
|
15324
15353
|
/** Create a copy of another `StateUpdate`. Used by checkpoints. */
|
|
15325
15354
|
static copyFrom(from) {
|
|
15326
15355
|
const serviceUpdates = {
|
|
15327
|
-
|
|
15328
|
-
|
|
15329
|
-
|
|
15330
|
-
|
|
15356
|
+
// shallow copy
|
|
15357
|
+
created: [...from.services.created],
|
|
15358
|
+
updated: new Map(from.services.updated),
|
|
15359
|
+
removed: [...from.services.removed],
|
|
15360
|
+
// deep copy
|
|
15361
|
+
preimages: deepCloneMapWithArray(from.services.preimages),
|
|
15362
|
+
storage: deepCloneMapWithArray(from.services.storage),
|
|
15331
15363
|
};
|
|
15332
15364
|
const transfers = [...from.transfers];
|
|
15333
15365
|
const update = new AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
|
|
@@ -15375,9 +15407,9 @@ class PartiallyUpdatedState {
 if (destination === null) {
 return null;
 }
-const
-if (
-return
+const maybeUpdatedServiceInfo = this.stateUpdate.services.updated.get(destination);
+if (maybeUpdatedServiceInfo !== undefined) {
+return maybeUpdatedServiceInfo.action.account;
 }
 const maybeService = this.state.getService(destination);
 if (maybeService === null) {
@@ -15386,7 +15418,8 @@ class PartiallyUpdatedState {
 return maybeService.getInfo();
 }
 getStorage(serviceId, rawKey) {
-const
+const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+const item = storages.find((x) => x.key.isEqualTo(rawKey));
 if (item !== undefined) {
 return item.value;
 }
@@ -15401,10 +15434,11 @@ class PartiallyUpdatedState {
 * the existence in `preimages` map.
 */
 hasPreimage(serviceId, hash) {
-const
+const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+const providedPreimage = preimages.find(
 // we ignore the action here, since if there is <any> update on that
 // hash it means it has to exist, right?
-(p) => p.
+(p) => p.hash.isEqualTo(hash));
 if (providedPreimage !== undefined) {
 return true;
 }
@@ -15417,7 +15451,8 @@ class PartiallyUpdatedState {
 }
 getPreimage(serviceId, hash) {
 // TODO [ToDr] Should we verify availability here?
-const
+const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+const freshlyProvided = preimages.find((x) => x.hash.isEqualTo(hash));
 if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
 return freshlyProvided.action.preimage.blob;
 }
@@ -15426,10 +15461,11 @@ class PartiallyUpdatedState {
 }
 /** Get status of a preimage of current service taking into account any updates. */
 getLookupHistory(currentTimeslot, serviceId, hash, length) {
+const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
 // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
 // the same state update. We should however switch to proper "updated state"
 // representation soon.
-const updatedPreimage =
+const updatedPreimage = preimages.findLast((update) => update.hash.isEqualTo(hash) && BigInt(update.length) === length);
 const stateFallback = () => {
 // fallback to state lookup
 const service = this.state.getService(serviceId);
@@ -15466,14 +15502,15 @@ class PartiallyUpdatedState {
 /* State update functions. */
 updateStorage(serviceId, key, value) {
 const update = value === null
-? UpdateStorage.remove({
+? UpdateStorage.remove({ key })
 : UpdateStorage.set({
-serviceId,
 storage: StorageItem.create({ key, value }),
 });
-const
+const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+const index = storages.findIndex((x) => x.key.isEqualTo(key));
 const count = index === -1 ? 0 : 1;
-
+storages.splice(index, count, update);
+this.stateUpdate.services.storage.set(serviceId, storages);
 }
 /**
 * Update a preimage.
@@ -15481,8 +15518,10 @@ class PartiallyUpdatedState {
 * Note we store all previous entries as well, since there might be a sequence of:
 * `provide` -> `remove` and both should update the end state somehow.
 */
-updatePreimage(newUpdate) {
-this.stateUpdate.services.preimages.
+updatePreimage(serviceId, newUpdate) {
+const updatePreimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+updatePreimages.push(newUpdate);
+this.stateUpdate.services.preimages.set(serviceId, updatePreimages);
 }
 updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
 debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
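
The getters and setters above now key storage and preimage updates by service id. The idiom is a read-modify-write on a `Map<serviceId, updates[]>`, with `get(...) ?? []` standing in for "no updates recorded yet". A small illustrative sketch (type names are placeholders, not the package's real types):

  // Sketch of the read-modify-write idiom used by updateStorage/updatePreimage above.
  type ServiceId = number;
  interface PreimageUpdate { hash: string; kind: "provide" | "remove"; }

  function recordUpdate(
    updates: Map<ServiceId, PreimageUpdate[]>,
    serviceId: ServiceId,
    update: PreimageUpdate,
  ): void {
    // A missing entry just means no updates were recorded for that service yet.
    const list = updates.get(serviceId) ?? [];
    list.push(update);
    updates.set(serviceId, list);
  }

  // Reads use the same default, so lookups on untouched services fall through to state:
  //   const list = updates.get(serviceId) ?? []; list.find((p) => p.hash === hash);
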
@@ -15491,11 +15530,11 @@ class PartiallyUpdatedState {
 const overflowBytes = !isU64(bytes);
 // TODO [ToDr] this is not specified in GP, but it seems sensible.
 if (overflowItems || overflowBytes) {
-return result_Result.error(InsufficientFundsError);
+return result_Result.error(InsufficientFundsError, () => `Storage utilisation overflow: items=${overflowItems}, bytes=${overflowBytes}`);
 }
 const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
 if (serviceInfo.balance < thresholdBalance) {
-return result_Result.error(InsufficientFundsError);
+return result_Result.error(InsufficientFundsError, () => `Service balance (${serviceInfo.balance}) below threshold (${thresholdBalance})`);
 }
 // Update service info with new details.
 this.updateServiceInfo(serviceId, ServiceAccountInfo.create({
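
The recurring change in this and the following hunks is the second argument of `result_Result.error`: the human-readable context is now passed as a thunk, so the message is only built if the error is actually formatted, and an existing error's `details` function can be forwarded unchanged (as in `result_Result.error(pagesResult.error, pagesResult.details)` further down). A minimal sketch of that shape, assuming a simplified `Result` type (illustrative only, not the package's code):

  // Minimal sketch of the lazy-details Result shape implied by the diff.
  type Result<T, E> =
    | { isOk: true; isError: false; ok: T }
    | { isOk: false; isError: true; error: E; details: () => string };

  const Result = {
    ok<T, E>(ok: T): Result<T, E> {
      return { isOk: true, isError: false, ok };
    },
    // `details` is only evaluated when someone formats the error.
    error<T, E>(error: E, details: () => string = () => ""): Result<T, E> {
      return { isOk: false, isError: true, error, details };
    },
  };

  const res = Result.error("insufficient funds", () => "balance below threshold");
  if (res.isError) {
    console.log(`${res.details()}\nError: ${res.error}`);
  }
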
@@ -15506,22 +15545,25 @@ class PartiallyUpdatedState {
 return result_Result.ok(result_OK);
 }
 updateServiceInfo(serviceId, newInfo) {
-const
-
-
-if (existingItem?.action.kind === UpdateServiceKind.Create) {
-this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.create({
-serviceId,
+const existingUpdate = this.stateUpdate.services.updated.get(serviceId);
+if (existingUpdate?.action.kind === UpdateServiceKind.Create) {
+this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
 serviceInfo: newInfo,
-lookupHistory:
+lookupHistory: existingUpdate.action.lookupHistory,
 }));
 return;
 }
-this.stateUpdate.services.
-serviceId,
+this.stateUpdate.services.updated.set(serviceId, UpdateService.update({
 serviceInfo: newInfo,
 }));
 }
+createService(serviceId, newInfo, newLookupHistory) {
+this.stateUpdate.services.created.push(serviceId);
+this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
+serviceInfo: newInfo,
+lookupHistory: newLookupHistory,
+}));
+}
 getPrivilegedServices() {
 if (this.stateUpdate.privilegedServices !== null) {
 return this.stateUpdate.privilegedServices;
@@ -16995,7 +17037,7 @@ class ReadablePage extends MemoryPage {
 loadInto(result, startIndex, length) {
 const endIndex = startIndex + length;
 if (endIndex > PAGE_SIZE) {
-return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE));
+return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
 }
 const bytes = this.data.subarray(startIndex, endIndex);
 // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -17004,7 +17046,7 @@ class ReadablePage extends MemoryPage {
 return result_Result.ok(result_OK);
 }
 storeFrom(_address, _data) {
-return result_Result.error(PageFault.fromMemoryIndex(this.start, true));
+return result_Result.error(PageFault.fromMemoryIndex(this.start, true), () => `Page fault: attempted to write to read-only page at ${this.start}`);
 }
 setData(pageIndex, data) {
 this.data.set(data, pageIndex);
@@ -17038,7 +17080,7 @@ class WriteablePage extends MemoryPage {
 loadInto(result, startIndex, length) {
 const endIndex = startIndex + length;
 if (endIndex > PAGE_SIZE) {
-return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE));
+return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
 }
 const bytes = this.view.subarray(startIndex, endIndex);
 // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -17124,7 +17166,7 @@ class Memory {
 memory_logger.insane `MEM[${address}] <- ${bytes_BytesBlob.blobFrom(bytes)}`;
 const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
 if (pagesResult.isError) {
-return result_Result.error(pagesResult.error);
+return result_Result.error(pagesResult.error, pagesResult.details);
 }
 const pages = pagesResult.ok;
 let currentPosition = address;
@@ -17149,14 +17191,14 @@ class Memory {
 const pages = [];
 for (const pageNumber of pageRange) {
 if (pageNumber < RESERVED_NUMBER_OF_PAGES) {
-return result_Result.error(PageFault.fromPageNumber(pageNumber, true));
+return result_Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to access reserved page ${pageNumber}`);
 }
 const page = this.memory.get(pageNumber);
 if (page === undefined) {
-return result_Result.error(PageFault.fromPageNumber(pageNumber));
+return result_Result.error(PageFault.fromPageNumber(pageNumber), () => `Page fault: page ${pageNumber} not allocated`);
 }
 if (accessType === AccessType.WRITE && !page.isWriteable()) {
-return result_Result.error(PageFault.fromPageNumber(pageNumber, true));
+return result_Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to write to read-only page ${pageNumber}`);
 }
 pages.push(page);
 }
@@ -17174,7 +17216,7 @@ class Memory {
 }
 const pagesResult = this.getPages(startAddress, result.length, AccessType.READ);
 if (pagesResult.isError) {
-return result_Result.error(pagesResult.error);
+return result_Result.error(pagesResult.error, pagesResult.details);
 }
 const pages = pagesResult.ok;
 let currentPosition = startAddress;
@@ -19113,7 +19155,7 @@ class ProgramDecoder {
 }
 catch (e) {
 program_decoder_logger.error `Invalid program: ${e}`;
-return result_Result.error(ProgramDecoderError.InvalidProgramError);
+return result_Result.error(ProgramDecoderError.InvalidProgramError, () => `Program decoder error: ${e}`);
 }
 }
 }
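
The two `getPages` call sites above show the propagation half of the lazy-details change: when a failed result is re-wrapped, the existing thunk is forwarded rather than rebuilt. A short sketch, building on the illustrative `Result` helper from the earlier sketch:

  // Sketch: re-wrapping a failed result while forwarding its lazy `details` thunk.
  function rewrap<T, E>(inner: Result<T, E>): Result<T, E> {
    if (inner.isError) {
      // Forward the thunk itself; the message is still only built on demand.
      return Result.error(inner.error, inner.details);
    }
    return Result.ok(inner.ok);
  }
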
@@ -19856,10 +19898,10 @@ class AccumulateExternalities {
 const len = existingPreimage.slots.length;
 // https://graypaper.fluffylabs.dev/#/9a08063/380901380901?v=0.6.6
 if (len === PreimageStatusKind.Requested) {
-return result_Result.error(RequestPreimageError.AlreadyRequested);
+return result_Result.error(RequestPreimageError.AlreadyRequested, () => `Preimage already requested: hash=${hash}`);
 }
 if (len === PreimageStatusKind.Available || len === PreimageStatusKind.Reavailable) {
-return result_Result.error(RequestPreimageError.AlreadyAvailable);
+return result_Result.error(RequestPreimageError.AlreadyAvailable, () => `Preimage already available: hash=${hash}`);
 }
 // TODO [ToDr] Not sure if we should update the service info in that case,
 // but for now we let that case fall-through.
@@ -19884,15 +19926,13 @@ class AccumulateExternalities {
 const clampedLength = clampU64ToU32(length);
 if (existingPreimage === null) {
 // https://graypaper.fluffylabs.dev/#/9a08063/38a60038a600?v=0.6.6
-this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
-serviceId: this.currentServiceId,
+this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
 lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([])),
 }));
 }
 else {
 /** https://graypaper.fluffylabs.dev/#/9a08063/38ca0038ca00?v=0.6.6 */
-this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
-serviceId: this.currentServiceId,
+this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
 lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([...existingPreimage.slots, this.currentTimeslot])),
 }));
 }
@@ -19902,7 +19942,7 @@ class AccumulateExternalities {
 const serviceId = this.currentServiceId;
 const status = this.updatedState.getLookupHistory(this.currentTimeslot, this.currentServiceId, hash, length);
 if (status === null) {
-return result_Result.error(ForgetPreimageError.NotFound);
+return result_Result.error(ForgetPreimageError.NotFound, () => `Preimage not found: hash=${hash}, length=${length}`);
 }
 const s = slotsToPreimageStatus(status.slots);
 const updateStorageUtilisation = () => {
@@ -19915,10 +19955,9 @@ class AccumulateExternalities {
 if (s.status === PreimageStatusKind.Requested) {
 const res = updateStorageUtilisation();
 if (res.isError) {
-return result_Result.error(ForgetPreimageError.StorageUtilisationError);
+return result_Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
 }
-this.updatedState.updatePreimage(UpdatePreimage.remove({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
 hash: status.hash,
 length: status.length,
 }));
@@ -19931,21 +19970,19 @@ class AccumulateExternalities {
 if (y < t - this.chainSpec.preimageExpungePeriod) {
 const res = updateStorageUtilisation();
 if (res.isError) {
-return result_Result.error(ForgetPreimageError.StorageUtilisationError);
+return result_Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
 }
-this.updatedState.updatePreimage(UpdatePreimage.remove({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
 hash: status.hash,
 length: status.length,
 }));
 return result_Result.ok(result_OK);
 }
-return result_Result.error(ForgetPreimageError.NotExpired);
+return result_Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
 }
 // https://graypaper.fluffylabs.dev/#/9a08063/38c80138c801?v=0.6.6
 if (s.status === PreimageStatusKind.Available) {
-this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
 lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[0], t])),
 }));
 return result_Result.ok(result_OK);
@@ -19954,13 +19991,12 @@ class AccumulateExternalities {
 if (s.status === PreimageStatusKind.Reavailable) {
 const y = s.data[1];
 if (y < t - this.chainSpec.preimageExpungePeriod) {
-this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
 lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[2], t])),
 }));
 return result_Result.ok(result_OK);
 }
-return result_Result.error(ForgetPreimageError.NotExpired);
+return result_Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
 }
 debug_assertNever(s);
 }
@@ -19969,17 +20005,17 @@ class AccumulateExternalities {
 const destination = this.getServiceInfo(destinationId);
 /** https://graypaper.fluffylabs.dev/#/9a08063/370401370401?v=0.6.6 */
 if (destination === null || destinationId === null) {
-return result_Result.error(TransferError.DestinationNotFound);
+return result_Result.error(TransferError.DestinationNotFound, () => `Destination service not found: ${destinationId}`);
 }
 /** https://graypaper.fluffylabs.dev/#/9a08063/371301371301?v=0.6.6 */
 if (gas < destination.onTransferMinGas) {
-return result_Result.error(TransferError.GasTooLow);
+return result_Result.error(TransferError.GasTooLow, () => `Gas ${gas} below minimum ${destination.onTransferMinGas}`);
 }
 /** https://graypaper.fluffylabs.dev/#/9a08063/371b01371b01?v=0.6.6 */
 const newBalance = source.balance - amount;
 const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(source.storageUtilisationCount, source.storageUtilisationBytes, source.gratisStorage);
 if (newBalance < thresholdBalance) {
-return result_Result.error(TransferError.BalanceBelowThreshold);
+return result_Result.error(TransferError.BalanceBelowThreshold, () => `Balance ${newBalance} below threshold ${thresholdBalance}`);
 }
 // outgoing transfer
 this.updatedState.stateUpdate.transfers.push(PendingTransfer.create({
@@ -20006,7 +20042,7 @@ class AccumulateExternalities {
 // check if we are priviledged to set gratis storage
 // https://graypaper.fluffylabs.dev/#/7e6ff6a/369203369603?v=0.6.7
 if (gratisStorage !== numbers_tryAsU64(0) && this.currentServiceId !== this.updatedState.getPrivilegedServices().manager) {
-return result_Result.error(NewServiceError.UnprivilegedService);
+return result_Result.error(NewServiceError.UnprivilegedService, () => `Service ${this.currentServiceId} not privileged to set gratis storage`);
 }
 // check if we have enough balance
 // https://graypaper.fluffylabs.dev/#/7e6ff6a/369e0336a303?v=0.6.7
@@ -20015,7 +20051,7 @@ class AccumulateExternalities {
 const thresholdForCurrent = ServiceAccountInfo.calculateThresholdBalance(currentService.storageUtilisationCount, currentService.storageUtilisationBytes, currentService.gratisStorage);
 const balanceLeftForCurrent = currentService.balance - thresholdForNew;
 if (balanceLeftForCurrent < thresholdForCurrent || bytes.overflow) {
-return result_Result.error(NewServiceError.InsufficientFunds);
+return result_Result.error(NewServiceError.InsufficientFunds, () => `Insufficient funds: balance=${currentService.balance}, required=${thresholdForNew}, overflow=${bytes.overflow}`);
 }
 // `a`: https://graypaper.fluffylabs.dev/#/ab2cdbd/366b02366d02?v=0.7.2
 const newAccount = ServiceAccountInfo.create({
@@ -20042,15 +20078,11 @@ class AccumulateExternalities {
 // NOTE: It's safe to cast to `Number` here, bcs here service ID cannot be bigger than 2**16
 const newServiceId = tryAsServiceId(Number(wantedServiceId));
 if (this.getServiceInfo(newServiceId) !== null) {
-return result_Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken);
+return result_Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken, () => `Service ID ${newServiceId} already taken`);
 }
 // add the new service with selected ID
 // https://graypaper.fluffylabs.dev/#/ab2cdbd/36be0336c003?v=0.7.2
-this.updatedState.
-serviceId: newServiceId,
-serviceInfo: newAccount,
-lookupHistory: newLookupItem,
-}));
+this.updatedState.createService(newServiceId, newAccount, newLookupItem);
 // update the balance of current service
 // https://graypaper.fluffylabs.dev/#/ab2cdbd/36c20336c403?v=0.7.2
 this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -20061,12 +20093,8 @@ class AccumulateExternalities {
 }
 const newServiceId = this.nextNewServiceId;
 // add the new service
-// https://graypaper.fluffylabs.dev/#/
-this.updatedState.
-serviceId: newServiceId,
-serviceInfo: newAccount,
-lookupHistory: newLookupItem,
-}));
+// https://graypaper.fluffylabs.dev/#/7e6ff6a/36cb0236cb02?v=0.6.7
+this.updatedState.createService(newServiceId, newAccount, newLookupItem);
 // update the balance of current service
 // https://graypaper.fluffylabs.dev/#/ab2cdbd/36ec0336ee03?v=0.7.2
 this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -20090,7 +20118,7 @@ class AccumulateExternalities {
 const currentDelegator = this.updatedState.getPrivilegedServices().delegator;
 if (currentDelegator !== this.currentServiceId) {
 accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not a validators manager. (expected: ${currentDelegator}) and cannot update validators data. Ignoring`;
-return result_Result.error(UnprivilegedError);
+return result_Result.error(UnprivilegedError, () => `Service ${this.currentServiceId} is not delegator (expected: ${currentDelegator})`);
 }
 this.updatedState.stateUpdate.validatorsData = validatorsData;
 return result_Result.ok(result_OK);
@@ -20105,11 +20133,11 @@ class AccumulateExternalities {
 const currentAssigners = this.updatedState.getPrivilegedServices().assigners[coreIndex];
 if (currentAssigners !== this.currentServiceId) {
 accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not an auth manager of core ${coreIndex} (expected: ${currentAssigners}) and cannot update authorization queue.`;
-return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
+return result_Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} not assigner for core ${coreIndex} (expected: ${currentAssigners})`);
 }
 if (assigners === null && Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)) {
 accumulate_externalities_logger.trace `The new auth manager is not a valid service id.`;
-return result_Result.error(UpdatePrivilegesError.InvalidServiceId);
+return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => `New auth manager is null for core ${coreIndex}`);
 }
 this.updatedState.stateUpdate.authorizationQueues.set(coreIndex, authQueue);
 return result_Result.ok(result_OK);
@@ -20142,10 +20170,10 @@ class AccumulateExternalities {
 const isManager = current.manager === this.currentServiceId;
 if (Compatibility.isLessThan(GpVersion.V0_7_1)) {
 if (!isManager) {
-return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
+return result_Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} is not manager`);
 }
 if (manager === null || delegator === null) {
-return result_Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator is not a valid service id.");
+return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator is not a valid service id.");
 }
 this.updatedState.stateUpdate.privilegedServices = PrivilegedServices.create({
 manager,
@@ -20158,7 +20186,7 @@ class AccumulateExternalities {
 }
 const original = this.updatedState.state.privilegedServices;
 if (manager === null || delegator === null || registrar === null) {
-return result_Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator or registrar is not a valid service id.");
+return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator or registrar is not a valid service id.");
 }
 const newDelegator = this.updatePrivilegedServiceId(delegator, current.delegator, {
 isManager,
@@ -20198,23 +20226,22 @@ class AccumulateExternalities {
 // TODO [ToDr] what about newly created services?
 const service = serviceId === null ? null : this.updatedState.state.getService(serviceId);
 if (service === null || serviceId === null) {
-return result_Result.error(ProvidePreimageError.ServiceNotFound);
+return result_Result.error(ProvidePreimageError.ServiceNotFound, () => `Service not found: ${serviceId}`);
 }
 // calculating the hash
 const preimageHash = this.blake2b.hashBytes(preimage).asOpaque();
 // checking service internal lookup
 const stateLookup = this.updatedState.getLookupHistory(this.currentTimeslot, serviceId, preimageHash, numbers_tryAsU64(preimage.length));
 if (stateLookup === null || !LookupHistoryItem.isRequested(stateLookup)) {
-return result_Result.error(ProvidePreimageError.WasNotRequested);
+return result_Result.error(ProvidePreimageError.WasNotRequested, () => `Preimage was not requested: hash=${preimageHash}, service=${serviceId}`);
 }
 // checking already provided preimages
 const hasPreimage = this.updatedState.hasPreimage(serviceId, preimageHash);
 if (hasPreimage) {
-return result_Result.error(ProvidePreimageError.AlreadyProvided);
+return result_Result.error(ProvidePreimageError.AlreadyProvided, () => `Preimage already provided: hash=${preimageHash}, service=${serviceId}`);
 }
 // setting up the new preimage
-this.updatedState.updatePreimage(UpdatePreimage.provide({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.provide({
 preimage: PreimageItem.create({
 hash: preimageHash,
 blob: preimage,
@@ -20226,31 +20253,31 @@ class AccumulateExternalities {
 eject(destination, previousCodeHash) {
 const service = this.getServiceInfo(destination);
 if (service === null || destination === null) {
-return result_Result.error(EjectError.InvalidService, "Service missing");
+return result_Result.error(EjectError.InvalidService, () => "Service missing");
 }
 const currentService = this.getCurrentServiceInfo();
 // check if the service expects to be ejected by us:
 const expectedCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
 writeServiceIdAsLeBytes(this.currentServiceId, expectedCodeHash.raw);
 if (!service.codeHash.isEqualTo(expectedCodeHash)) {
-return result_Result.error(EjectError.InvalidService, "Invalid code hash");
+return result_Result.error(EjectError.InvalidService, () => "Invalid code hash");
 }
 // make sure the service only has required number of storage items?
 if (service.storageUtilisationCount !== REQUIRED_NUMBER_OF_STORAGE_ITEMS_FOR_EJECT) {
-return result_Result.error(EjectError.InvalidPreimage, "Too many storage items");
+return result_Result.error(EjectError.InvalidPreimage, () => "Too many storage items");
 }
 // storage items length
 const l = numbers_tryAsU64(maxU64(service.storageUtilisationBytes, LOOKUP_HISTORY_ENTRY_BYTES) - LOOKUP_HISTORY_ENTRY_BYTES);
 // check if we have a preimage with the entire storage.
 const [isPreviousCodeExpired, errorReason] = this.isPreviousCodeExpired(destination, previousCodeHash, l);
 if (!isPreviousCodeExpired) {
-return result_Result.error(EjectError.InvalidPreimage, `Previous code available: ${errorReason}`);
+return result_Result.error(EjectError.InvalidPreimage, () => `Previous code available: ${errorReason}`);
 }
 // compute new balance of the service.
 const newBalance = sumU64(currentService.balance, service.balance);
 // TODO [ToDr] what to do in case of overflow?
 if (newBalance.overflow) {
-return result_Result.error(EjectError.InvalidService, "Balance overflow");
+return result_Result.error(EjectError.InvalidService, () => "Balance overflow");
 }
 // update current service.
 this.updatedState.updateServiceInfo(this.currentServiceId, ServiceAccountInfo.create({
@@ -20258,11 +20285,13 @@ class AccumulateExternalities {
 balance: newBalance.value,
 }));
 // and finally add an ejected service.
-this.updatedState.stateUpdate.services.
+this.updatedState.stateUpdate.services.removed.push(destination);
 // take care of the code preimage and its lookup history
 // Safe, because we know the preimage is valid, and it's the code of the service, which is bounded by maximal service code size anyway (much smaller than 2**32 bytes).
 const preimageLength = numbers_tryAsU32(Number(l));
-this.updatedState.stateUpdate.services.preimages.
+const preimages = this.updatedState.stateUpdate.services.preimages.get(destination) ?? [];
+preimages.push(UpdatePreimage.remove({ hash: previousCodeHash, length: preimageLength }));
+this.updatedState.stateUpdate.services.preimages.set(destination, preimages);
 return result_Result.ok(result_OK);
 }
 read(serviceId, rawKey) {
@@ -20443,10 +20472,10 @@ class Assurances {
 for (const assurance of assurances) {
 const { anchor, validatorIndex, bitfield } = assurance;
 if (!anchor.isEqualTo(input.parentHash)) {
-return result_Result.error(AssurancesError.InvalidAnchor, `anchor: expected: ${input.parentHash}, got ${anchor}`);
+return result_Result.error(AssurancesError.InvalidAnchor, () => `anchor: expected: ${input.parentHash}, got ${anchor}`);
 }
 if (prevValidatorIndex >= validatorIndex) {
-return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
+return result_Result.error(AssurancesError.InvalidOrder, () => `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
 }
 prevValidatorIndex = assurance.validatorIndex;
 debug_check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
@@ -20469,7 +20498,7 @@ class Assurances {
 * https://graypaper.fluffylabs.dev/#/579bd12/14e90014ea00
 */
 if (noOfAssurances > 0 && !isReportPending) {
-return result_Result.error(AssurancesError.NoReportPending, `no report pending for core ${c} yet we got an assurance`);
+return result_Result.error(AssurancesError.NoReportPending, () => `no report pending for core ${c} yet we got an assurance`);
 }
 /**
 * Remove work report if it's became available or timed out.
@@ -20515,7 +20544,7 @@ class Assurances {
 const v = assurance.view();
 const key = validatorData[v.validatorIndex.materialize()];
 if (key === undefined) {
-return result_Result.error(AssurancesError.InvalidValidatorIndex);
+return result_Result.error(AssurancesError.InvalidValidatorIndex, () => `Invalid validator index: ${v.validatorIndex.materialize()}`);
 }
 signatures.push({
 signature: v.signature.materialize(),
@@ -20527,7 +20556,7 @@ class Assurances {
 const isAllSignaturesValid = signaturesValid.every((x) => x);
 if (!isAllSignaturesValid) {
 const invalidIndices = signaturesValid.reduce((acc, isValid, idx) => (isValid ? acc : acc.concat(idx)), []);
-return result_Result.error(AssurancesError.InvalidSignature, `invalid signatures at ${invalidIndices.join(", ")}`);
+return result_Result.error(AssurancesError.InvalidSignature, () => `invalid signatures at ${invalidIndices.join(", ")}`);
 }
 return result_Result.ok(result_OK);
 }
@@ -21138,7 +21167,7 @@ class HostCallMemory {
 return result_Result.ok(result_OK);
 }
 if (address + numbers_tryAsU64(bytes.length) > MEMORY_SIZE) {
-return result_Result.error(new OutOfBounds());
+return result_Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${address} + length ${bytes.length} exceeds memory size`);
 }
 return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
 }
@@ -21147,13 +21176,10 @@ class HostCallMemory {
 return result_Result.ok(result_OK);
 }
 if (startAddress + numbers_tryAsU64(result.length) > MEMORY_SIZE) {
-return result_Result.error(new OutOfBounds());
+return result_Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${startAddress} + length ${result.length} exceeds memory size`);
 }
 return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
 }
-getMemory() {
-return this.memory;
-}
 }
 
 ;// CONCATENATED MODULE: ./packages/core/pvm-host-calls/host-call-registers.ts
@@ -23093,18 +23119,18 @@ class Accumulate {
 const serviceInfo = updatedState.getServiceInfo(serviceId);
 if (serviceInfo === null) {
 accumulate_logger.log `Service with id ${serviceId} not found.`;
-return result_Result.error(PvmInvocationError.NoService);
+return result_Result.error(PvmInvocationError.NoService, () => `Accumulate: service ${serviceId} not found`);
 }
 const codeHash = serviceInfo.codeHash;
 // TODO [ToDr] Should we check that the preimage is still available?
 const code = updatedState.getPreimage(serviceId, codeHash.asOpaque());
 if (code === null) {
 accumulate_logger.log `Code with hash ${codeHash} not found for service ${serviceId}.`;
-return result_Result.error(PvmInvocationError.NoPreimage);
+return result_Result.error(PvmInvocationError.NoPreimage, () => `Accumulate: code with hash ${codeHash} not found for service ${serviceId}`);
 }
 if (code.length > W_C) {
 accumulate_logger.log `Code with hash ${codeHash} is too long for service ${serviceId}.`;
-return result_Result.error(PvmInvocationError.PreimageTooLong);
+return result_Result.error(PvmInvocationError.PreimageTooLong, () => `Accumulate: code length ${code.length} exceeds max ${W_C} for service ${serviceId}`);
 }
 const nextServiceId = generateNextServiceId({ serviceId, entropy, timeslot: slot }, this.chainSpec, this.blake2b);
 const partialState = new AccumulateExternalities(this.chainSpec, this.blake2b, updatedState, serviceId, nextServiceId, slot);
@@ -23359,19 +23385,16 @@ class Accumulate {
 const gasLimit = tryAsServiceGas(this.chainSpec.maxBlockGas > calculatedGasLimit ? this.chainSpec.maxBlockGas : calculatedGasLimit);
 return tryAsServiceGas(gasLimit);
 }
-
-
-
-
-
-
-
-
-
-
-}
-}
-return false;
+/**
+* Detects the very unlikely situation where multiple services are created with the same ID.
+*
+* https://graypaper.fluffylabs.dev/#/ab2cdbd/30f20330f403?v=0.7.2
+*
+* NOTE: This is public only for testing purposes and should not be used outside of accumulation.
+*/
+hasDuplicatedServiceIdCreated(createdIds) {
+const uniqueIds = new Set(createdIds);
+return uniqueIds.size !== createdIds.length;
 }
 async transition({ reports, slot, entropy }) {
 const statistics = new Map();
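
The `hasDuplicatedServiceIdCreated` helper added above is a plain set-size comparison over the list of created service ids. A usage sketch (ids are illustrative):

  // Usage sketch for the duplicate-creation check (service ids are illustrative).
  function hasDuplicatedServiceIdCreated(createdIds: readonly number[]): boolean {
    // A Set collapses duplicates, so any shrinkage means at least one repeated id.
    return new Set(createdIds).size !== createdIds.length;
  }

  hasDuplicatedServiceIdCreated([1, 2, 3]);    // false
  hasDuplicatedServiceIdCreated([1, 2, 2, 3]); // true, the block is treated as invalid
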
@@ -23393,8 +23416,9 @@ class Accumulate {
|
|
|
23393
23416
|
const accumulated = accumulatableReports.subview(0, accumulatedReports);
|
|
23394
23417
|
const { services, yieldedRoots, transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
|
|
23395
23418
|
assertEmpty(stateUpdateRest);
|
|
23396
|
-
if (this.
|
|
23397
|
-
|
|
23419
|
+
if (this.hasDuplicatedServiceIdCreated(services.created)) {
|
|
23420
|
+
accumulate_logger.trace `Duplicated Service creation detected. Block is invalid.`;
|
|
23421
|
+
return result_Result.error(ACCUMULATION_ERROR, () => "Accumulate: duplicate service created");
|
|
23398
23422
|
}
|
|
23399
23423
|
const accStateUpdate = this.getAccumulationStateUpdate(accumulated.toArray(), toAccumulateLater, slot, Array.from(statistics.keys()), services);
|
|
23400
23424
|
const accumulationOutputUnsorted = Array.from(yieldedRoots.entries()).map(([serviceId, root]) => {
|
|
@@ -23475,13 +23499,13 @@ class DeferredTransfers {
|
|
|
23475
23499
|
.toSorted((a, b) => a.source - b.source);
|
|
23476
23500
|
const info = partiallyUpdatedState.getServiceInfo(serviceId);
|
|
23477
23501
|
if (info === null) {
|
|
23478
|
-
return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist);
|
|
23502
|
+
return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist, () => `Deferred transfers: service info not found for ${serviceId}`);
|
|
23479
23503
|
}
|
|
23480
23504
|
const codeHash = info.codeHash;
|
|
23481
23505
|
const code = partiallyUpdatedState.getPreimage(serviceId, codeHash.asOpaque());
|
|
23482
23506
|
const newBalance = sumU64(info.balance, ...transfers.map((item) => item.amount));
|
|
23483
23507
|
if (newBalance.overflow) {
|
|
23484
|
-
return result_Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow);
|
|
23508
|
+
return result_Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow, () => `Deferred transfers: balance overflow for service ${serviceId}`);
|
|
23485
23509
|
}
|
|
23486
23510
|
const newInfo = ServiceAccountInfo.create({ ...info, balance: newBalance.value });
|
|
23487
23511
|
partiallyUpdatedState.updateServiceInfo(serviceId, newInfo);
|
|
@@ -23959,7 +23983,7 @@ function verifyReportsBasic(input) {
|
|
|
23959
23983
|
const noOfPrerequisites = reportView.context.view().prerequisites.view().length;
|
|
23960
23984
|
const noOfSegmentRootLookups = reportView.segmentRootLookup.view().length;
|
|
23961
23985
|
if (noOfPrerequisites + noOfSegmentRootLookups > MAX_REPORT_DEPENDENCIES) {
|
|
23962
|
-
return result_Result.error(ReportsError.TooManyDependencies, `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
|
|
23986
|
+
return result_Result.error(ReportsError.TooManyDependencies, () => `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
|
|
23963
23987
|
}
|
|
23964
23988
|
/**
|
|
23965
23989
|
* In order to ensure fair use of a block’s extrinsic space,
|
|
@@ -23978,7 +24002,7 @@ function verifyReportsBasic(input) {
|
|
|
23978
24002
|
totalOutputsSize += item.view().result.view().okBlob?.raw.length ?? 0;
|
|
23979
24003
|
}
|
|
23980
24004
|
if (authOutputSize + totalOutputsSize > MAX_WORK_REPORT_SIZE_BYTES) {
|
|
23981
|
-
return result_Result.error(ReportsError.WorkReportTooBig, `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
|
|
24005
|
+
return result_Result.error(ReportsError.WorkReportTooBig, () => `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
|
|
23982
24006
|
}
|
|
23983
24007
|
}
|
|
23984
24008
|
return result_Result.ok(result_OK);
|
|
@@ -24012,12 +24036,12 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
|
|
|
24012
24036
|
for (const result of guarantee.report.results) {
|
|
24013
24037
|
const service = state.getService(result.serviceId);
|
|
24014
24038
|
if (service === null) {
|
|
24015
|
-
return result_Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
|
|
24039
|
+
return result_Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
|
|
24016
24040
|
}
|
|
24017
24041
|
// check service code hash
|
|
24018
24042
|
// https://graypaper.fluffylabs.dev/#/5f542d7/154b02154b02
|
|
24019
24043
|
if (!result.codeHash.isEqualTo(service.getInfo().codeHash)) {
|
|
24020
|
-
return result_Result.error(ReportsError.BadCodeHash, `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
|
|
24044
|
+
return result_Result.error(ReportsError.BadCodeHash, () => `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
|
|
24021
24045
|
}
|
|
24022
24046
|
}
|
|
24023
24047
|
}
|
|
@@ -24028,7 +24052,7 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
|
|
|
24028
24052
|
* https://graypaper.fluffylabs.dev/#/5f542d7/151f01152101
|
|
24029
24053
|
*/
|
|
24030
24054
|
if (currentWorkPackages.size !== input.guarantees.length) {
|
|
24031
|
-
return result_Result.error(ReportsError.DuplicatePackage, "Duplicate work package detected.");
|
|
24055
|
+
return result_Result.error(ReportsError.DuplicatePackage, () => "Duplicate work package detected.");
|
|
24032
24056
|
}
|
|
24033
24057
|
const minLookupSlot = Math.max(0, input.slot - maxLookupAnchorAge);
|
|
24034
24058
|
const contextResult = verifyRefineContexts(minLookupSlot, contexts, input.recentBlocksPartialUpdate, headerChain);
|
|
@@ -24073,7 +24097,7 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
|
|
|
24073
24097
|
: undefined;
|
|
24074
24098
|
}
|
|
24075
24099
|
if (root === undefined || !root.segmentTreeRoot.isEqualTo(lookup.segmentTreeRoot)) {
|
|
24076
|
-
return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
|
|
24100
|
+
return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
|
|
24077
24101
|
}
|
|
24078
24102
|
}
|
|
24079
24103
|
}
|
|
@@ -24096,16 +24120,16 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
|
|
|
24096
24120
|
*/
|
|
24097
24121
|
const recentBlock = recentBlocks.get(context.anchor);
|
|
24098
24122
|
if (recentBlock === undefined) {
|
|
24099
|
-
return result_Result.error(ReportsError.AnchorNotRecent, `Anchor block ${context.anchor} not found in recent blocks.`);
|
|
24123
|
+
return result_Result.error(ReportsError.AnchorNotRecent, () => `Anchor block ${context.anchor} not found in recent blocks.`);
|
|
24100
24124
|
}
|
|
24101
24125
|
// check state root
|
|
24102
24126
|
if (!recentBlock.postStateRoot.isEqualTo(context.stateRoot)) {
|
|
24103
|
-
return result_Result.error(ReportsError.BadStateRoot, `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
|
|
24127
|
+
return result_Result.error(ReportsError.BadStateRoot, () => `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
|
|
24104
24128
|
}
|
|
24105
24129
|
// check beefy root
|
|
24106
24130
|
const beefyRoot = recentBlock.accumulationResult;
|
|
24107
24131
|
if (!beefyRoot.isEqualTo(context.beefyRoot)) {
|
|
24108
|
-
return result_Result.error(ReportsError.BadBeefyMmrRoot, `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
|
|
24132
|
+
return result_Result.error(ReportsError.BadBeefyMmrRoot, () => `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
|
|
24109
24133
|
}
|
|
24110
24134
|
/**
|
|
24111
24135
|
* We require that each lookup-anchor block be within the
|
|
@@ -24114,7 +24138,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
|
|
|
24114
24138
|
* https://graypaper.fluffylabs.dev/#/5f542d7/154601154701
|
|
24115
24139
|
*/
|
|
24116
24140
|
if (context.lookupAnchorSlot < minLookupSlot) {
|
|
24117
|
-
return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
|
|
24141
|
+
return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
|
|
24118
24142
|
}
|
|
24119
24143
|
/**
|
|
24120
24144
|
* We also require that we have a record of it; this is one of
|
|
@@ -24131,7 +24155,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
|
|
|
24131
24155
|
verify_contextual_logger.warn `Lookup anchor check for ${context.lookupAnchor} would fail, but override is active.`;
|
|
24132
24156
|
}
|
|
24133
24157
|
else {
|
|
24134
|
-
return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
|
|
24158
|
+
return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
|
|
24135
24159
|
}
|
|
24136
24160
|
}
|
|
24137
24161
|
}
|
|
@@ -24154,7 +24178,7 @@ function verifyDependencies({ currentWorkPackages, recentlyReported, prerequisit
|
|
|
24154
24178
|
if (recentlyReported.has(preReqHash)) {
|
|
24155
24179
|
continue;
|
|
24156
24180
|
}
|
|
24157
|
-
return result_Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, `Missing work package ${preReqHash} in current extrinsic or recent history.`);
|
|
24181
|
+
return result_Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, () => `Missing work package ${preReqHash} in current extrinsic or recent history.`);
|
|
24158
24182
|
}
|
|
24159
24183
|
return result_Result.ok(result_OK);
|
|
24160
24184
|
};
|
|
@@ -24202,7 +24226,7 @@ function verifyWorkPackagesUniqueness(workPackageHashes, state) {
|
|
|
24202
24226
|
// let's check if any of our packages is in the pipeline
|
|
24203
24227
|
const intersection = packagesInPipeline.intersection(workPackageHashes);
|
|
24204
24228
|
for (const packageHash of intersection) {
|
|
24205
|
-
return result_Result.error(ReportsError.DuplicatePackage, `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
|
|
24229
|
+
return result_Result.error(ReportsError.DuplicatePackage, () => `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
|
|
24206
24230
|
}
|
|
24207
24231
|
return result_Result.ok(result_OK);
|
|
24208
24232
|
}
|
|
@@ -24241,7 +24265,7 @@ workReportHashes, slot, getGuarantorAssignment) {
|
|
|
24241
24265
|
const credentialsView = guaranteeView.credentials.view();
|
|
24242
24266
|
if (credentialsView.length < REQUIRED_CREDENTIALS_RANGE[0] ||
|
|
24243
24267
|
credentialsView.length > REQUIRED_CREDENTIALS_RANGE[1]) {
|
|
24244
|
-
return result_Result.error(ReportsError.InsufficientGuarantees, `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
|
|
24268
|
+
return result_Result.error(ReportsError.InsufficientGuarantees, () => `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
|
|
24245
24269
|
}
|
|
24246
24270
|
/** Retrieve current core assignment. */
|
|
24247
24271
|
const timeSlot = guaranteeView.slot.materialize();
|
|
@@ -24256,20 +24280,20 @@ workReportHashes, slot, getGuarantorAssignment) {
|
|
|
24256
24280
|
const credentialView = credential.view();
|
|
24257
24281
|
const validatorIndex = credentialView.validatorIndex.materialize();
|
|
24258
24282
|
if (lastValidatorIndex >= validatorIndex) {
|
|
24259
|
-
return result_Result.error(ReportsError.NotSortedOrUniqueGuarantors, `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
|
|
24283
|
+
return result_Result.error(ReportsError.NotSortedOrUniqueGuarantors, () => `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
|
|
24260
24284
|
}
|
|
24261
24285
|
lastValidatorIndex = validatorIndex;
|
|
24262
24286
|
const signature = credentialView.signature.materialize();
|
|
24263
24287
|
const guarantorData = guarantorAssignments[validatorIndex];
|
|
24264
24288
|
if (guarantorData === undefined) {
|
|
24265
|
-
return result_Result.error(ReportsError.BadValidatorIndex, `Invalid validator index: ${validatorIndex}`);
|
|
24289
|
+
return result_Result.error(ReportsError.BadValidatorIndex, () => `Invalid validator index: ${validatorIndex}`);
|
|
24266
24290
|
}
|
|
24267
24291
|
/**
|
|
24268
24292
|
* Verify core assignment.
|
|
24269
24293
|
* https://graypaper.fluffylabs.dev/#/5f542d7/14e40214e602
|
|
24270
24294
|
*/
|
|
24271
24295
|
if (guarantorData.core !== coreIndex) {
|
|
24272
|
-
return result_Result.error(ReportsError.WrongAssignment, `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
|
|
24296
|
+
return result_Result.error(ReportsError.WrongAssignment, () => `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
|
|
24273
24297
|
}
|
|
24274
24298
|
signaturesToVerify.push({
|
|
24275
24299
|
signature,
|
|
@@ -24307,10 +24331,10 @@ function verifyReportsOrder(input, chainSpec) {
|
|
|
24307
24331
|
const reportView = guarantee.view().report.view();
|
|
24308
24332
|
const coreIndex = reportView.coreIndex.materialize();
|
|
24309
24333
|
if (lastCoreIndex >= coreIndex) {
|
|
24310
|
-
return result_Result.error(ReportsError.OutOfOrderGuarantee, `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
|
|
24334
|
+
return result_Result.error(ReportsError.OutOfOrderGuarantee, () => `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
|
|
24311
24335
|
}
|
|
24312
24336
|
if (coreIndex >= noOfCores) {
|
|
24313
|
-
return result_Result.error(ReportsError.BadCoreIndex, `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
|
|
24337
|
+
return result_Result.error(ReportsError.BadCoreIndex, () => `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
|
|
24314
24338
|
}
|
|
24315
24339
|
lastCoreIndex = coreIndex;
|
|
24316
24340
|
}
|
|
@@ -24335,7 +24359,7 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
|
|
|
24335
24359
|
* https://graypaper.fluffylabs.dev/#/5f542d7/15ea0015ea00
|
|
24336
24360
|
*/
|
|
24337
24361
|
if (availabilityAssignment[coreIndex] !== null) {
|
|
24338
|
-
return result_Result.error(ReportsError.CoreEngaged, `Report pending availability at core: ${coreIndex}`);
|
|
24362
|
+
return result_Result.error(ReportsError.CoreEngaged, () => `Report pending availability at core: ${coreIndex}`);
|
|
24339
24363
|
}
|
|
24340
24364
|
/**
|
|
24341
24365
|
* A report is valid only if the authorizer hash is present
|
|
@@ -24348,7 +24372,7 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
|
|
|
24348
24372
|
const authorizerPool = authPools.get(coreIndex);
|
|
24349
24373
|
const pool = authorizerPool?.materialize() ?? [];
|
|
24350
24374
|
if (pool.find((hash) => hash.isEqualTo(authorizerHash)) === undefined) {
|
|
24351
|
-
return result_Result.error(ReportsError.CoreUnauthorized, `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
|
|
24375
|
+
return result_Result.error(ReportsError.CoreUnauthorized, () => `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
|
|
24352
24376
|
}
|
|
24353
24377
|
/**
|
|
24354
24378
|
* We require that the gas allotted for accumulation of each
|
|
@@ -24360,17 +24384,17 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
         for (const result of report.results) {
             const service = services(result.serviceId);
             if (service === null) {
-                return result_Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
+                return result_Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
             }
             const info = service.getInfo();
             // check minimal accumulation gas
             if (result.gas < info.accumulateMinGas) {
-                return result_Result.error(ReportsError.ServiceItemGasTooLow, `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
+                return result_Result.error(ReportsError.ServiceItemGasTooLow, () => `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
             }
         }
         const totalGas = sumU64(...report.results.map((x) => x.gas));
         if (totalGas.overflow || totalGas.value > G_A) {
-            return result_Result.error(ReportsError.WorkReportGasTooHigh, `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
+            return result_Result.error(ReportsError.WorkReportGasTooHigh, () => `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
         }
     }
     return result_Result.ok(result_OK);
@@ -24456,7 +24480,7 @@ class Reports {
         }
         const reporters = SortedSet.fromArray(bytesBlobComparator, signaturesToVerify.ok.map((x) => x.key)).slice();
         if (hasAnyOffenders(reporters, input.offenders)) {
-            return result_Result.error(ReportsError.BannedValidator);
+            return result_Result.error(ReportsError.BannedValidator, () => "One or more reporters are banned validators");
         }
         return result_Result.ok({
             stateUpdate: {
@@ -24496,7 +24520,7 @@ class Reports {
                 return signaturesToVerify[idx].key;
             })
                 .filter((x) => x !== null);
-            return result_Result.error(ReportsError.BadSignature, `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
+            return result_Result.error(ReportsError.BadSignature, () => `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
         }
         /**
          * Get the guarantor assignment (both core and validator data)
@@ -24512,10 +24536,10 @@ class Reports {
         const minTimeSlot = Math.max(0, headerRotation - 1) * rotationPeriod;
         // https://graypaper.fluffylabs.dev/#/5f542d7/155e00156900
         if (guaranteeTimeSlot > headerTimeSlot) {
-            return result_Result.error(ReportsError.FutureReportSlot, `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
+            return result_Result.error(ReportsError.FutureReportSlot, () => `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
         }
         if (guaranteeTimeSlot < minTimeSlot) {
-            return result_Result.error(ReportsError.ReportEpochBeforeLast, `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
+            return result_Result.error(ReportsError.ReportEpochBeforeLast, () => `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
         }
         // TODO [ToDr] [opti] below code needs cache.
         // The `G` and `G*` sets should only be computed once per rotation.
@@ -25083,6 +25107,16 @@ class OnChain {
         });
         const { statistics, ...statisticsRest } = statisticsUpdate;
         assertEmpty(statisticsRest);
+        // Concat accumulatePreimages updates with preimages
+        for (const [serviceId, accPreimageUpdates] of accumulatePreimages.entries()) {
+            const preimagesUpdates = preimages.get(serviceId);
+            if (preimagesUpdates === undefined) {
+                preimages.set(serviceId, accPreimageUpdates);
+            }
+            else {
+                preimages.set(serviceId, preimagesUpdates.concat(accPreimageUpdates));
+            }
+        }
         return result_Result.ok({
             ...(maybeAuthorizationQueues !== undefined ? { authQueues: maybeAuthorizationQueues } : {}),
             ...(maybeDesignatedValidatorData !== undefined ? { designatedValidatorData: maybeDesignatedValidatorData } : {}),
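The block added to OnChain above folds the preimage updates produced during accumulation into the per-service preimages map, concatenating with any updates already queued for the same service. A standalone sketch of the same merge, using plain Maps of string arrays in place of the package's actual update types:

    // Merge per-service update lists from `incoming` into `target`,
    // concatenating when the service already has pending updates.
    function mergePerService<K, V>(target: Map<K, V[]>, incoming: Map<K, V[]>): void {
        for (const [serviceId, updates] of incoming.entries()) {
            const existing = target.get(serviceId);
            target.set(serviceId, existing === undefined ? updates : existing.concat(updates));
        }
    }

    // Example: service 5 ends up with both lists, service 7 with only the incoming one.
    const preimages = new Map<number, string[]>([[5, ["a"]]]);
    mergePerService(preimages, new Map([[5, ["b"]], [7, ["c"]]]));
    console.log(preimages.get(5)); // ["a", "b"]
    console.log(preimages.get(7)); // ["c"]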
@@ -25104,7 +25138,7 @@ class OnChain {
             recentlyAccumulated,
             accumulationOutputLog,
             ...servicesUpdate,
-            preimages
+            preimages,
         });
     }
     getUsedAuthorizerHashes(guarantees) {
@@ -25121,11 +25155,11 @@ class OnChain {
 }
 function checkOffendersMatch(offendersMark, headerOffendersMark) {
     if (offendersMark.size !== headerOffendersMark.length) {
-        return result_Result.error(OFFENDERS_ERROR, `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
+        return result_Result.error(OFFENDERS_ERROR, () => `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
     }
     for (const key of headerOffendersMark) {
         if (!offendersMark.has(key)) {
-            return result_Result.error(OFFENDERS_ERROR, `Missing key: ${key}`);
+            return result_Result.error(OFFENDERS_ERROR, () => `Missing key: ${key}`);
         }
     }
     return result_Result.ok(result_OK);
@@ -25207,7 +25241,7 @@ class Importer {
         if (!this.currentHash.isEqualTo(parentHash)) {
             const state = this.states.getState(parentHash);
             if (state === null) {
-                const e = result_Result.error(BlockVerifierError.StateRootNotFound);
+                const e = result_Result.error(BlockVerifierError.StateRootNotFound, () => `State not found for parent block ${parentHash}`);
                 if (!e.isError) {
                     throw new Error("unreachable, just adding to make compiler happy");
                 }
@@ -25403,7 +25437,7 @@ const importBlockResultCodec = descriptors_codec.custom({
     }
     if (kind === 1) {
         const error = d.bytesBlob();
-        return result_Result.error(error.asText());
+        return result_Result.error(error.asText(), () => error.asText());
     }
     throw new Error(`Invalid Result: ${kind}`);
 }, (s) => {
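In the codec hunk above, the decoded error is already a human-readable string, so the new second argument simply re-yields that same text; the same convention appears in the hunks below wherever a formatted string doubles as both the error value and its details. A small sketch of that shape, where decodeErrorResult and decodeText are hypothetical stand-ins rather than the package's decoder API:

    // Sketch: when the error is already a string, the lazy details callback
    // can just return that same string, keeping the error() signature uniform.
    function decodeErrorResult(payload: Uint8Array, decodeText: (blob: Uint8Array) => string) {
        const text = decodeText(payload);
        return { isOk: false as const, isError: true as const, error: text, details: () => text };
    }

    // Example with a trivial UTF-8 decoder standing in for the real blob-to-text conversion.
    const err = decodeErrorResult(new TextEncoder().encode("boom"), (b) => new TextDecoder().decode(b));
    console.log(err.details()); // "boom"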
@@ -25454,7 +25488,7 @@ class MainReady extends State {
         if (res instanceof Uint8Array) {
             return decoder_Decoder.decodeObject(importBlockResultCodec, res);
         }
-        return result_Result.error("Invalid worker response.");
+        return result_Result.error("Invalid worker response.", () => "Invalid worker response: expected Uint8Array");
     }
     async getStateEntries(port, hash) {
         const res = await port.sendRequest("getStateEntries", hash, [hash.buffer]);
@@ -25566,13 +25600,13 @@ class ImporterReady extends State {
                 response = result_Result.ok(this.importer.getBestStateRootHash() ?? ZERO_HASH.asOpaque());
             }
             else {
-                response = result_Result.error(resultToString(res));
+                response = result_Result.error(resultToString(res), () => resultToString(res));
             }
         }
         catch (e) {
             state_machine_logger.error `Failed to import block: ${e}`;
             state_machine_logger.error `${e instanceof Error ? e.stack : ""}`;
-            response = result_Result.error(`${e}`);
+            response = result_Result.error(`${e}`, () => `${e}`);
         }
         const encoded = encoder_Encoder.encodeObject(importBlockResultCodec, response);
         return {