@typeberry/jam 0.2.0-c3df163 → 0.2.0-f506473
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bootstrap-generator.mjs +184 -159
- package/bootstrap-generator.mjs.map +1 -1
- package/bootstrap-importer.mjs +409 -367
- package/bootstrap-importer.mjs.map +1 -1
- package/bootstrap-network.mjs +628 -589
- package/bootstrap-network.mjs.map +1 -1
- package/index.js +6928 -6797
- package/index.js.map +1 -1
- package/package.json +1 -1
package/bootstrap-importer.mjs
CHANGED
@@ -3840,7 +3840,7 @@ function resultToString(res) {
     if (res.isOk) {
         return `OK: ${typeof res.ok === "symbol" ? res.ok.toString() : res.ok}`;
     }
-    return `${res.details}\nError: ${maybeTaggedErrorToString(res.error)}`;
+    return `${res.details()}\nError: ${maybeTaggedErrorToString(res.error)}`;
 }
 /** An indication of two possible outcomes returned from a function. */
 const result_Result = {

@@ -3854,7 +3854,7 @@ const result_Result = {
         };
     },
     /** Create new [`Result`] with `Error` status. */
-    error: (error, details
+    error: (error, details) => {
         debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
         return {
             isOk: false,

@@ -3973,7 +3973,7 @@ function deepEqual(actual, expected, { context = [], errorsCollector, ignore = [
     }
     if (actual.isError && expected.isError) {
         deepEqual(actual.error, expected.error, { context: ctx.concat(["error"]), errorsCollector: errors, ignore });
-        deepEqual(actual.details, expected.details, {
+        deepEqual(actual.details(), expected.details(), {
            context: ctx.concat(["details"]),
            errorsCollector: errors,
            // display details when error does not match
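The three hunks above switch `Result` error details from an eager string to a lazy callback: `result_Result.error(error, details)` now takes a function, and consumers such as `resultToString` and `deepEqual` call `res.details()`. A minimal sketch of the new shape, assuming only the `result_Result` helper shown above (the error code and message below are made up for illustration):

    // Hypothetical call site; the message closure only runs when details() is read.
    const slot = 42;
    const res = result_Result.error("BadSlot", () => `Bad slot: ${slot}`);
    if (res.isError) {
      console.error(`${res.details()}\nError: ${res.error}`);
    }

Deferring the interpolation means error paths whose details are never printed no longer pay for building the message.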
@@ -5214,8 +5214,8 @@ class decoder_Decoder {
     /**
      * Create a new [`Decoder`] instance given a raw array of bytes as a source.
      */
-    static fromBlob(source) {
-        return new decoder_Decoder(source);
+    static fromBlob(source, context) {
+        return new decoder_Decoder(source, undefined, context);
     }
     /**
      * Decode a single object from all of the source bytes.

@@ -5510,7 +5510,7 @@ class decoder_Decoder {
     ensureHasBytes(bytes) {
         debug_check `${bytes >= 0} Negative number of bytes given.`;
         if (this.offset + bytes > this.source.length) {
-            throw new
+            throw new EndOfDataError(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
         }
     }
 }

@@ -5524,6 +5524,8 @@ function decodeVariableLengthExtraBytes(firstByte) {
     }
     return 0;
 }
+class EndOfDataError extends Error {
+}
 
 ;// CONCATENATED MODULE: ./packages/core/codec/skip.ts
 

@@ -6315,6 +6317,7 @@ class SequenceView {
 
 
 
+
 /**
  * For sequences with unknown length we need to give some size hint.
  * TODO [ToDr] [opti] This value should be updated when we run some real-data bechmarks.

@@ -6564,6 +6567,9 @@ function forEachDescriptor(descriptors, f) {
             f(k, descriptors[k]);
         }
         catch (e) {
+            if (e instanceof EndOfDataError) {
+                throw new EndOfDataError(`${key}: ${e}`);
+            }
             throw new Error(`${key}: ${e}`);
         }
     }
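The decoder now throws a dedicated `EndOfDataError` when it runs out of bytes, and `forEachDescriptor` rethrows it as the same class with the field name prefixed, so callers can still distinguish truncated input from other decoding failures. A hedged sketch of the pattern (the `decodeField` wrapper below is hypothetical, not part of the package):

    class EndOfDataError extends Error {
    }

    // Re-wrap with context while preserving the error class, mirroring the
    // forEachDescriptor hunk above.
    function decodeField(key, decode) {
      try {
        return decode();
      }
      catch (e) {
        if (e instanceof EndOfDataError) {
          throw new EndOfDataError(`${key}: ${e}`);
        }
        throw new Error(`${key}: ${e}`);
      }
    }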
@@ -10597,31 +10603,29 @@ var UpdatePreimageKind;
  * 3. Update `LookupHistory` with given value.
  */
 class UpdatePreimage {
-    serviceId;
     action;
-    constructor(
-        this.serviceId = serviceId;
+    constructor(action) {
         this.action = action;
     }
     /** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
-    static provide({
-        return new UpdatePreimage(
+    static provide({ preimage, slot }) {
+        return new UpdatePreimage({
             kind: UpdatePreimageKind.Provide,
             preimage,
             slot,
         });
     }
     /** The preimage should be removed completely from the database. */
-    static remove({
-        return new UpdatePreimage(
+    static remove({ hash, length }) {
+        return new UpdatePreimage({
             kind: UpdatePreimageKind.Remove,
             hash,
             length,
         });
     }
     /** Update the lookup history of some preimage or add a new one (request). */
-    static updateOrAdd({
-        return new UpdatePreimage(
+    static updateOrAdd({ lookupHistory }) {
+        return new UpdatePreimage({
             kind: UpdatePreimageKind.UpdateOrAdd,
             item: lookupHistory,
         });

@@ -10658,23 +10662,21 @@ var UpdateServiceKind;
     UpdateServiceKind[UpdateServiceKind["Create"] = 1] = "Create";
 })(UpdateServiceKind || (UpdateServiceKind = {}));
 /**
- * Update service info
+ * Update service info or create a new one.
  */
 class UpdateService {
-    serviceId;
     action;
-    constructor(
-        this.serviceId = serviceId;
+    constructor(action) {
         this.action = action;
     }
-    static update({
-        return new UpdateService(
+    static update({ serviceInfo }) {
+        return new UpdateService({
             kind: UpdateServiceKind.Update,
             account: serviceInfo,
         });
     }
-    static create({
-        return new UpdateService(
+    static create({ serviceInfo, lookupHistory, }) {
+        return new UpdateService({
             kind: UpdateServiceKind.Create,
             account: serviceInfo,
             lookupHistory,

@@ -10695,17 +10697,15 @@ var UpdateStorageKind;
  * Can either create/modify an entry or remove it.
  */
 class UpdateStorage {
-    serviceId;
     action;
-    constructor(
-        this.serviceId = serviceId;
+    constructor(action) {
         this.action = action;
     }
-    static set({
-        return new UpdateStorage(
+    static set({ storage }) {
+        return new UpdateStorage({ kind: UpdateStorageKind.Set, storage });
     }
-    static remove({
-        return new UpdateStorage(
+    static remove({ key }) {
+        return new UpdateStorage({ kind: UpdateStorageKind.Remove, key });
     }
     get key() {
         if (this.action.kind === UpdateStorageKind.Remove) {
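These three hunks drop the `serviceId` field from `UpdatePreimage`, `UpdateService`, and `UpdateStorage`: the static factories now receive only the action payload, and the owning service id is tracked by the surrounding collection (see the `InMemoryState` hunks that follow). A hedged sketch of the new construction style; `key`, `value`, `serviceInfo`, and `historyItem` are placeholders:

    // Each update now describes only *what* changes; *which service* it applies
    // to is the Map key in the containing state update.
    const setUpdate = UpdateStorage.set({ storage: StorageItem.create({ key, value }) });
    const removeUpdate = UpdateStorage.remove({ key });
    const infoUpdate = UpdateService.update({ serviceInfo });
    const historyUpdate = UpdatePreimage.updateOrAdd({ lookupHistory: historyItem });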
@@ -10914,12 +10914,12 @@ class in_memory_state_InMemoryState extends WithDebug {
      * Modify the state and apply a single state update.
      */
     applyUpdate(update) {
-        const {
+        const { removed, created: _, updated, preimages, storage, ...rest } = update;
         // just assign all other variables
         Object.assign(this, rest);
         // and update the services state
         let result;
-        result = this.updateServices(
+        result = this.updateServices(updated);
         if (result.isError) {
             return result;
         }

@@ -10931,7 +10931,7 @@ class in_memory_state_InMemoryState extends WithDebug {
         if (result.isError) {
             return result;
         }
-        this.removeServices(
+        this.removeServices(removed);
         return result_Result.ok(result_OK);
     }
     removeServices(servicesRemoved) {

@@ -10940,89 +10940,102 @@ class in_memory_state_InMemoryState extends WithDebug {
             this.services.delete(serviceId);
         }
     }
-    updateStorage(
-
-
-
-
-
-
-
-
-
-
-
-
-
+    updateStorage(storageUpdates) {
+        if (storageUpdates === undefined) {
+            return result_Result.ok(result_OK);
+        }
+        for (const [serviceId, updates] of storageUpdates.entries()) {
+            for (const update of updates) {
+                const { kind } = update.action;
+                const service = this.services.get(serviceId);
+                if (service === undefined) {
+                    return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update storage of non-existing service: ${serviceId}`);
+                }
+                if (kind === UpdateStorageKind.Set) {
+                    const { key, value } = update.action.storage;
+                    service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
+                }
+                else if (kind === UpdateStorageKind.Remove) {
+                    const { key } = update.action;
+                    debug_check `
         ${service.data.storage.has(key.toString())}
-        Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+        Attempting to remove non-existing storage item at ${serviceId}: ${update.action.key}
         `;
-
-
-
-
+                    service.data.storage.delete(key.toString());
+                }
+                else {
+                    debug_assertNever(kind);
+                }
             }
         }
         return result_Result.ok(result_OK);
     }
-    updatePreimages(
-
+    updatePreimages(preimagesUpdates) {
+        if (preimagesUpdates === undefined) {
+            return result_Result.ok(result_OK);
+        }
+        for (const [serviceId, updates] of preimagesUpdates.entries()) {
            const service = this.services.get(serviceId);
            if (service === undefined) {
-                return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update preimage of non-existing service: ${serviceId}`);
-            }
-            const
-
-
-
-
-
-            service.data.preimages.set(preimage.hash, preimage);
-            if (slot !== null) {
-                const lookupHistory = service.data.lookupHistory.get(preimage.hash);
-                const length = numbers_tryAsU32(preimage.blob.length);
-                const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
-                if (lookupHistory === undefined) {
-                    // no lookup history for that preimage at all (edge case, should be requested)
-                    service.data.lookupHistory.set(preimage.hash, [lookup]);
+                return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update preimage of non-existing service: ${serviceId}`);
+            }
+            for (const update of updates) {
+                const { kind } = update.action;
+                if (kind === UpdatePreimageKind.Provide) {
+                    const { preimage, slot } = update.action;
+                    if (service.data.preimages.has(preimage.hash)) {
+                        return result_Result.error(in_memory_state_UpdateError.PreimageExists, () => `Overwriting existing preimage at ${serviceId}: ${preimage}`);
                    }
-
-
-                    const
-
+                    service.data.preimages.set(preimage.hash, preimage);
+                    if (slot !== null) {
+                        const lookupHistory = service.data.lookupHistory.get(preimage.hash);
+                        const length = numbers_tryAsU32(preimage.blob.length);
+                        const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
+                        if (lookupHistory === undefined) {
+                            // no lookup history for that preimage at all (edge case, should be requested)
+                            service.data.lookupHistory.set(preimage.hash, [lookup]);
+                        }
+                        else {
+                            // insert or replace exiting entry
+                            const index = lookupHistory.map((x) => x.length).indexOf(length);
+                            lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
+                        }
                    }
                }
-
-
-
-
-
-
-
-
+                else if (kind === UpdatePreimageKind.Remove) {
+                    const { hash, length } = update.action;
+                    service.data.preimages.delete(hash);
+                    const history = service.data.lookupHistory.get(hash) ?? [];
+                    const idx = history.map((x) => x.length).indexOf(length);
+                    if (idx !== -1) {
+                        history.splice(idx, 1);
+                    }
+                }
+                else if (kind === UpdatePreimageKind.UpdateOrAdd) {
+                    const { item } = update.action;
+                    const history = service.data.lookupHistory.get(item.hash) ?? [];
+                    const existingIdx = history.map((x) => x.length).indexOf(item.length);
+                    const removeCount = existingIdx === -1 ? 0 : 1;
+                    history.splice(existingIdx, removeCount, item);
+                    service.data.lookupHistory.set(item.hash, history);
+                }
+                else {
+                    debug_assertNever(kind);
                }
-            }
-            else if (kind === UpdatePreimageKind.UpdateOrAdd) {
-                const { item } = action;
-                const history = service.data.lookupHistory.get(item.hash) ?? [];
-                const existingIdx = history.map((x) => x.length).indexOf(item.length);
-                const removeCount = existingIdx === -1 ? 0 : 1;
-                history.splice(existingIdx, removeCount, item);
-                service.data.lookupHistory.set(item.hash, history);
-            }
-            else {
-                debug_assertNever(kind);
            }
        }
        return result_Result.ok(result_OK);
    }
    updateServices(servicesUpdates) {
-
-
+        if (servicesUpdates === undefined) {
+            return result_Result.ok(result_OK);
+        }
+        for (const [serviceId, update] of servicesUpdates.entries()) {
+            const { kind, account } = update.action;
            if (kind === UpdateServiceKind.Create) {
-                const { lookupHistory } = action;
+                const { lookupHistory } = update.action;
                if (this.services.has(serviceId)) {
-                    return result_Result.error(in_memory_state_UpdateError.DuplicateService, `${serviceId} already exists!`);
+                    return result_Result.error(in_memory_state_UpdateError.DuplicateService, () => `${serviceId} already exists!`);
                }
                this.services.set(serviceId, new InMemoryService(serviceId, {
                    info: account,

@@ -11034,7 +11047,7 @@ class in_memory_state_InMemoryState extends WithDebug {
            else if (kind === UpdateServiceKind.Update) {
                const existingService = this.services.get(serviceId);
                if (existingService === undefined) {
-                    return result_Result.error(in_memory_state_UpdateError.NoService, `Cannot update ${serviceId} because it does not exist.`);
+                    return result_Result.error(in_memory_state_UpdateError.NoService, () => `Cannot update ${serviceId} because it does not exist.`);
                }
                existingService.data.info = account;
            }
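`applyUpdate` and the rewritten `updateStorage`/`updatePreimages`/`updateServices` now consume per-service maps: storage and preimage updates arrive as `Map<serviceId, update[]>`, service updates as `Map<serviceId, update>`, and each may be `undefined`. A hedged sketch of the update object implied by the destructuring above (all values are placeholders):

    // Shape implied by `const { removed, created: _, updated, preimages, storage, ...rest } = update;`
    const update = {
      created: [],
      updated: new Map([[serviceId, UpdateService.update({ serviceInfo })]]),
      removed: [retiredServiceId],
      preimages: new Map([[serviceId, [UpdatePreimage.provide({ preimage, slot })]]]),
      storage: new Map([[serviceId, [UpdateStorage.set({ storage: storageItem })]]]),
      // any remaining fields are Object.assign-ed straight onto the state
    };
    const result = state.applyUpdate(update);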
@@ -12281,7 +12294,6 @@ function getKeccakTrieHasher(hasher) {
 
 
 
-
 /** What should be done with that key? */
 var StateEntryUpdateAction;
 (function (StateEntryUpdateAction) {

@@ -12297,76 +12309,88 @@ function* serializeStateUpdate(spec, blake2b, update) {
     yield* serializeBasicKeys(spec, update);
     const encode = (codec, val) => encoder_Encoder.encodeObject(codec, val, spec);
     // then let's proceed with service updates
-    yield* serializeServiceUpdates(update.
+    yield* serializeServiceUpdates(update.updated, encode, blake2b);
     yield* serializePreimages(update.preimages, encode, blake2b);
     yield* serializeStorage(update.storage, blake2b);
-    yield* serializeRemovedServices(update.
+    yield* serializeRemovedServices(update.removed);
 }
 function* serializeRemovedServices(servicesRemoved) {
-
+    if (servicesRemoved === undefined) {
+        return;
+    }
+    for (const serviceId of servicesRemoved) {
        // TODO [ToDr] what about all data associated with a service?
        const codec = serialize_serialize.serviceData(serviceId);
        yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
    }
 }
-function* serializeStorage(
-
-
-
-
-
-
-
-
-
-
-
-
+function* serializeStorage(storageUpdates, blake2b) {
+    if (storageUpdates === undefined) {
+        return;
+    }
+    for (const [serviceId, updates] of storageUpdates.entries()) {
+        for (const { action } of updates) {
+            switch (action.kind) {
+                case UpdateStorageKind.Set: {
+                    const key = action.storage.key;
+                    const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
+                    yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
+                    break;
+                }
+                case UpdateStorageKind.Remove: {
+                    const key = action.key;
+                    const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
+                    yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+                    break;
+                }
            }
-            default:
-                debug_assertNever(action);
        }
    }
 }
-function* serializePreimages(
-
-
-
-
-
-
-
-    const
-
-
-
-
-
+function* serializePreimages(preimagesUpdates, encode, blake2b) {
+    if (preimagesUpdates === undefined) {
+        return;
+    }
+    for (const [serviceId, updates] of preimagesUpdates.entries()) {
+        for (const { action } of updates) {
+            switch (action.kind) {
+                case UpdatePreimageKind.Provide: {
+                    const { hash, blob } = action.preimage;
+                    const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
+                    yield [StateEntryUpdateAction.Insert, codec.key, blob];
+                    if (action.slot !== null) {
+                        const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, numbers_tryAsU32(blob.length));
+                        yield [
+                            StateEntryUpdateAction.Insert,
+                            codec2.key,
+                            encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
+                        ];
+                    }
+                    break;
+                }
+                case UpdatePreimageKind.UpdateOrAdd: {
+                    const { hash, length, slots } = action.item;
+                    const codec = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+                    yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
+                    break;
+                }
+                case UpdatePreimageKind.Remove: {
+                    const { hash, length } = action;
+                    const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
+                    yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+                    const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+                    yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
+                    break;
                }
-                break;
-            }
-            case UpdatePreimageKind.UpdateOrAdd: {
-                const { hash, length, slots } = action.item;
-                const codec = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
-                yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
-                break;
-            }
-            case UpdatePreimageKind.Remove: {
-                const { hash, length } = action;
-                const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
-                yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
-                const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
-                yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
-                break;
            }
-            default:
-                debug_assertNever(action);
        }
    }
 }
 function* serializeServiceUpdates(servicesUpdates, encode, blake2b) {
-
+    if (servicesUpdates === undefined) {
+        return;
+    }
+    for (const [serviceId, { action }] of servicesUpdates.entries()) {
        // new service being created or updated
        const codec = serialize_serialize.serviceData(serviceId);
        yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, action.account)];
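State serialization follows the same keying change: `serializeStorage` and `serializePreimages` now walk `Map<serviceId, update[]>` entries (returning early on `undefined`), while `serializeServiceUpdates` iterates a `Map<serviceId, update>`. A brief hedged sketch of driving the generator; `spec`, `blake2b`, and `stateUpdate` are placeholders:

    // Collect the [action, key, value] triples yielded by the serializer.
    const entries = [];
    for (const [action, key, value] of serializeStateUpdate(spec, blake2b, stateUpdate)) {
      entries.push({ action, key, value }); // action is StateEntryUpdateAction.Insert or .Remove
    }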
@@ -12653,13 +12677,13 @@ class LeafDb {
      */
     static fromLeavesBlob(blob, db) {
         if (blob.length % TRIE_NODE_BYTES !== 0) {
-            return result_Result.error(LeafDbError.InvalidLeafData, `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
+            return result_Result.error(LeafDbError.InvalidLeafData, () => `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
         }
         const leaves = SortedSet.fromArray(leafComparator, []);
         for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
             const node = new TrieNode(nodeData.raw);
             if (node.getNodeType() === NodeType.Branch) {
-                return result_Result.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
+                return result_Result.error(LeafDbError.InvalidLeafData, () => `Branch node detected: ${nodeData}`);
             }
             leaves.insert(node.asLeafNode());
         }

@@ -13378,7 +13402,7 @@ class LmdbStates {
         }
         catch (e) {
             logger.error `${e}`;
-            return result_Result.error(StateUpdateError.Commit);
+            return result_Result.error(StateUpdateError.Commit, () => `Failed to commit state update: ${e}`);
         }
         return result_Result.ok(result_OK);
     }
@@ -14098,32 +14122,33 @@ class Preimages {
             }
             if (prevPreimage.requester > currPreimage.requester ||
                 currPreimage.blob.compare(prevPreimage.blob).isLessOrEqual()) {
-                return result_Result.error(PreimagesErrorCode.PreimagesNotSortedUnique);
+                return result_Result.error(PreimagesErrorCode.PreimagesNotSortedUnique, () => `Preimages not sorted/unique at index ${i}`);
             }
         }
         const { preimages, slot } = input;
-        const pendingChanges =
+        const pendingChanges = new Map();
         // select preimages for integration
         for (const preimage of preimages) {
             const { requester, blob } = preimage;
             const hash = this.blake2b.hashBytes(blob).asOpaque();
             const service = this.state.getService(requester);
             if (service === null) {
-                return result_Result.error(PreimagesErrorCode.AccountNotFound);
+                return result_Result.error(PreimagesErrorCode.AccountNotFound, () => `Service not found: ${requester}`);
             }
             const hasPreimage = service.hasPreimage(hash);
             const slots = service.getLookupHistory(hash, numbers_tryAsU32(blob.length));
             // https://graypaper.fluffylabs.dev/#/5f542d7/181800181900
             // https://graypaper.fluffylabs.dev/#/5f542d7/116f0011a500
             if (hasPreimage || slots === null || !LookupHistoryItem.isRequested(slots)) {
-                return result_Result.error(PreimagesErrorCode.PreimageUnneeded);
+                return result_Result.error(PreimagesErrorCode.PreimageUnneeded, () => `Preimage unneeded: requester=${requester}, hash=${hash}, hasPreimage=${hasPreimage}, isRequested=${slots !== null && LookupHistoryItem.isRequested(slots)}`);
             }
             // https://graypaper.fluffylabs.dev/#/5f542d7/18c00018f300
-            pendingChanges.
-
+            const updates = pendingChanges.get(requester) ?? [];
+            updates.push(UpdatePreimage.provide({
                 preimage: PreimageItem.create({ hash, blob }),
                 slot,
             }));
+            pendingChanges.set(requester, updates);
         }
         return result_Result.ok({
             preimages: pendingChanges,
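`Preimages` now groups its pending changes by requesting service: `pendingChanges` is a `Map` from the requester id to an array of `UpdatePreimage` entries, matching the map shape consumed by the state-update code above. The grouping step in isolation (a sketch; `requester`, `hash`, `blob`, and `slot` are placeholders):

    const pendingChanges = new Map();
    // Append an update for `requester`, creating the per-service array on first use.
    const updates = pendingChanges.get(requester) ?? [];
    updates.push(UpdatePreimage.provide({ preimage: PreimageItem.create({ hash, blob }), slot }));
    pendingChanges.set(requester, updates);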
@@ -14161,7 +14186,7 @@ class BlockVerifier {
         const headerHash = this.hasher.header(headerView);
         // check if current block is already imported
         if (this.blocks.getHeader(headerHash.hash) !== null) {
-            return result_Result.error(BlockVerifierError.AlreadyImported, `Block ${headerHash.hash} is already imported.`);
+            return result_Result.error(BlockVerifierError.AlreadyImported, () => `Block ${headerHash.hash} is already imported.`);
         }
         // Check if parent block exists.
         // https://graypaper.fluffylabs.dev/#/cc517d7/0c82000c8200?v=0.6.5

@@ -14171,14 +14196,14 @@ class BlockVerifier {
         if (!parentHash.isEqualTo(block_verifier_ZERO_HASH)) {
             const parentBlock = this.blocks.getHeader(parentHash);
             if (parentBlock === null) {
-                return result_Result.error(BlockVerifierError.ParentNotFound, `Parent ${parentHash.toString()} not found`);
+                return result_Result.error(BlockVerifierError.ParentNotFound, () => `Parent ${parentHash.toString()} not found`);
             }
             // Check if the time slot index is consecutive and not from future.
             // https://graypaper.fluffylabs.dev/#/cc517d7/0c02010c0201?v=0.6.5
             const timeslot = headerView.timeSlotIndex.materialize();
             const parentTimeslot = parentBlock.timeSlotIndex.materialize();
             if (timeslot <= parentTimeslot) {
-                return result_Result.error(BlockVerifierError.InvalidTimeSlot, `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
+                return result_Result.error(BlockVerifierError.InvalidTimeSlot, () => `Invalid time slot index: ${timeslot}, expected > ${parentTimeslot}`);
             }
         }
         // Check if extrinsic is valid.

@@ -14186,17 +14211,17 @@ class BlockVerifier {
         const extrinsicHash = headerView.extrinsicHash.materialize();
         const extrinsicMerkleCommitment = this.hasher.extrinsic(block.extrinsic.view());
         if (!extrinsicHash.isEqualTo(extrinsicMerkleCommitment.hash)) {
-            return result_Result.error(BlockVerifierError.InvalidExtrinsic, `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
+            return result_Result.error(BlockVerifierError.InvalidExtrinsic, () => `Invalid extrinsic hash: ${extrinsicHash.toString()}, expected ${extrinsicMerkleCommitment.hash.toString()}`);
         }
         // Check if the state root is valid.
         // https://graypaper.fluffylabs.dev/#/cc517d7/0c18010c1801?v=0.6.5
         const stateRoot = headerView.priorStateRoot.materialize();
         const posteriorStateRoot = this.blocks.getPostStateRoot(parentHash);
         if (posteriorStateRoot === null) {
-            return result_Result.error(BlockVerifierError.StateRootNotFound, `Posterior state root ${parentHash.toString()} not found`);
+            return result_Result.error(BlockVerifierError.StateRootNotFound, () => `Posterior state root ${parentHash.toString()} not found`);
         }
         if (!stateRoot.isEqualTo(posteriorStateRoot)) {
-            return result_Result.error(BlockVerifierError.InvalidStateRoot, `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
+            return result_Result.error(BlockVerifierError.InvalidStateRoot, () => `Invalid prior state root: ${stateRoot.toString()}, expected ${posteriorStateRoot.toString()} (ours)`);
         }
         return result_Result.ok(headerHash.hash);
     }

@@ -14321,7 +14346,7 @@ class Disputes {
         // check if culprits are sorted by key
         // https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
         if (!isUniqueSortedBy(disputes.culprits, "key")) {
-            return result_Result.error(DisputesErrorCode.CulpritsNotSortedUnique);
+            return result_Result.error(DisputesErrorCode.CulpritsNotSortedUnique, () => "Culprits are not uniquely sorted by key");
         }
         const culpritsLength = disputes.culprits.length;
         for (let i = 0; i < culpritsLength; i++) {

@@ -14330,24 +14355,24 @@ class Disputes {
             // https://graypaper.fluffylabs.dev/#/579bd12/125501125501
             const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
             if (isInPunishSet) {
-                return result_Result.error(DisputesErrorCode.OffenderAlreadyReported);
+                return result_Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: culprit ${i}, key=${key}`);
             }
             // check if the guarantor key is correct
             // https://graypaper.fluffylabs.dev/#/85129da/125501125501?v=0.6.3
             if (!allValidatorKeys.has(key)) {
-                return result_Result.error(DisputesErrorCode.BadGuarantorKey);
+                return result_Result.error(DisputesErrorCode.BadGuarantorKey, () => `Bad guarantor key: culprit ${i}, key=${key}`);
             }
             // verify if the culprit will be in new bad set
             // https://graypaper.fluffylabs.dev/#/579bd12/124601124601
             const isInNewBadSet = newItems.asDictionaries().badSet.has(workReportHash);
             if (!isInNewBadSet) {
-                return result_Result.error(DisputesErrorCode.CulpritsVerdictNotBad);
+                return result_Result.error(DisputesErrorCode.CulpritsVerdictNotBad, () => `Culprit verdict not bad: culprit ${i}, work report=${workReportHash}`);
             }
             // verify culprit signature
             // https://graypaper.fluffylabs.dev/#/579bd12/125c01125c01
             const result = verificationResult.culprits[i];
             if (!result?.isValid) {
-                return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for culprit: ${i}`);
+                return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for culprit: ${i}`);
             }
         }
         return result_Result.ok(null);

@@ -14356,7 +14381,7 @@ class Disputes {
         // check if faults are sorted by key
         // https://graypaper.fluffylabs.dev/#/579bd12/12c50112c601
         if (!isUniqueSortedBy(disputes.faults, "key")) {
-            return result_Result.error(DisputesErrorCode.FaultsNotSortedUnique);
+            return result_Result.error(DisputesErrorCode.FaultsNotSortedUnique, () => "Faults are not uniquely sorted by key");
         }
         const faultsLength = disputes.faults.length;
         for (let i = 0; i < faultsLength; i++) {

@@ -14365,12 +14390,12 @@ class Disputes {
             // https://graypaper.fluffylabs.dev/#/579bd12/12a20112a201
             const isInPunishSet = this.state.disputesRecords.asDictionaries().punishSet.has(key);
             if (isInPunishSet) {
-                return result_Result.error(DisputesErrorCode.OffenderAlreadyReported);
+                return result_Result.error(DisputesErrorCode.OffenderAlreadyReported, () => `Offender already reported: fault ${i}, key=${key}`);
             }
             // check if the auditor key is correct
             // https://graypaper.fluffylabs.dev/#/85129da/12a20112a201?v=0.6.3
             if (!allValidatorKeys.has(key)) {
-                return result_Result.error(DisputesErrorCode.BadAuditorKey);
+                return result_Result.error(DisputesErrorCode.BadAuditorKey, () => `Bad auditor key: fault ${i}, key=${key}`);
             }
             // verify if the fault will be included in new good/bad set
             // it may be not correct as in GP there is "iff" what means it should be rather

@@ -14382,14 +14407,14 @@ class Disputes {
                 const isInNewGoodSet = goodSet.has(workReportHash);
                 const isInNewBadSet = badSet.has(workReportHash);
                 if (isInNewGoodSet || !isInNewBadSet) {
-                    return result_Result.error(DisputesErrorCode.FaultVerdictWrong);
+                    return result_Result.error(DisputesErrorCode.FaultVerdictWrong, () => `Fault verdict wrong: fault ${i}, work report=${workReportHash}, inGood=${isInNewGoodSet}, inBad=${isInNewBadSet}`);
                 }
             }
             // verify fault signature. Verification was done earlier, here we only check the result.
             // https://graypaper.fluffylabs.dev/#/579bd12/12a90112a901
             const result = verificationResult.faults[i];
             if (!result.isValid) {
-                return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for fault: ${i}`);
+                return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for fault: ${i}`);
             }
         }
         return result_Result.ok(null);

@@ -14398,32 +14423,32 @@ class Disputes {
         // check if verdicts are correctly sorted
         // https://graypaper.fluffylabs.dev/#/579bd12/12c40112c401
         if (!isUniqueSortedBy(disputes.verdicts, "workReportHash")) {
-            return result_Result.error(DisputesErrorCode.VerdictsNotSortedUnique);
+            return result_Result.error(DisputesErrorCode.VerdictsNotSortedUnique, () => "Verdicts are not uniquely sorted by work report hash");
         }
         // check if judgement are correctly sorted
         // https://graypaper.fluffylabs.dev/#/579bd12/123702123802
         if (disputes.verdicts.some((verdict) => !isUniqueSortedByIndex(verdict.votes))) {
-            return result_Result.error(DisputesErrorCode.JudgementsNotSortedUnique);
+            return result_Result.error(DisputesErrorCode.JudgementsNotSortedUnique, () => "Judgements are not uniquely sorted by index");
         }
         const currentEpoch = Math.floor(this.state.timeslot / this.chainSpec.epochLength);
         let voteSignatureIndex = 0;
         for (const { votesEpoch, votes } of disputes.verdicts) {
             // https://graypaper.fluffylabs.dev/#/579bd12/12bb0012bc00
             if (votesEpoch !== currentEpoch && votesEpoch + 1 !== currentEpoch) {
-                return result_Result.error(DisputesErrorCode.BadJudgementAge);
+                return result_Result.error(DisputesErrorCode.BadJudgementAge, () => `Bad judgement age: epoch=${votesEpoch}, current=${currentEpoch}`);
             }
             const k = votesEpoch === currentEpoch ? this.state.currentValidatorData : this.state.previousValidatorData;
             for (const { index } of votes) {
                 const key = k[index]?.ed25519;
                 // no particular GP fragment but I think we don't believe in ghosts
                 if (key === undefined) {
-                    return result_Result.error(DisputesErrorCode.BadValidatorIndex);
+                    return result_Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index: ${index} in epoch ${votesEpoch}`);
                 }
                 // verify vote signature. Verification was done earlier, here we only check the result.
                 // https://graypaper.fluffylabs.dev/#/579bd12/12cd0012cd00
                 const result = verificationResult.judgements[voteSignatureIndex];
                 if (!result.isValid) {
-                    return result_Result.error(DisputesErrorCode.BadSignature, `Invalid signature for judgement: ${voteSignatureIndex}`);
+                    return result_Result.error(DisputesErrorCode.BadSignature, () => `Invalid signature for judgement: ${voteSignatureIndex}`);
                 }
                 voteSignatureIndex += 1;
             }

@@ -14439,7 +14464,7 @@ class Disputes {
             const isInBadSet = badSet.has(verdict.workReportHash);
             const isInWonkySet = wonkySet.has(verdict.workReportHash);
             if (isInGoodSet || isInBadSet || isInWonkySet) {
-                return result_Result.error(DisputesErrorCode.AlreadyJudged);
+                return result_Result.error(DisputesErrorCode.AlreadyJudged, () => `Work report already judged: ${verdict.workReportHash}`);
             }
         }
         return result_Result.ok(null);

@@ -14470,7 +14495,7 @@ class Disputes {
                 // https://graypaper.fluffylabs.dev/#/579bd12/12f10212fc02
                 const f = disputes.faults.find((x) => x.workReportHash.isEqualTo(r));
                 if (f === undefined) {
-                    return result_Result.error(DisputesErrorCode.NotEnoughFaults);
+                    return result_Result.error(DisputesErrorCode.NotEnoughFaults, () => `Not enough faults for work report: ${r}`);
                 }
             }
             else if (sum === 0) {

@@ -14479,13 +14504,13 @@ class Disputes {
                 const c1 = disputes.culprits.find((x) => x.workReportHash.isEqualTo(r));
                 const c2 = disputes.culprits.findLast((x) => x.workReportHash.isEqualTo(r));
                 if (c1 === c2) {
-                    return result_Result.error(DisputesErrorCode.NotEnoughCulprits);
+                    return result_Result.error(DisputesErrorCode.NotEnoughCulprits, () => `Not enough culprits for work report: ${r}`);
                 }
             }
             else if (sum !== this.chainSpec.thirdOfValidators) {
                 // positive votes count is not correct
                 // https://graypaper.fluffylabs.dev/#/579bd12/125002128102
-                return result_Result.error(DisputesErrorCode.BadVoteSplit);
+                return result_Result.error(DisputesErrorCode.BadVoteSplit, () => `Bad vote split: sum=${sum}, expected=${this.chainSpec.thirdOfValidators} for work report ${r}`);
             }
         }
         return result_Result.ok(null);

@@ -14573,7 +14598,7 @@ class Disputes {
             const validator = k[j.index];
             // no particular GP fragment but I think we don't believe in ghosts
             if (validator === undefined) {
-                return result_Result.error(DisputesErrorCode.BadValidatorIndex);
+                return result_Result.error(DisputesErrorCode.BadValidatorIndex, () => `Bad validator index in signature verification: ${j.index}`);
             }
             const key = validator.ed25519;
             // verify vote signature
@@ -14681,7 +14706,7 @@ const ringCommitmentCache = [];
 async function verifySeal(bandersnatch, authorKey, signature, payload, encodedUnsealedHeader) {
     const sealResult = await bandersnatch.verifySeal(authorKey.raw, signature.raw, payload.raw, encodedUnsealedHeader.raw);
     if (sealResult[RESULT_INDEX] === ResultValues.Error) {
-        return result_Result.error(null);
+        return result_Result.error(null, () => "Bandersnatch VRF seal verification failed");
     }
     return result_Result.ok(bytes_Bytes.fromBlob(sealResult.subarray(1), hash_HASH_SIZE).asOpaque());
 }

@@ -14707,7 +14732,7 @@ function getRingCommitment(bandersnatch, validators) {
 async function getRingCommitmentNoCache(bandersnatch, keys) {
     const commitmentResult = await bandersnatch.getRingCommitment(keys.raw);
     if (commitmentResult[RESULT_INDEX] === ResultValues.Error) {
-        return result_Result.error(null);
+        return result_Result.error(null, () => "Bandersnatch ring commitment calculation failed");
     }
     return result_Result.ok(bytes_Bytes.fromBlob(commitmentResult.subarray(1), bandersnatch_BANDERSNATCH_RING_ROOT_BYTES).asOpaque());
 }

@@ -14882,7 +14907,7 @@ class Safrole {
                 epochRoot: epochRootResult.ok,
             });
         }
-        return result_Result.error(SafroleErrorCode.IncorrectData);
+        return result_Result.error(SafroleErrorCode.IncorrectData, () => "Safrole: failed to get epoch root for validator keys");
     }
     /**
      * Ticket sequencer that is used in standard mode

@@ -14973,10 +14998,10 @@ class Safrole {
         for (let i = 1; i < ticketsLength; i++) {
             const order = tickets[i - 1].id.compare(tickets[i].id);
             if (order.isEqual()) {
-                return result_Result.error(SafroleErrorCode.DuplicateTicket);
+                return result_Result.error(SafroleErrorCode.DuplicateTicket, () => `Safrole: duplicate ticket found at index ${i}`);
             }
             if (order.isGreater()) {
-                return result_Result.error(SafroleErrorCode.BadTicketOrder);
+                return result_Result.error(SafroleErrorCode.BadTicketOrder, () => `Safrole: bad ticket order at index ${i}`);
             }
         }
         return result_Result.ok(null);

@@ -15003,7 +15028,7 @@ class Safrole {
             attempt: ticket.attempt,
         }));
         if (!verificationResult.every((x) => x.isValid)) {
-            return result_Result.error(SafroleErrorCode.BadTicketProof);
+            return result_Result.error(SafroleErrorCode.BadTicketProof, () => "Safrole: invalid ticket proof in extrinsic");
         }
         /**
          * Verify if tickets are sorted and unique

@@ -15012,7 +15037,7 @@ class Safrole {
          */
         const ticketsVerifcationResult = this.verifyTickets(tickets);
         if (ticketsVerifcationResult.isError) {
-            return result_Result.error(ticketsVerifcationResult.error);
+            return result_Result.error(ticketsVerifcationResult.error, ticketsVerifcationResult.details);
         }
         if (this.isEpochChanged(timeslot)) {
             return result_Result.ok(tickets);

@@ -15021,7 +15046,7 @@ class Safrole {
         const ticketsFromExtrinsic = SortedSet.fromSortedArray(ticketComparator, tickets);
         const mergedTickets = SortedSet.fromTwoSortedCollections(ticketsFromState, ticketsFromExtrinsic);
         if (ticketsFromState.length + ticketsFromExtrinsic.length !== mergedTickets.length) {
-            return result_Result.error(SafroleErrorCode.DuplicateTicket);
+            return result_Result.error(SafroleErrorCode.DuplicateTicket, () => "Safrole: duplicate ticket when merging state and extrinsic tickets");
         }
         /**
          * Remove tickets if size of accumulator exceeds E (epoch length).

@@ -15090,24 +15115,24 @@ class Safrole {
     }
     async transition(input) {
         if (this.state.timeslot >= input.slot) {
-            return result_Result.error(SafroleErrorCode.BadSlot);
+            return result_Result.error(SafroleErrorCode.BadSlot, () => `Safrole: bad slot, state timeslot ${this.state.timeslot} >= input slot ${input.slot}`);
         }
         if (!this.isExtrinsicLengthValid(input.slot, input.extrinsic)) {
-            return result_Result.error(SafroleErrorCode.UnexpectedTicket);
+            return result_Result.error(SafroleErrorCode.UnexpectedTicket, () => `Safrole: unexpected ticket, invalid extrinsic length ${input.extrinsic.length}`);
         }
         if (!this.areTicketAttemptsValid(input.extrinsic)) {
-            return result_Result.error(SafroleErrorCode.BadTicketAttempt);
+            return result_Result.error(SafroleErrorCode.BadTicketAttempt, () => "Safrole: bad ticket attempt value in extrinsic");
         }
         const validatorKeysResult = await this.getValidatorKeys(input.slot, input.punishSet);
         if (validatorKeysResult.isError) {
-            return result_Result.error(validatorKeysResult.error);
+            return result_Result.error(validatorKeysResult.error, validatorKeysResult.details);
         }
         const { nextValidatorData, currentValidatorData, previousValidatorData, epochRoot } = validatorKeysResult.ok;
         const entropy = this.getEntropy(input.slot, input.entropy);
         const sealingKeySeries = this.getSlotKeySequence(input.slot, currentValidatorData, entropy[2]);
         const newTicketsAccumulatorResult = await this.getNewTicketAccumulator(input.slot, input.extrinsic, this.state.nextValidatorData, epochRoot, entropy[2]);
         if (newTicketsAccumulatorResult.isError) {
-            return result_Result.error(newTicketsAccumulatorResult.error);
+            return result_Result.error(newTicketsAccumulatorResult.error, newTicketsAccumulatorResult.details);
         }
         const stateUpdate = {
             nextValidatorData,
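When a nested `Result` is re-wrapped, the lazy `details` callback can simply be forwarded, as `transition` does above for `validatorKeysResult` and `newTicketsAccumulatorResult`. A minimal hedged sketch of the propagation pattern (`safrole.getValidatorKeys` stands in for any inner call that returns a `Result`):

    async function step(safrole, input) {
      const inner = await safrole.getValidatorKeys(input.slot, input.punishSet);
      if (inner.isError) {
        // Forward both the error code and the still-lazy details callback.
        return result_Result.error(inner.error, inner.details);
      }
      return inner;
    }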
@@ -15141,14 +15166,14 @@ function compareWithEncoding(chainSpec, error, actual, expected, codec) {
     if (actual === null || expected === null) {
         // if one of them is `null`, both need to be.
         if (actual !== expected) {
-            return result_Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
+            return result_Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected}, got: ${actual}`);
         }
         return result_Result.ok(result_OK);
     }
     // compare the literal encoding.
     const encoded = encoder_Encoder.encodeObject(codec, actual, chainSpec);
     if (!encoded.isEqualTo(expected.encoded())) {
-        return result_Result.error(error, `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
+        return result_Result.error(error, () => `${SafroleErrorCode[error]} Expected: ${expected.encoded()}, got: ${encoded}`);
     }
     return result_Result.ok(result_OK);
 }

@@ -15191,7 +15216,7 @@ class SafroleSeal {
         const blockAuthorKey = state.currentValidatorData.at(blockAuthorIndex)?.bandersnatch;
         const entropySourceResult = await bandersnatch_vrf.verifySeal(await this.bandersnatch, blockAuthorKey ?? BANDERSNATCH_ZERO_KEY, headerView.entropySource.materialize(), payload, bytes_BytesBlob.blobFromNumbers([]));
         if (entropySourceResult.isError) {
-            return result_Result.error(SafroleSealError.IncorrectEntropySource);
+            return result_Result.error(SafroleSealError.IncorrectEntropySource, () => "Safrole: incorrect entropy source in header seal");
         }
         return result_Result.ok(entropySourceResult.ok);
     }

@@ -15200,7 +15225,7 @@ class SafroleSeal {
         const validatorIndex = headerView.bandersnatchBlockAuthorIndex.materialize();
         const authorKeys = state.currentValidatorData.at(validatorIndex);
         if (authorKeys === undefined) {
-            return result_Result.error(SafroleSealError.InvalidValidatorIndex);
+            return result_Result.error(SafroleSealError.InvalidValidatorIndex, () => `Safrole: invalid validator index ${validatorIndex}`);
         }
         const timeSlot = headerView.timeSlotIndex.materialize();
         const sealingKeys = state.sealingKeySeries;

@@ -15219,10 +15244,10 @@ class SafroleSeal {
         const authorKey = validatorData.bandersnatch;
         const result = await bandersnatch_vrf.verifySeal(await this.bandersnatch, authorKey ?? BANDERSNATCH_ZERO_KEY, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
         if (result.isError) {
-            return result_Result.error(SafroleSealError.IncorrectSeal);
+            return result_Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with ticket");
         }
         if (ticket === undefined || !ticket.id.isEqualTo(result.ok)) {
-            return result_Result.error(SafroleSealError.InvalidTicket);
+            return result_Result.error(SafroleSealError.InvalidTicket, () => `Safrole: invalid ticket, expected ${ticket?.id} got ${result.ok}`);
         }
         return result_Result.ok(result.ok);
     }

@@ -15232,13 +15257,13 @@ class SafroleSeal {
         const sealingKey = keys.at(index);
         const authorBandersnatchKey = authorKey.bandersnatch;
         if (sealingKey === undefined || !sealingKey.isEqualTo(authorBandersnatchKey)) {
-            return result_Result.error(SafroleSealError.InvalidValidator, `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
+            return result_Result.error(SafroleSealError.InvalidValidator, () => `Invalid Validator. Expected: ${sealingKey}, got: ${authorKey.bandersnatch}`);
         }
         // verify seal correctness
         const payload = bytes_BytesBlob.blobFromParts(JAM_FALLBACK_SEAL, entropy.raw);
         const result = await bandersnatch_vrf.verifySeal(await this.bandersnatch, authorBandersnatchKey, headerView.seal.materialize(), payload, encodeUnsealedHeader(headerView));
         if (result.isError) {
-            return result_Result.error(SafroleSealError.IncorrectSeal);
+            return result_Result.error(SafroleSealError.IncorrectSeal, () => "Safrole: incorrect seal with keys");
         }
         return result_Result.ok(result.ok);
     }
@@ -15281,6 +15306,14 @@ async function getRootHash(yieldedRoots) {
 
 
 const InsufficientFundsError = "insufficient funds";
+/** Deep clone of a map with array. */
+function deepCloneMapWithArray(map) {
+    const cloned = [];
+    for (const [k, v] of map.entries()) {
+        cloned.push([k, v.slice()]);
+    }
+    return new Map(cloned);
+}
 /**
  * State updates that currently accumulating service produced.
  *
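The new `deepCloneMapWithArray` helper copies the outer `Map` and each value array (via `slice()`), but not the array elements themselves. A short sketch of the resulting aliasing behaviour:

    const original = new Map([["svc", [{ kind: "Set" }]]]);
    const copy = deepCloneMapWithArray(original);
    copy.get("svc").push({ kind: "Remove" });
    console.log(original.get("svc").length);                    // 1 - the arrays are independent
    console.log(copy.get("svc")[0] === original.get("svc")[0]); // true - the elements are shared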
@@ -15310,10 +15343,11 @@ class AccumulationStateUpdate {
     /** Create new empty state update. */
     static empty() {
         return new AccumulationStateUpdate({
-
-
-
-
+            created: [],
+            updated: new Map(),
+            removed: [],
+            preimages: new Map(),
+            storage: new Map(),
         }, []);
     }
     /** Create a state update with some existing, yet uncommited services updates. */

@@ -15325,10 +15359,13 @@ class AccumulationStateUpdate {
     /** Create a copy of another `StateUpdate`. Used by checkpoints. */
     static copyFrom(from) {
         const serviceUpdates = {
-
-
-
-
+            // shallow copy
+            created: [...from.services.created],
+            updated: new Map(from.services.updated),
+            removed: [...from.services.removed],
+            // deep copy
+            preimages: deepCloneMapWithArray(from.services.preimages),
+            storage: deepCloneMapWithArray(from.services.storage),
         };
         const transfers = [...from.transfers];
         const update = new AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
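`copyFrom` (used for accumulation checkpoints) copies `created`, `updated`, and `removed` shallowly, but deep-clones the per-service `preimages` and `storage` arrays so that updates recorded after the checkpoint cannot mutate it. A hedged sketch of why the array clone matters; `live`, `serviceId`, and `key` are placeholders:

    const checkpoint = AccumulationStateUpdate.copyFrom(live);
    // Appending a storage update to the live state afterwards...
    const storages = live.services.storage.get(serviceId) ?? [];
    storages.push(UpdateStorage.remove({ key }));
    live.services.storage.set(serviceId, storages);
    // ...leaves checkpoint.services.storage.get(serviceId) unchanged.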
@@ -15376,9 +15413,9 @@ class PartiallyUpdatedState {
         if (destination === null) {
             return null;
         }
-        const
-        if (
-        return
+        const maybeUpdatedServiceInfo = this.stateUpdate.services.updated.get(destination);
+        if (maybeUpdatedServiceInfo !== undefined) {
+            return maybeUpdatedServiceInfo.action.account;
         }
         const maybeService = this.state.getService(destination);
         if (maybeService === null) {

@@ -15387,7 +15424,8 @@ class PartiallyUpdatedState {
         return maybeService.getInfo();
     }
     getStorage(serviceId, rawKey) {
-        const
+        const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+        const item = storages.find((x) => x.key.isEqualTo(rawKey));
         if (item !== undefined) {
             return item.value;
         }

@@ -15402,10 +15440,11 @@ class PartiallyUpdatedState {
      * the existence in `preimages` map.
      */
     hasPreimage(serviceId, hash) {
-        const
+        const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+        const providedPreimage = preimages.find(
         // we ignore the action here, since if there is <any> update on that
         // hash it means it has to exist, right?
-        (p) => p.
+        (p) => p.hash.isEqualTo(hash));
         if (providedPreimage !== undefined) {
             return true;
         }

@@ -15418,7 +15457,8 @@ class PartiallyUpdatedState {
     }
     getPreimage(serviceId, hash) {
         // TODO [ToDr] Should we verify availability here?
-        const
+        const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+        const freshlyProvided = preimages.find((x) => x.hash.isEqualTo(hash));
         if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
             return freshlyProvided.action.preimage.blob;
         }

@@ -15427,10 +15467,11 @@ class PartiallyUpdatedState {
     }
     /** Get status of a preimage of current service taking into account any updates. */
     getLookupHistory(currentTimeslot, serviceId, hash, length) {
+        const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
         // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
         // the same state update. We should however switch to proper "updated state"
         // representation soon.
-        const updatedPreimage =
+        const updatedPreimage = preimages.findLast((update) => update.hash.isEqualTo(hash) && BigInt(update.length) === length);
         const stateFallback = () => {
             // fallback to state lookup
             const service = this.state.getService(serviceId);

@@ -15467,14 +15508,15 @@ class PartiallyUpdatedState {
     /* State update functions. */
     updateStorage(serviceId, key, value) {
         const update = value === null
-            ? UpdateStorage.remove({
+            ? UpdateStorage.remove({ key })
             : UpdateStorage.set({
-                serviceId,
                 storage: StorageItem.create({ key, value }),
             });
-        const
+        const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+        const index = storages.findIndex((x) => x.key.isEqualTo(key));
         const count = index === -1 ? 0 : 1;
-
+        storages.splice(index, count, update);
+        this.stateUpdate.services.storage.set(serviceId, storages);
     }
     /**
      * Update a preimage.
@@ -15482,8 +15524,10 @@ class PartiallyUpdatedState {
|
|
|
15482
15524
|
* Note we store all previous entries as well, since there might be a sequence of:
|
|
15483
15525
|
* `provide` -> `remove` and both should update the end state somehow.
|
|
15484
15526
|
*/
|
|
15485
|
-
updatePreimage(newUpdate) {
|
|
15486
|
-
this.stateUpdate.services.preimages.
|
|
15527
|
+
updatePreimage(serviceId, newUpdate) {
|
|
15528
|
+
const updatePreimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
|
|
15529
|
+
updatePreimages.push(newUpdate);
|
|
15530
|
+
this.stateUpdate.services.preimages.set(serviceId, updatePreimages);
|
|
15487
15531
|
}
|
|
15488
15532
|
updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
|
|
15489
15533
|
debug_check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
|
|
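The accessors and mutators above share one per-service pattern: read the service's update list with .get(serviceId) ?? [], search or modify that array, and write it back with .set(...). A reduced, self-contained sketch of the storage case (plain string keys instead of the package's key objects):

function upsertStorageUpdate(storageMap, serviceId, update) {
  const storages = storageMap.get(serviceId) ?? [];
  const index = storages.findIndex((x) => x.key === update.key);
  const count = index === -1 ? 0 : 1;
  // Replace an existing entry for the same key, or insert the new one.
  storages.splice(index, count, update);
  storageMap.set(serviceId, storages);
}

const storage = new Map();
upsertStorageUpdate(storage, 5, { key: "k", value: "v1" });
upsertStorageUpdate(storage, 5, { key: "k", value: "v2" });
console.log(storage.get(5)); // [{ key: "k", value: "v2" }]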
@@ -15492,11 +15536,11 @@ class PartiallyUpdatedState {
 const overflowBytes = !isU64(bytes);
 // TODO [ToDr] this is not specified in GP, but it seems sensible.
 if (overflowItems || overflowBytes) {
-return result_Result.error(InsufficientFundsError);
+return result_Result.error(InsufficientFundsError, () => `Storage utilisation overflow: items=${overflowItems}, bytes=${overflowBytes}`);
 }
 const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
 if (serviceInfo.balance < thresholdBalance) {
-return result_Result.error(InsufficientFundsError);
+return result_Result.error(InsufficientFundsError, () => `Service balance (${serviceInfo.balance}) below threshold (${thresholdBalance})`);
 }
 // Update service info with new details.
 this.updateServiceInfo(serviceId, ServiceAccountInfo.create({
@@ -15507,20 +15551,23 @@ class PartiallyUpdatedState {
 return result_Result.ok(result_OK);
 }
 updateServiceInfo(serviceId, newInfo) {
-const
-
-
-if (existingItem?.action.kind === UpdateServiceKind.Create) {
-this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.create({
-serviceId,
+const existingUpdate = this.stateUpdate.services.updated.get(serviceId);
+if (existingUpdate?.action.kind === UpdateServiceKind.Create) {
+this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
 serviceInfo: newInfo,
-lookupHistory:
+lookupHistory: existingUpdate.action.lookupHistory,
 }));
 return;
 }
-this.stateUpdate.services.
-
+this.stateUpdate.services.updated.set(serviceId, UpdateService.update({
+serviceInfo: newInfo,
+}));
+}
+createService(serviceId, newInfo, newLookupHistory) {
+this.stateUpdate.services.created.push(serviceId);
+this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
 serviceInfo: newInfo,
+lookupHistory: newLookupHistory,
 }));
 }
 getPrivilegedServices() {
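With the reworked bookkeeping a new service is recorded in two places: its id is pushed onto created, and its pending UpdateService.create entry is keyed by id in the updated map, so a later updateServiceInfo can see the pending creation and keep its lookup history. A stripped-down sketch with hypothetical stand-ins for UpdateService:

// Hypothetical stand-ins; the real UpdateService objects carry more data.
const UpdateService = {
  create: ({ serviceInfo, lookupHistory }) => ({ action: { kind: "Create", lookupHistory }, serviceInfo }),
  update: ({ serviceInfo }) => ({ action: { kind: "Update" }, serviceInfo }),
};

const services = { created: [], updated: new Map() };

function createService(serviceId, info, lookupHistory) {
  services.created.push(serviceId);
  services.updated.set(serviceId, UpdateService.create({ serviceInfo: info, lookupHistory }));
}

function updateServiceInfo(serviceId, newInfo) {
  const existing = services.updated.get(serviceId);
  if (existing?.action.kind === "Create") {
    // Still a pending creation: keep the lookup history, just swap the info.
    services.updated.set(serviceId, UpdateService.create({ serviceInfo: newInfo, lookupHistory: existing.action.lookupHistory }));
    return;
  }
  services.updated.set(serviceId, UpdateService.update({ serviceInfo: newInfo }));
}

createService(1, { balance: 10n }, []);
updateServiceInfo(1, { balance: 20n });
console.log(services.updated.get(1).action.kind); // "Create": the pending creation is preserved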
@@ -16996,7 +17043,7 @@ class ReadablePage extends MemoryPage {
 loadInto(result, startIndex, length) {
 const endIndex = startIndex + length;
 if (endIndex > PAGE_SIZE) {
-return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE));
+return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
 }
 const bytes = this.data.subarray(startIndex, endIndex);
 // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -17005,7 +17052,7 @@ class ReadablePage extends MemoryPage {
 return result_Result.ok(result_OK);
 }
 storeFrom(_address, _data) {
-return result_Result.error(PageFault.fromMemoryIndex(this.start, true));
+return result_Result.error(PageFault.fromMemoryIndex(this.start, true), () => `Page fault: attempted to write to read-only page at ${this.start}`);
 }
 setData(pageIndex, data) {
 this.data.set(data, pageIndex);
@@ -17039,7 +17086,7 @@ class WriteablePage extends MemoryPage {
 loadInto(result, startIndex, length) {
 const endIndex = startIndex + length;
 if (endIndex > PAGE_SIZE) {
-return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE));
+return result_Result.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE}`);
 }
 const bytes = this.view.subarray(startIndex, endIndex);
 // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -17125,7 +17172,7 @@ class Memory {
 memory_logger.insane `MEM[${address}] <- ${bytes_BytesBlob.blobFrom(bytes)}`;
 const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
 if (pagesResult.isError) {
-return result_Result.error(pagesResult.error);
+return result_Result.error(pagesResult.error, pagesResult.details);
 }
 const pages = pagesResult.ok;
 let currentPosition = address;
@@ -17150,14 +17197,14 @@ class Memory {
 const pages = [];
 for (const pageNumber of pageRange) {
 if (pageNumber < RESERVED_NUMBER_OF_PAGES) {
-return result_Result.error(PageFault.fromPageNumber(pageNumber, true));
+return result_Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to access reserved page ${pageNumber}`);
 }
 const page = this.memory.get(pageNumber);
 if (page === undefined) {
-return result_Result.error(PageFault.fromPageNumber(pageNumber));
+return result_Result.error(PageFault.fromPageNumber(pageNumber), () => `Page fault: page ${pageNumber} not allocated`);
 }
 if (accessType === AccessType.WRITE && !page.isWriteable()) {
-return result_Result.error(PageFault.fromPageNumber(pageNumber, true));
+return result_Result.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to write to read-only page ${pageNumber}`);
 }
 pages.push(page);
 }
@@ -17175,7 +17222,7 @@ class Memory {
 }
 const pagesResult = this.getPages(startAddress, result.length, AccessType.READ);
 if (pagesResult.isError) {
-return result_Result.error(pagesResult.error);
+return result_Result.error(pagesResult.error, pagesResult.details);
 }
 const pages = pagesResult.ok;
 let currentPosition = startAddress;
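Note that the two Memory call sites above do not build a new message when they re-wrap a page fault: they forward the inner result's details callback unchanged, so the original lazy description survives the extra layer. Sketched with the same simplified Result stand-in used earlier (assumed shapes, not the package API):

const Result = {
  ok: (value) => ({ isOk: true, isError: false, ok: value }),
  error: (error, details) => ({ isOk: false, isError: true, error, details }),
};

function getPage(pageNumber) {
  // Inner failure with a lazy description.
  return Result.error("PageFault", () => `Page fault: page ${pageNumber} not allocated`);
}

function storeIntoPage(pageNumber) {
  const page = getPage(pageNumber);
  if (page.isError) {
    // Re-wrap without re-stringifying: pass the same details thunk through.
    return Result.error(page.error, page.details);
  }
  return Result.ok(undefined);
}

console.log(storeIntoPage(9).details()); // "Page fault: page 9 not allocated"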
@@ -19114,7 +19161,7 @@ class ProgramDecoder {
 }
 catch (e) {
 program_decoder_logger.error `Invalid program: ${e}`;
-return result_Result.error(ProgramDecoderError.InvalidProgramError);
+return result_Result.error(ProgramDecoderError.InvalidProgramError, () => `Program decoder error: ${e}`);
 }
 }
 }
@@ -19857,10 +19904,10 @@ class AccumulateExternalities {
 const len = existingPreimage.slots.length;
 // https://graypaper.fluffylabs.dev/#/9a08063/380901380901?v=0.6.6
 if (len === PreimageStatusKind.Requested) {
-return result_Result.error(RequestPreimageError.AlreadyRequested);
+return result_Result.error(RequestPreimageError.AlreadyRequested, () => `Preimage already requested: hash=${hash}`);
 }
 if (len === PreimageStatusKind.Available || len === PreimageStatusKind.Reavailable) {
-return result_Result.error(RequestPreimageError.AlreadyAvailable);
+return result_Result.error(RequestPreimageError.AlreadyAvailable, () => `Preimage already available: hash=${hash}`);
 }
 // TODO [ToDr] Not sure if we should update the service info in that case,
 // but for now we let that case fall-through.
@@ -19885,15 +19932,13 @@ class AccumulateExternalities {
 const clampedLength = clampU64ToU32(length);
 if (existingPreimage === null) {
 // https://graypaper.fluffylabs.dev/#/9a08063/38a60038a600?v=0.6.6
-this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
-serviceId: this.currentServiceId,
+this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
 lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([])),
 }));
 }
 else {
 /** https://graypaper.fluffylabs.dev/#/9a08063/38ca0038ca00?v=0.6.6 */
-this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
-serviceId: this.currentServiceId,
+this.updatedState.updatePreimage(this.currentServiceId, UpdatePreimage.updateOrAdd({
 lookupHistory: new LookupHistoryItem(hash, clampedLength, tryAsLookupHistorySlots([...existingPreimage.slots, this.currentTimeslot])),
 }));
 }
@@ -19903,7 +19948,7 @@ class AccumulateExternalities {
 const serviceId = this.currentServiceId;
 const status = this.updatedState.getLookupHistory(this.currentTimeslot, this.currentServiceId, hash, length);
 if (status === null) {
-return result_Result.error(ForgetPreimageError.NotFound);
+return result_Result.error(ForgetPreimageError.NotFound, () => `Preimage not found: hash=${hash}, length=${length}`);
 }
 const s = slotsToPreimageStatus(status.slots);
 const updateStorageUtilisation = () => {
@@ -19916,10 +19961,9 @@ class AccumulateExternalities {
 if (s.status === PreimageStatusKind.Requested) {
 const res = updateStorageUtilisation();
 if (res.isError) {
-return result_Result.error(ForgetPreimageError.StorageUtilisationError);
+return result_Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
 }
-this.updatedState.updatePreimage(UpdatePreimage.remove({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
 hash: status.hash,
 length: status.length,
 }));
@@ -19932,21 +19976,19 @@ class AccumulateExternalities {
 if (y < t - this.chainSpec.preimageExpungePeriod) {
 const res = updateStorageUtilisation();
 if (res.isError) {
-return result_Result.error(ForgetPreimageError.StorageUtilisationError);
+return result_Result.error(ForgetPreimageError.StorageUtilisationError, res.details);
 }
-this.updatedState.updatePreimage(UpdatePreimage.remove({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.remove({
 hash: status.hash,
 length: status.length,
 }));
 return result_Result.ok(result_OK);
 }
-return result_Result.error(ForgetPreimageError.NotExpired);
+return result_Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
 }
 // https://graypaper.fluffylabs.dev/#/9a08063/38c80138c801?v=0.6.6
 if (s.status === PreimageStatusKind.Available) {
-this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
 lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[0], t])),
 }));
 return result_Result.ok(result_OK);
@@ -19955,13 +19997,12 @@ class AccumulateExternalities {
 if (s.status === PreimageStatusKind.Reavailable) {
 const y = s.data[1];
 if (y < t - this.chainSpec.preimageExpungePeriod) {
-this.updatedState.updatePreimage(UpdatePreimage.updateOrAdd({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.updateOrAdd({
 lookupHistory: new LookupHistoryItem(status.hash, status.length, tryAsLookupHistorySlots([s.data[2], t])),
 }));
 return result_Result.ok(result_OK);
 }
-return result_Result.error(ForgetPreimageError.NotExpired);
+return result_Result.error(ForgetPreimageError.NotExpired, () => `Preimage not expired: y=${y}, timeslot=${t}, period=${this.chainSpec.preimageExpungePeriod}`);
 }
 debug_assertNever(s);
 }
@@ -19970,17 +20011,17 @@ class AccumulateExternalities {
 const destination = this.getServiceInfo(destinationId);
 /** https://graypaper.fluffylabs.dev/#/9a08063/370401370401?v=0.6.6 */
 if (destination === null || destinationId === null) {
-return result_Result.error(TransferError.DestinationNotFound);
+return result_Result.error(TransferError.DestinationNotFound, () => `Destination service not found: ${destinationId}`);
 }
 /** https://graypaper.fluffylabs.dev/#/9a08063/371301371301?v=0.6.6 */
 if (gas < destination.onTransferMinGas) {
-return result_Result.error(TransferError.GasTooLow);
+return result_Result.error(TransferError.GasTooLow, () => `Gas ${gas} below minimum ${destination.onTransferMinGas}`);
 }
 /** https://graypaper.fluffylabs.dev/#/9a08063/371b01371b01?v=0.6.6 */
 const newBalance = source.balance - amount;
 const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(source.storageUtilisationCount, source.storageUtilisationBytes, source.gratisStorage);
 if (newBalance < thresholdBalance) {
-return result_Result.error(TransferError.BalanceBelowThreshold);
+return result_Result.error(TransferError.BalanceBelowThreshold, () => `Balance ${newBalance} below threshold ${thresholdBalance}`);
 }
 // outgoing transfer
 this.updatedState.stateUpdate.transfers.push(PendingTransfer.create({
@@ -20007,7 +20048,7 @@ class AccumulateExternalities {
 // check if we are priviledged to set gratis storage
 // https://graypaper.fluffylabs.dev/#/7e6ff6a/369203369603?v=0.6.7
 if (gratisStorage !== numbers_tryAsU64(0) && this.currentServiceId !== this.updatedState.getPrivilegedServices().manager) {
-return result_Result.error(NewServiceError.UnprivilegedService);
+return result_Result.error(NewServiceError.UnprivilegedService, () => `Service ${this.currentServiceId} not privileged to set gratis storage`);
 }
 // check if we have enough balance
 // https://graypaper.fluffylabs.dev/#/7e6ff6a/369e0336a303?v=0.6.7
@@ -20016,7 +20057,7 @@ class AccumulateExternalities {
 const thresholdForCurrent = ServiceAccountInfo.calculateThresholdBalance(currentService.storageUtilisationCount, currentService.storageUtilisationBytes, currentService.gratisStorage);
 const balanceLeftForCurrent = currentService.balance - thresholdForNew;
 if (balanceLeftForCurrent < thresholdForCurrent || bytes.overflow) {
-return result_Result.error(NewServiceError.InsufficientFunds);
+return result_Result.error(NewServiceError.InsufficientFunds, () => `Insufficient funds: balance=${currentService.balance}, required=${thresholdForNew}, overflow=${bytes.overflow}`);
 }
 // `a`: https://graypaper.fluffylabs.dev/#/ab2cdbd/366b02366d02?v=0.7.2
 const newAccount = ServiceAccountInfo.create({
@@ -20043,15 +20084,11 @@ class AccumulateExternalities {
 // NOTE: It's safe to cast to `Number` here, bcs here service ID cannot be bigger than 2**16
 const newServiceId = tryAsServiceId(Number(wantedServiceId));
 if (this.getServiceInfo(newServiceId) !== null) {
-return result_Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken);
+return result_Result.error(NewServiceError.RegistrarServiceIdAlreadyTaken, () => `Service ID ${newServiceId} already taken`);
 }
 // add the new service with selected ID
 // https://graypaper.fluffylabs.dev/#/ab2cdbd/36be0336c003?v=0.7.2
-this.updatedState.
-serviceId: newServiceId,
-serviceInfo: newAccount,
-lookupHistory: newLookupItem,
-}));
+this.updatedState.createService(newServiceId, newAccount, newLookupItem);
 // update the balance of current service
 // https://graypaper.fluffylabs.dev/#/ab2cdbd/36c20336c403?v=0.7.2
 this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -20062,12 +20099,8 @@ class AccumulateExternalities {
 }
 const newServiceId = this.nextNewServiceId;
 // add the new service
-// https://graypaper.fluffylabs.dev/#/
-this.updatedState.
-serviceId: newServiceId,
-serviceInfo: newAccount,
-lookupHistory: newLookupItem,
-}));
+// https://graypaper.fluffylabs.dev/#/7e6ff6a/36cb0236cb02?v=0.6.7
+this.updatedState.createService(newServiceId, newAccount, newLookupItem);
 // update the balance of current service
 // https://graypaper.fluffylabs.dev/#/ab2cdbd/36ec0336ee03?v=0.7.2
 this.updatedState.updateServiceInfo(this.currentServiceId, updatedCurrentAccount);
@@ -20091,7 +20124,7 @@ class AccumulateExternalities {
 const currentDelegator = this.updatedState.getPrivilegedServices().delegator;
 if (currentDelegator !== this.currentServiceId) {
 accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not a validators manager. (expected: ${currentDelegator}) and cannot update validators data. Ignoring`;
-return result_Result.error(UnprivilegedError);
+return result_Result.error(UnprivilegedError, () => `Service ${this.currentServiceId} is not delegator (expected: ${currentDelegator})`);
 }
 this.updatedState.stateUpdate.validatorsData = validatorsData;
 return result_Result.ok(result_OK);
@@ -20106,11 +20139,11 @@ class AccumulateExternalities {
 const currentAssigners = this.updatedState.getPrivilegedServices().assigners[coreIndex];
 if (currentAssigners !== this.currentServiceId) {
 accumulate_externalities_logger.trace `Current service id (${this.currentServiceId}) is not an auth manager of core ${coreIndex} (expected: ${currentAssigners}) and cannot update authorization queue.`;
-return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
+return result_Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} not assigner for core ${coreIndex} (expected: ${currentAssigners})`);
 }
 if (assigners === null && Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)) {
 accumulate_externalities_logger.trace `The new auth manager is not a valid service id.`;
-return result_Result.error(UpdatePrivilegesError.InvalidServiceId);
+return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => `New auth manager is null for core ${coreIndex}`);
 }
 this.updatedState.stateUpdate.authorizationQueues.set(coreIndex, authQueue);
 return result_Result.ok(result_OK);
@@ -20143,10 +20176,10 @@ class AccumulateExternalities {
 const isManager = current.manager === this.currentServiceId;
 if (Compatibility.isLessThan(GpVersion.V0_7_1)) {
 if (!isManager) {
-return result_Result.error(UpdatePrivilegesError.UnprivilegedService);
+return result_Result.error(UpdatePrivilegesError.UnprivilegedService, () => `Service ${this.currentServiceId} is not manager`);
 }
 if (manager === null || delegator === null) {
-return result_Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator is not a valid service id.");
+return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator is not a valid service id.");
 }
 this.updatedState.stateUpdate.privilegedServices = PrivilegedServices.create({
 manager,
@@ -20159,7 +20192,7 @@ class AccumulateExternalities {
 }
 const original = this.updatedState.state.privilegedServices;
 if (manager === null || delegator === null || registrar === null) {
-return result_Result.error(UpdatePrivilegesError.InvalidServiceId, "Either manager or delegator or registrar is not a valid service id.");
+return result_Result.error(UpdatePrivilegesError.InvalidServiceId, () => "Either manager or delegator or registrar is not a valid service id.");
 }
 const newDelegator = this.updatePrivilegedServiceId(delegator, current.delegator, {
 isManager,
@@ -20199,23 +20232,22 @@ class AccumulateExternalities {
 // TODO [ToDr] what about newly created services?
 const service = serviceId === null ? null : this.updatedState.state.getService(serviceId);
 if (service === null || serviceId === null) {
-return result_Result.error(ProvidePreimageError.ServiceNotFound);
+return result_Result.error(ProvidePreimageError.ServiceNotFound, () => `Service not found: ${serviceId}`);
 }
 // calculating the hash
 const preimageHash = this.blake2b.hashBytes(preimage).asOpaque();
 // checking service internal lookup
 const stateLookup = this.updatedState.getLookupHistory(this.currentTimeslot, serviceId, preimageHash, numbers_tryAsU64(preimage.length));
 if (stateLookup === null || !LookupHistoryItem.isRequested(stateLookup)) {
-return result_Result.error(ProvidePreimageError.WasNotRequested);
+return result_Result.error(ProvidePreimageError.WasNotRequested, () => `Preimage was not requested: hash=${preimageHash}, service=${serviceId}`);
 }
 // checking already provided preimages
 const hasPreimage = this.updatedState.hasPreimage(serviceId, preimageHash);
 if (hasPreimage) {
-return result_Result.error(ProvidePreimageError.AlreadyProvided);
+return result_Result.error(ProvidePreimageError.AlreadyProvided, () => `Preimage already provided: hash=${preimageHash}, service=${serviceId}`);
 }
 // setting up the new preimage
-this.updatedState.updatePreimage(UpdatePreimage.provide({
-serviceId,
+this.updatedState.updatePreimage(serviceId, UpdatePreimage.provide({
 preimage: PreimageItem.create({
 hash: preimageHash,
 blob: preimage,
@@ -20227,31 +20259,31 @@ class AccumulateExternalities {
 eject(destination, previousCodeHash) {
 const service = this.getServiceInfo(destination);
 if (service === null || destination === null) {
-return result_Result.error(EjectError.InvalidService, "Service missing");
+return result_Result.error(EjectError.InvalidService, () => "Service missing");
 }
 const currentService = this.getCurrentServiceInfo();
 // check if the service expects to be ejected by us:
 const expectedCodeHash = bytes_Bytes.zero(hash_HASH_SIZE).asOpaque();
 writeServiceIdAsLeBytes(this.currentServiceId, expectedCodeHash.raw);
 if (!service.codeHash.isEqualTo(expectedCodeHash)) {
-return result_Result.error(EjectError.InvalidService, "Invalid code hash");
+return result_Result.error(EjectError.InvalidService, () => "Invalid code hash");
 }
 // make sure the service only has required number of storage items?
 if (service.storageUtilisationCount !== REQUIRED_NUMBER_OF_STORAGE_ITEMS_FOR_EJECT) {
-return result_Result.error(EjectError.InvalidPreimage, "Too many storage items");
+return result_Result.error(EjectError.InvalidPreimage, () => "Too many storage items");
 }
 // storage items length
 const l = numbers_tryAsU64(maxU64(service.storageUtilisationBytes, LOOKUP_HISTORY_ENTRY_BYTES) - LOOKUP_HISTORY_ENTRY_BYTES);
 // check if we have a preimage with the entire storage.
 const [isPreviousCodeExpired, errorReason] = this.isPreviousCodeExpired(destination, previousCodeHash, l);
 if (!isPreviousCodeExpired) {
-return result_Result.error(EjectError.InvalidPreimage, `Previous code available: ${errorReason}`);
+return result_Result.error(EjectError.InvalidPreimage, () => `Previous code available: ${errorReason}`);
 }
 // compute new balance of the service.
 const newBalance = sumU64(currentService.balance, service.balance);
 // TODO [ToDr] what to do in case of overflow?
 if (newBalance.overflow) {
-return result_Result.error(EjectError.InvalidService, "Balance overflow");
+return result_Result.error(EjectError.InvalidService, () => "Balance overflow");
 }
 // update current service.
 this.updatedState.updateServiceInfo(this.currentServiceId, ServiceAccountInfo.create({
@@ -20259,11 +20291,13 @@ class AccumulateExternalities {
 balance: newBalance.value,
 }));
 // and finally add an ejected service.
-this.updatedState.stateUpdate.services.
+this.updatedState.stateUpdate.services.removed.push(destination);
 // take care of the code preimage and its lookup history
 // Safe, because we know the preimage is valid, and it's the code of the service, which is bounded by maximal service code size anyway (much smaller than 2**32 bytes).
 const preimageLength = numbers_tryAsU32(Number(l));
-this.updatedState.stateUpdate.services.preimages.
+const preimages = this.updatedState.stateUpdate.services.preimages.get(destination) ?? [];
+preimages.push(UpdatePreimage.remove({ hash: previousCodeHash, length: preimageLength }));
+this.updatedState.stateUpdate.services.preimages.set(destination, preimages);
 return result_Result.ok(result_OK);
 }
 read(serviceId, rawKey) {
@@ -20444,10 +20478,10 @@ class Assurances {
 for (const assurance of assurances) {
 const { anchor, validatorIndex, bitfield } = assurance;
 if (!anchor.isEqualTo(input.parentHash)) {
-return result_Result.error(AssurancesError.InvalidAnchor, `anchor: expected: ${input.parentHash}, got ${anchor}`);
+return result_Result.error(AssurancesError.InvalidAnchor, () => `anchor: expected: ${input.parentHash}, got ${anchor}`);
 }
 if (prevValidatorIndex >= validatorIndex) {
-return result_Result.error(AssurancesError.InvalidOrder, `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
+return result_Result.error(AssurancesError.InvalidOrder, () => `order: expected: ${prevValidatorIndex + 1}, got: ${validatorIndex}`);
 }
 prevValidatorIndex = assurance.validatorIndex;
 debug_check `${bitfield.bitLength === coresCount} Invalid bitfield length of ${bitfield.bitLength}`;
@@ -20470,7 +20504,7 @@ class Assurances {
 * https://graypaper.fluffylabs.dev/#/579bd12/14e90014ea00
 */
 if (noOfAssurances > 0 && !isReportPending) {
-return result_Result.error(AssurancesError.NoReportPending, `no report pending for core ${c} yet we got an assurance`);
+return result_Result.error(AssurancesError.NoReportPending, () => `no report pending for core ${c} yet we got an assurance`);
 }
 /**
 * Remove work report if it's became available or timed out.
@@ -20516,7 +20550,7 @@ class Assurances {
 const v = assurance.view();
 const key = validatorData[v.validatorIndex.materialize()];
 if (key === undefined) {
-return result_Result.error(AssurancesError.InvalidValidatorIndex);
+return result_Result.error(AssurancesError.InvalidValidatorIndex, () => `Invalid validator index: ${v.validatorIndex.materialize()}`);
 }
 signatures.push({
 signature: v.signature.materialize(),
@@ -20528,7 +20562,7 @@ class Assurances {
 const isAllSignaturesValid = signaturesValid.every((x) => x);
 if (!isAllSignaturesValid) {
 const invalidIndices = signaturesValid.reduce((acc, isValid, idx) => (isValid ? acc : acc.concat(idx)), []);
-return result_Result.error(AssurancesError.InvalidSignature, `invalid signatures at ${invalidIndices.join(", ")}`);
+return result_Result.error(AssurancesError.InvalidSignature, () => `invalid signatures at ${invalidIndices.join(", ")}`);
 }
 return result_Result.ok(result_OK);
 }
@@ -21139,7 +21173,7 @@ class HostCallMemory {
 return result_Result.ok(result_OK);
 }
 if (address + numbers_tryAsU64(bytes.length) > MEMORY_SIZE) {
-return result_Result.error(new OutOfBounds());
+return result_Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${address} + length ${bytes.length} exceeds memory size`);
 }
 return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
 }
@@ -21148,7 +21182,7 @@ class HostCallMemory {
 return result_Result.ok(result_OK);
 }
 if (startAddress + numbers_tryAsU64(result.length) > MEMORY_SIZE) {
-return result_Result.error(new OutOfBounds());
+return result_Result.error(new OutOfBounds(), () => `Memory access out of bounds: address ${startAddress} + length ${result.length} exceeds memory size`);
 }
 return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
 }
@@ -23091,18 +23125,18 @@ class Accumulate {
 const serviceInfo = updatedState.getServiceInfo(serviceId);
 if (serviceInfo === null) {
 accumulate_logger.log `Service with id ${serviceId} not found.`;
-return result_Result.error(PvmInvocationError.NoService);
+return result_Result.error(PvmInvocationError.NoService, () => `Accumulate: service ${serviceId} not found`);
 }
 const codeHash = serviceInfo.codeHash;
 // TODO [ToDr] Should we check that the preimage is still available?
 const code = updatedState.getPreimage(serviceId, codeHash.asOpaque());
 if (code === null) {
 accumulate_logger.log `Code with hash ${codeHash} not found for service ${serviceId}.`;
-return result_Result.error(PvmInvocationError.NoPreimage);
+return result_Result.error(PvmInvocationError.NoPreimage, () => `Accumulate: code with hash ${codeHash} not found for service ${serviceId}`);
 }
 if (code.length > W_C) {
 accumulate_logger.log `Code with hash ${codeHash} is too long for service ${serviceId}.`;
-return result_Result.error(PvmInvocationError.PreimageTooLong);
+return result_Result.error(PvmInvocationError.PreimageTooLong, () => `Accumulate: code length ${code.length} exceeds max ${W_C} for service ${serviceId}`);
 }
 const nextServiceId = generateNextServiceId({ serviceId, entropy, timeslot: slot }, this.chainSpec, this.blake2b);
 const partialState = new AccumulateExternalities(this.chainSpec, this.blake2b, updatedState, serviceId, nextServiceId, slot);
@@ -23357,19 +23391,16 @@ class Accumulate {
 const gasLimit = tryAsServiceGas(this.chainSpec.maxBlockGas > calculatedGasLimit ? this.chainSpec.maxBlockGas : calculatedGasLimit);
 return tryAsServiceGas(gasLimit);
 }
-
-
-
-
-
-
-
-
-
-
-}
-}
-return false;
+/**
+* Detects the very unlikely situation where multiple services are created with the same ID.
+*
+* https://graypaper.fluffylabs.dev/#/ab2cdbd/30f20330f403?v=0.7.2
+*
+* NOTE: This is public only for testing purposes and should not be used outside of accumulation.
+*/
+hasDuplicatedServiceIdCreated(createdIds) {
+const uniqueIds = new Set(createdIds);
+return uniqueIds.size !== createdIds.length;
 }
 async transition({ reports, slot, entropy }) {
 const statistics = new Map();
@@ -23391,8 +23422,9 @@ class Accumulate {
 const accumulated = accumulatableReports.subview(0, accumulatedReports);
 const { services, yieldedRoots, transfers, validatorsData, privilegedServices, authorizationQueues, ...stateUpdateRest } = state;
 assertEmpty(stateUpdateRest);
-if (this.
-
+if (this.hasDuplicatedServiceIdCreated(services.created)) {
+accumulate_logger.trace `Duplicated Service creation detected. Block is invalid.`;
+return result_Result.error(ACCUMULATION_ERROR, () => "Accumulate: duplicate service created");
 }
 const accStateUpdate = this.getAccumulationStateUpdate(accumulated.toArray(), toAccumulateLater, slot, Array.from(statistics.keys()), services);
 const accumulationOutputUnsorted = Array.from(yieldedRoots.entries()).map(([serviceId, root]) => {
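The guard above rejects the whole block when hasDuplicatedServiceIdCreated fires; the check itself is just a set-cardinality comparison over the created list that createService appends to. A tiny sketch:

function hasDuplicatedServiceIdCreated(createdIds) {
  return new Set(createdIds).size !== createdIds.length;
}

console.log(hasDuplicatedServiceIdCreated([1, 2, 3])); // false
console.log(hasDuplicatedServiceIdCreated([1, 2, 2])); // true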
@@ -23473,13 +23505,13 @@ class DeferredTransfers {
 .toSorted((a, b) => a.source - b.source);
 const info = partiallyUpdatedState.getServiceInfo(serviceId);
 if (info === null) {
-return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist);
+return result_Result.error(DeferredTransfersErrorCode.ServiceInfoNotExist, () => `Deferred transfers: service info not found for ${serviceId}`);
 }
 const codeHash = info.codeHash;
 const code = partiallyUpdatedState.getPreimage(serviceId, codeHash.asOpaque());
 const newBalance = sumU64(info.balance, ...transfers.map((item) => item.amount));
 if (newBalance.overflow) {
-return result_Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow);
+return result_Result.error(DeferredTransfersErrorCode.ServiceBalanceOverflow, () => `Deferred transfers: balance overflow for service ${serviceId}`);
 }
 const newInfo = ServiceAccountInfo.create({ ...info, balance: newBalance.value });
 partiallyUpdatedState.updateServiceInfo(serviceId, newInfo);
@@ -23957,7 +23989,7 @@ function verifyReportsBasic(input) {
 const noOfPrerequisites = reportView.context.view().prerequisites.view().length;
 const noOfSegmentRootLookups = reportView.segmentRootLookup.view().length;
 if (noOfPrerequisites + noOfSegmentRootLookups > MAX_REPORT_DEPENDENCIES) {
-return result_Result.error(ReportsError.TooManyDependencies, `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
+return result_Result.error(ReportsError.TooManyDependencies, () => `Report at ${reportView.coreIndex.materialize()} has too many dependencies. Got ${noOfPrerequisites} + ${noOfSegmentRootLookups}, max: ${MAX_REPORT_DEPENDENCIES}`);
 }
 /**
 * In order to ensure fair use of a block’s extrinsic space,
@@ -23976,7 +24008,7 @@ function verifyReportsBasic(input) {
 totalOutputsSize += item.view().result.view().okBlob?.raw.length ?? 0;
 }
 if (authOutputSize + totalOutputsSize > MAX_WORK_REPORT_SIZE_BYTES) {
-return result_Result.error(ReportsError.WorkReportTooBig, `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
+return result_Result.error(ReportsError.WorkReportTooBig, () => `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
 }
 }
 return result_Result.ok(result_OK);
@@ -24010,12 +24042,12 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
 for (const result of guarantee.report.results) {
 const service = state.getService(result.serviceId);
 if (service === null) {
-return result_Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
+return result_Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
 }
 // check service code hash
 // https://graypaper.fluffylabs.dev/#/5f542d7/154b02154b02
 if (!result.codeHash.isEqualTo(service.getInfo().codeHash)) {
-return result_Result.error(ReportsError.BadCodeHash, `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
+return result_Result.error(ReportsError.BadCodeHash, () => `Service (${result.serviceId}) code hash mismatch. Got: ${result.codeHash}, expected: ${service.getInfo().codeHash}`);
 }
 }
 }
@@ -24026,7 +24058,7 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
 * https://graypaper.fluffylabs.dev/#/5f542d7/151f01152101
 */
 if (currentWorkPackages.size !== input.guarantees.length) {
-return result_Result.error(ReportsError.DuplicatePackage, "Duplicate work package detected.");
+return result_Result.error(ReportsError.DuplicatePackage, () => "Duplicate work package detected.");
 }
 const minLookupSlot = Math.max(0, input.slot - maxLookupAnchorAge);
 const contextResult = verifyRefineContexts(minLookupSlot, contexts, input.recentBlocksPartialUpdate, headerChain);
@@ -24071,7 +24103,7 @@ function verifyContextualValidity(input, state, headerChain, maxLookupAnchorAge)
 : undefined;
 }
 if (root === undefined || !root.segmentTreeRoot.isEqualTo(lookup.segmentTreeRoot)) {
-return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
+return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Mismatching segment tree root for package ${lookup.workPackageHash}. Got: ${lookup.segmentTreeRoot}, expected: ${root?.segmentTreeRoot}`);
 }
 }
 }
@@ -24094,16 +24126,16 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
 */
 const recentBlock = recentBlocks.get(context.anchor);
 if (recentBlock === undefined) {
-return result_Result.error(ReportsError.AnchorNotRecent, `Anchor block ${context.anchor} not found in recent blocks.`);
+return result_Result.error(ReportsError.AnchorNotRecent, () => `Anchor block ${context.anchor} not found in recent blocks.`);
 }
 // check state root
 if (!recentBlock.postStateRoot.isEqualTo(context.stateRoot)) {
-return result_Result.error(ReportsError.BadStateRoot, `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
+return result_Result.error(ReportsError.BadStateRoot, () => `Anchor state root mismatch. Got: ${context.stateRoot}, expected: ${recentBlock.postStateRoot}.`);
 }
 // check beefy root
 const beefyRoot = recentBlock.accumulationResult;
 if (!beefyRoot.isEqualTo(context.beefyRoot)) {
-return result_Result.error(ReportsError.BadBeefyMmrRoot, `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
+return result_Result.error(ReportsError.BadBeefyMmrRoot, () => `Invalid BEEFY super peak hash. Got: ${context.beefyRoot}, expected: ${beefyRoot}. Anchor: ${recentBlock.headerHash}`);
 }
 /**
 * We require that each lookup-anchor block be within the
@@ -24112,7 +24144,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
 * https://graypaper.fluffylabs.dev/#/5f542d7/154601154701
 */
 if (context.lookupAnchorSlot < minLookupSlot) {
-return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
+return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor slot's too old. Got: ${context.lookupAnchorSlot}, minimal: ${minLookupSlot}`);
 }
 /**
 * We also require that we have a record of it; this is one of
@@ -24129,7 +24161,7 @@ function verifyRefineContexts(minLookupSlot, contexts, recentBlocksPartialUpdate
 verify_contextual_logger.warn `Lookup anchor check for ${context.lookupAnchor} would fail, but override is active.`;
 }
 else {
-return result_Result.error(ReportsError.SegmentRootLookupInvalid, `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
+return result_Result.error(ReportsError.SegmentRootLookupInvalid, () => `Lookup anchor is not found in chain. Hash: ${context.lookupAnchor} (slot: ${context.lookupAnchorSlot})`);
 }
 }
 }
@@ -24152,7 +24184,7 @@ function verifyDependencies({ currentWorkPackages, recentlyReported, prerequisit
 if (recentlyReported.has(preReqHash)) {
 continue;
 }
-return result_Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, `Missing work package ${preReqHash} in current extrinsic or recent history.`);
+return result_Result.error(isSegmentRoot ? ReportsError.SegmentRootLookupInvalid : ReportsError.DependencyMissing, () => `Missing work package ${preReqHash} in current extrinsic or recent history.`);
 }
 return result_Result.ok(result_OK);
 };
@@ -24200,7 +24232,7 @@ function verifyWorkPackagesUniqueness(workPackageHashes, state) {
 // let's check if any of our packages is in the pipeline
 const intersection = packagesInPipeline.intersection(workPackageHashes);
 for (const packageHash of intersection) {
-return result_Result.error(ReportsError.DuplicatePackage, `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
+return result_Result.error(ReportsError.DuplicatePackage, () => `The same work package hash found in the pipeline (workPackageHash: ${packageHash})`);
 }
 return result_Result.ok(result_OK);
 }
@@ -24239,7 +24271,7 @@ workReportHashes, slot, getGuarantorAssignment) {
|
|
|
24239
24271
|
const credentialsView = guaranteeView.credentials.view();
|
|
24240
24272
|
if (credentialsView.length < REQUIRED_CREDENTIALS_RANGE[0] ||
|
|
24241
24273
|
credentialsView.length > REQUIRED_CREDENTIALS_RANGE[1]) {
|
|
24242
|
-
return result_Result.error(ReportsError.InsufficientGuarantees, `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
|
|
24274
|
+
return result_Result.error(ReportsError.InsufficientGuarantees, () => `Invalid number of credentials. Expected ${REQUIRED_CREDENTIALS_RANGE}, got ${credentialsView.length}`);
|
|
24243
24275
|
}
|
|
24244
24276
|
/** Retrieve current core assignment. */
|
|
24245
24277
|
const timeSlot = guaranteeView.slot.materialize();
|
|
@@ -24254,20 +24286,20 @@ workReportHashes, slot, getGuarantorAssignment) {
|
|
|
24254
24286
|
const credentialView = credential.view();
|
|
24255
24287
|
const validatorIndex = credentialView.validatorIndex.materialize();
|
|
24256
24288
|
if (lastValidatorIndex >= validatorIndex) {
|
|
24257
|
-
return result_Result.error(ReportsError.NotSortedOrUniqueGuarantors, `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
|
|
24289
|
+
return result_Result.error(ReportsError.NotSortedOrUniqueGuarantors, () => `Credentials must be sorted by validator index. Got ${validatorIndex}, expected at least ${lastValidatorIndex + 1}`);
|
|
24258
24290
|
}
|
|
24259
24291
|
lastValidatorIndex = validatorIndex;
|
|
24260
24292
|
const signature = credentialView.signature.materialize();
|
|
24261
24293
|
const guarantorData = guarantorAssignments[validatorIndex];
|
|
24262
24294
|
if (guarantorData === undefined) {
|
|
24263
|
-
return result_Result.error(ReportsError.BadValidatorIndex, `Invalid validator index: ${validatorIndex}`);
|
|
24295
|
+
return result_Result.error(ReportsError.BadValidatorIndex, () => `Invalid validator index: ${validatorIndex}`);
|
|
24264
24296
|
}
|
|
24265
24297
|
/**
|
|
24266
24298
|
* Verify core assignment.
|
|
24267
24299
|
* https://graypaper.fluffylabs.dev/#/5f542d7/14e40214e602
|
|
24268
24300
|
*/
|
|
24269
24301
|
if (guarantorData.core !== coreIndex) {
|
|
24270
|
-
return result_Result.error(ReportsError.WrongAssignment, `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
|
|
24302
|
+
return result_Result.error(ReportsError.WrongAssignment, () => `Invalid core assignment for validator ${validatorIndex}. Expected: ${guarantorData.core}, got: ${coreIndex}`);
|
|
24271
24303
|
}
|
|
24272
24304
|
signaturesToVerify.push({
|
|
24273
24305
|
signature,
|
|
@@ -24305,10 +24337,10 @@ function verifyReportsOrder(input, chainSpec) {
|
|
|
24305
24337
|
const reportView = guarantee.view().report.view();
|
|
24306
24338
|
const coreIndex = reportView.coreIndex.materialize();
|
|
24307
24339
|
if (lastCoreIndex >= coreIndex) {
|
|
24308
|
-
return result_Result.error(ReportsError.OutOfOrderGuarantee, `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
|
|
24340
|
+
return result_Result.error(ReportsError.OutOfOrderGuarantee, () => `Core indices of work reports are not unique or in order. Got: ${coreIndex}, expected at least: ${lastCoreIndex + 1}`);
|
|
24309
24341
|
}
|
|
24310
24342
|
if (coreIndex >= noOfCores) {
|
|
24311
|
-
return result_Result.error(ReportsError.BadCoreIndex, `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
|
|
24343
|
+
return result_Result.error(ReportsError.BadCoreIndex, () => `Invalid core index. Got: ${coreIndex}, max: ${noOfCores}`);
|
|
24312
24344
|
}
|
|
24313
24345
|
lastCoreIndex = coreIndex;
|
|
24314
24346
|
}
|
|
@@ -24333,7 +24365,7 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
|
|
|
24333
24365
|
* https://graypaper.fluffylabs.dev/#/5f542d7/15ea0015ea00
|
|
24334
24366
|
*/
|
|
24335
24367
|
if (availabilityAssignment[coreIndex] !== null) {
|
|
24336
|
-
return result_Result.error(ReportsError.CoreEngaged, `Report pending availability at core: ${coreIndex}`);
|
|
24368
|
+
return result_Result.error(ReportsError.CoreEngaged, () => `Report pending availability at core: ${coreIndex}`);
|
|
24337
24369
|
}
|
|
24338
24370
|
/**
|
|
24339
24371
|
* A report is valid only if the authorizer hash is present
|
|
@@ -24346,7 +24378,7 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
|
|
|
24346
24378
|
const authorizerPool = authPools.get(coreIndex);
|
|
24347
24379
|
const pool = authorizerPool?.materialize() ?? [];
|
|
24348
24380
|
if (pool.find((hash) => hash.isEqualTo(authorizerHash)) === undefined) {
|
|
24349
|
-
return result_Result.error(ReportsError.CoreUnauthorized, `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
|
|
24381
|
+
return result_Result.error(ReportsError.CoreUnauthorized, () => `Authorizer hash not found in the pool of core ${coreIndex}: ${authorizerHash}`);
|
|
24350
24382
|
}
|
|
24351
24383
|
/**
|
|
24352
24384
|
* We require that the gas allotted for accumulation of each
|
|
@@ -24358,17 +24390,17 @@ function verifyPostSignatureChecks(input, availabilityAssignment, authPools, ser
 for (const result of report.results) {
 const service = services(result.serviceId);
 if (service === null) {
-return result_Result.error(ReportsError.BadServiceId, `No service with id: ${result.serviceId}`);
+return result_Result.error(ReportsError.BadServiceId, () => `No service with id: ${result.serviceId}`);
 }
 const info = service.getInfo();
 // check minimal accumulation gas
 if (result.gas < info.accumulateMinGas) {
-return result_Result.error(ReportsError.ServiceItemGasTooLow, `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
+return result_Result.error(ReportsError.ServiceItemGasTooLow, () => `Service (${result.serviceId}) gas is less than minimal. Got: ${result.gas}, expected at least: ${info.accumulateMinGas}`);
 }
 }
 const totalGas = sumU64(...report.results.map((x) => x.gas));
 if (totalGas.overflow || totalGas.value > G_A) {
-return result_Result.error(ReportsError.WorkReportGasTooHigh, `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
+return result_Result.error(ReportsError.WorkReportGasTooHigh, () => `Total gas too high. Got: ${totalGas.value} (ovfl: ${totalGas.overflow}), maximal: ${G_A}`);
 }
 }
 return result_Result.ok(result_OK);
@@ -24454,7 +24486,7 @@ class Reports {
 }
 const reporters = SortedSet.fromArray(bytesBlobComparator, signaturesToVerify.ok.map((x) => x.key)).slice();
 if (hasAnyOffenders(reporters, input.offenders)) {
-return result_Result.error(ReportsError.BannedValidator);
+return result_Result.error(ReportsError.BannedValidator, () => "One or more reporters are banned validators");
 }
 return result_Result.ok({
 stateUpdate: {
@@ -24494,7 +24526,7 @@ class Reports {
 return signaturesToVerify[idx].key;
 })
 .filter((x) => x !== null);
-return result_Result.error(ReportsError.BadSignature, `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
+return result_Result.error(ReportsError.BadSignature, () => `Invalid signatures for validators with keys: ${invalidKeys.join(", ")}`);
 }
 /**
 * Get the guarantor assignment (both core and validator data)
@@ -24510,10 +24542,10 @@ class Reports {
 const minTimeSlot = Math.max(0, headerRotation - 1) * rotationPeriod;
 // https://graypaper.fluffylabs.dev/#/5f542d7/155e00156900
 if (guaranteeTimeSlot > headerTimeSlot) {
-return result_Result.error(ReportsError.FutureReportSlot, `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
+return result_Result.error(ReportsError.FutureReportSlot, () => `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
 }
 if (guaranteeTimeSlot < minTimeSlot) {
-return result_Result.error(ReportsError.ReportEpochBeforeLast, `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
+return result_Result.error(ReportsError.ReportEpochBeforeLast, () => `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`);
 }
 // TODO [ToDr] [opti] below code needs cache.
 // The `G` and `G*` sets should only be computed once per rotation.
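The context above bounds a guarantee's time slot between the start of the previous rotation and the current header slot. The sketch below is a standalone illustration of that window check; deriving headerRotation by integer-dividing the header slot by the rotation period is an assumption, since that line sits outside this hunk.

// Illustrative slot-window check; the `headerRotation` derivation is assumed,
// as it is not visible in the hunk above.
function checkGuaranteeSlot(guaranteeTimeSlot, headerTimeSlot, rotationPeriod) {
  const headerRotation = Math.floor(headerTimeSlot / rotationPeriod);
  const minTimeSlot = Math.max(0, headerRotation - 1) * rotationPeriod;
  if (guaranteeTimeSlot > headerTimeSlot) {
    return `Report slot is in future. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`;
  }
  if (guaranteeTimeSlot < minTimeSlot) {
    return `Report slot is too old. Block ${headerTimeSlot}, Report: ${guaranteeTimeSlot}`;
  }
  return null; // within the accepted window
}

console.log(checkGuaranteeSlot(95, 100, 10)); // null: within the current or previous rotation
console.log(checkGuaranteeSlot(80, 100, 10)); // "Report slot is too old. ..."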
@@ -25081,6 +25113,16 @@ class OnChain {
 });
 const { statistics, ...statisticsRest } = statisticsUpdate;
 assertEmpty(statisticsRest);
+// Concat accumulatePreimages updates with preimages
+for (const [serviceId, accPreimageUpdates] of accumulatePreimages.entries()) {
+const preimagesUpdates = preimages.get(serviceId);
+if (preimagesUpdates === undefined) {
+preimages.set(serviceId, accPreimageUpdates);
+}
+else {
+preimages.set(serviceId, preimagesUpdates.concat(accPreimageUpdates));
+}
+}
 return result_Result.ok({
 ...(maybeAuthorizationQueues !== undefined ? { authQueues: maybeAuthorizationQueues } : {}),
 ...(maybeDesignatedValidatorData !== undefined ? { designatedValidatorData: maybeDesignatedValidatorData } : {}),
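The lines added to OnChain above fold the accumulation-produced preimage updates into the existing per-service preimages map, appending where a service already has pending updates. A self-contained sketch of the same concat-or-insert merge over plain Maps and arrays (all names here are illustrative, not the package's types):

// Illustrative merge of Map<serviceId, update[]> collections, mirroring
// the concat-or-insert loop added in the hunk above.
function mergePreimageUpdates(preimages, accumulatePreimages) {
  for (const [serviceId, accPreimageUpdates] of accumulatePreimages.entries()) {
    const existing = preimages.get(serviceId);
    if (existing === undefined) {
      preimages.set(serviceId, accPreimageUpdates);
    } else {
      // concat keeps the earlier updates first and returns a fresh array.
      preimages.set(serviceId, existing.concat(accPreimageUpdates));
    }
  }
  return preimages;
}

const merged = mergePreimageUpdates(
  new Map([[1, ["p1"]]]),
  new Map([[1, ["p2"]], [2, ["p3"]]]),
);
// merged: Map { 1 => ["p1", "p2"], 2 => ["p3"] }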
@@ -25102,7 +25144,7 @@ class OnChain {
 recentlyAccumulated,
 accumulationOutputLog,
 ...servicesUpdate,
-preimages
+preimages,
 });
 }
 getUsedAuthorizerHashes(guarantees) {
@@ -25119,11 +25161,11 @@ class OnChain {
 }
 function checkOffendersMatch(offendersMark, headerOffendersMark) {
 if (offendersMark.size !== headerOffendersMark.length) {
-return result_Result.error(OFFENDERS_ERROR, `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
+return result_Result.error(OFFENDERS_ERROR, () => `Length mismatch: ${offendersMark.size} vs ${headerOffendersMark.length}`);
 }
 for (const key of headerOffendersMark) {
 if (!offendersMark.has(key)) {
-return result_Result.error(OFFENDERS_ERROR, `Missing key: ${key}`);
+return result_Result.error(OFFENDERS_ERROR, () => `Missing key: ${key}`);
 }
 }
 return result_Result.ok(result_OK);
@@ -25205,7 +25247,7 @@ class Importer {
 if (!this.currentHash.isEqualTo(parentHash)) {
 const state = this.states.getState(parentHash);
 if (state === null) {
-const e = result_Result.error(BlockVerifierError.StateRootNotFound);
+const e = result_Result.error(BlockVerifierError.StateRootNotFound, () => `State not found for parent block ${parentHash}`);
 if (!e.isError) {
 throw new Error("unreachable, just adding to make compiler happy");
 }
@@ -25401,7 +25443,7 @@ const importBlockResultCodec = descriptors_codec.custom({
 }
 if (kind === 1) {
 const error = d.bytesBlob();
-return result_Result.error(error.asText());
+return result_Result.error(error.asText(), () => error.asText());
 }
 throw new Error(`Invalid Result: ${kind}`);
 }, (s) => {
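importBlockResultCodec above decodes a tag-prefixed Result, with kind 1 carrying an error message that now also backs the lazy details. A rough standalone sketch of that shape follows; it assumes the non-error branch uses kind 0 (that branch sits just above this hunk) and uses TextDecoder in place of the package's Decoder and codec descriptors.

// Rough sketch of a tag-prefixed Result decoder (assumed: kind 0 = ok, kind 1 = error);
// TextDecoder stands in for the package's Decoder/bytesBlob machinery.
function decodeImportResult(bytes) {
  const kind = bytes[0];
  const rest = bytes.subarray(1);
  if (kind === 0) {
    return { isOk: true, ok: rest };
  }
  if (kind === 1) {
    const text = new TextDecoder().decode(rest);
    return { isOk: false, error: text, details: () => text };
  }
  throw new Error(`Invalid Result: ${kind}`);
}

const encoded = new Uint8Array([1, ...new TextEncoder().encode("bad block")]);
const decoded = decodeImportResult(encoded);
console.log(decoded.error, decoded.details()); // "bad block" "bad block"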
@@ -25445,14 +25487,14 @@ class MainReady extends State {
 sendBlock(port, block) {
 // TODO [ToDr] How to make a better API to pass this binary data around?
 // Currently we don't guarantee that the underlying buffer is actually `ArrayBuffer`.
-port.sendSignal("block", block, [
+port.sendSignal("block", block, []);
 }
 async importBlock(port, block) {
-const res = await port.sendRequest("importBlock", block, [
+const res = await port.sendRequest("importBlock", block, []);
 if (res instanceof Uint8Array) {
 return decoder_Decoder.decodeObject(importBlockResultCodec, res);
 }
-return result_Result.error("Invalid worker response.");
+return result_Result.error("Invalid worker response.", () => "Invalid worker response: expected Uint8Array");
 }
 async getStateEntries(port, hash) {
 const res = await port.sendRequest("getStateEntries", hash, [hash.buffer]);
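The MainReady changes above stop passing buffers in the transfer list ([...] becomes []), which matches the TODO in the same hunk: the block bytes may not be backed by a plain ArrayBuffer, so transferring block.buffer is not always safe. The snippet below is a hedged sketch of that trade-off using a bare MessagePort; the package's sendSignal/sendRequest wrappers are not reproduced here.

// Sketch of the copy-vs-transfer choice on a standard MessagePort.
// With an empty transfer list the bytes are structured-cloned (copied);
// transferring `block.buffer` detaches it from the sender and can move far
// more memory than intended when the view is a slice of a larger pool.
function sendBlock(port, block) {
  const ownsWholeBuffer =
    block.buffer instanceof ArrayBuffer && block.byteLength === block.buffer.byteLength;
  port.postMessage({ kind: "block", block }, ownsWholeBuffer ? [block.buffer] : []);
}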
@@ -25564,13 +25606,13 @@ class ImporterReady extends State {
 response = result_Result.ok(this.importer.getBestStateRootHash() ?? ZERO_HASH.asOpaque());
 }
 else {
-response = result_Result.error(resultToString(res));
+response = result_Result.error(resultToString(res), () => resultToString(res));
 }
 }
 catch (e) {
 state_machine_logger.error `Failed to import block: ${e}`;
 state_machine_logger.error `${e instanceof Error ? e.stack : ""}`;
-response = result_Result.error(`${e}`);
+response = result_Result.error(`${e}`, () => `${e}`);
 }
 const encoded = encoder_Encoder.encodeObject(importBlockResultCodec, response);
 return {