@typeberry/lib 0.2.0-c3df163 → 0.2.0-f506473
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +250 -206
- package/index.d.ts +300 -227
- package/index.js +250 -206
- package/package.json +1 -1
package/index.d.ts
CHANGED
@@ -322,7 +322,7 @@ type ErrorResult<Error> = {
   isOk: false;
   isError: true;
   error: Error;
-  details: string;
+  details: () => string;
 };

 /**
@@ -383,7 +383,7 @@ declare function resultToString<Ok, Error>(res: Result$2<Ok, Error>) {
   if (res.isOk) {
     return `OK: ${typeof res.ok === "symbol" ? res.ok.toString() : res.ok}`;
   }
-  return `${res.details}\nError: ${maybeTaggedErrorToString(res.error)}`;
+  return `${res.details()}\nError: ${maybeTaggedErrorToString(res.error)}`;
 }

 /** An indication of two possible outcomes returned from a function. */
@@ -402,7 +402,7 @@ declare const Result$2 = {
   },

   /** Create new [`Result`] with `Error` status. */
-  error: <Error>(error: Error, details
+  error: <Error>(error: Error, details: () => string): ErrorResult<Error> => {
     check`${error !== undefined} 'Error' type cannot be undefined.`;
     return {
       isOk: false,
@@ -556,7 +556,7 @@ declare function deepEqual<T>(

   if (actual.isError && expected.isError) {
     deepEqual(actual.error, expected.error, { context: ctx.concat(["error"]), errorsCollector: errors, ignore });
-    deepEqual(actual.details, expected.details, {
+    deepEqual(actual.details(), expected.details(), {
       context: ctx.concat(["details"]),
       errorsCollector: errors,
       // display details when error does not match
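The `details` of an error `Result` is now a thunk rather than a pre-built string, so the message is only rendered when somebody actually asks for it. A minimal consumer sketch; the import path and the `ParseError` enum are assumptions for illustration, only the `Result.ok`/`Result.error`/`resultToString` shapes above are taken from the declarations:

```ts
import { Result, resultToString } from "@typeberry/lib";

// Hypothetical error enum, just for the example.
enum ParseError {
  TooShort = 0,
}

function parseHeader(blob: Uint8Array) {
  if (blob.length < 4) {
    // `details` is now lazy: the string is only built when `details()` is called.
    return Result.error(ParseError.TooShort, () => `expected 4 bytes, got ${blob.length}`);
  }
  return Result.ok(blob.subarray(0, 4));
}

const res = parseHeader(new Uint8Array(2));
if (res.isError) {
  console.log(resultToString(res)); // the details string is materialized only here
}
```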
@@ -1467,8 +1467,8 @@ declare class Decoder {
   /**
    * Create a new [`Decoder`] instance given a raw array of bytes as a source.
    */
-  static fromBlob(source: Uint8Array) {
-    return new Decoder(source);
+  static fromBlob(source: Uint8Array, context?: unknown) {
+    return new Decoder(source, undefined, context);
   }

   /**
@@ -1818,7 +1818,7 @@ declare class Decoder {
   private ensureHasBytes(bytes: number) {
     check`${bytes >= 0} Negative number of bytes given.`;
     if (this.offset + bytes > this.source.length) {
-      throw new
+      throw new EndOfDataError(
         `Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`,
       );
     }
@@ -1837,6 +1837,8 @@ declare function decodeVariableLengthExtraBytes(firstByte: number) {
   return 0;
 }

+declare class EndOfDataError extends Error {}
+
 /** Hint for how big the encoded object will be. */
 type SizeHint = {
   /** Number of bytes in the encoding. */
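`Decoder.fromBlob` now threads an optional `context` value into the decoder, and running out of input raises the dedicated `EndOfDataError` instead of a plain `Error`. A sketch of telling truncated input apart from other decode failures; the `codec` namespace import and the `decodeFn` callback shape are assumptions for illustration:

```ts
import { codec } from "@typeberry/lib";

function safeDecode<T>(bytes: Uint8Array, decodeFn: (d: codec.Decoder) => T): T | "truncated" {
  // `fromBlob` accepts an optional context that is passed through to the decoder.
  const decoder = codec.Decoder.fromBlob(bytes, { source: "safeDecode example" });
  try {
    return decodeFn(decoder);
  } catch (e) {
    if (e instanceof codec.EndOfDataError) {
      // the input was shorter than the descriptors expected
      return "truncated";
    }
    throw e;
  }
}
```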
@@ -3346,6 +3348,9 @@ declare function forEachDescriptor<T>(
     try {
       f(k, descriptors[k]);
     } catch (e) {
+      if (e instanceof EndOfDataError) {
+        throw new EndOfDataError(`${key}: ${e}`);
+      }
       throw new Error(`${key}: ${e}`);
     }
   }
@@ -3469,6 +3474,8 @@ type index$q_DescriptorRecord<T> = DescriptorRecord<T>;
 type index$q_Encode<T> = Encode<T>;
 type index$q_Encoder = Encoder;
 declare const index$q_Encoder: typeof Encoder;
+type index$q_EndOfDataError = EndOfDataError;
+declare const index$q_EndOfDataError: typeof EndOfDataError;
 type index$q_LengthRange = LengthRange;
 declare const index$q_MASKS: typeof MASKS;
 declare const index$q_MAX_LENGTH: typeof MAX_LENGTH;
@@ -3497,7 +3504,7 @@ declare const index$q_sequenceViewVarLen: typeof sequenceViewVarLen;
 declare const index$q_tryAsExactBytes: typeof tryAsExactBytes;
 declare const index$q_validateLength: typeof validateLength;
 declare namespace index$q {
-  export { index$q_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$q_Decoder as Decoder, index$q_Descriptor as Descriptor, index$q_Encoder as Encoder, index$q_MASKS as MASKS, index$q_MAX_LENGTH as MAX_LENGTH, index$q_ObjectView as ObjectView, index$q_SequenceView as SequenceView, index$q_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$q_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$q_ViewField as ViewField, index$q_addSizeHints as addSizeHints, codec$1 as codec, index$q_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$q_exactHint as exactHint, index$q_forEachDescriptor as forEachDescriptor, index$q_hasUniqueView as hasUniqueView, index$q_objectView as objectView, index$q_readonlyArray as readonlyArray, index$q_sequenceViewFixLen as sequenceViewFixLen, index$q_sequenceViewVarLen as sequenceViewVarLen, index$q_tryAsExactBytes as tryAsExactBytes, index$q_validateLength as validateLength };
+  export { index$q_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$q_Decoder as Decoder, index$q_Descriptor as Descriptor, index$q_Encoder as Encoder, index$q_EndOfDataError as EndOfDataError, index$q_MASKS as MASKS, index$q_MAX_LENGTH as MAX_LENGTH, index$q_ObjectView as ObjectView, index$q_SequenceView as SequenceView, index$q_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$q_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$q_ViewField as ViewField, index$q_addSizeHints as addSizeHints, codec$1 as codec, index$q_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$q_exactHint as exactHint, index$q_forEachDescriptor as forEachDescriptor, index$q_hasUniqueView as hasUniqueView, index$q_objectView as objectView, index$q_readonlyArray as readonlyArray, index$q_sequenceViewFixLen as sequenceViewFixLen, index$q_sequenceViewVarLen as sequenceViewVarLen, index$q_tryAsExactBytes as tryAsExactBytes, index$q_validateLength as validateLength };
   export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_CodecWithView as CodecWithView, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
 }

@@ -10659,7 +10666,6 @@ declare enum UpdatePreimageKind {
  */
 declare class UpdatePreimage {
   private constructor(
-    public readonly serviceId: ServiceId,
     public readonly action:
       | {
           kind: UpdatePreimageKind.Provide;
@@ -10679,16 +10685,8 @@ declare class UpdatePreimage {
   ) {}

   /** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
-  static provide({
-
-    preimage,
-    slot,
-  }: {
-    serviceId: ServiceId;
-    preimage: PreimageItem;
-    slot: TimeSlot | null;
-  }) {
-    return new UpdatePreimage(serviceId, {
+  static provide({ preimage, slot }: { preimage: PreimageItem; slot: TimeSlot | null }) {
+    return new UpdatePreimage({
       kind: UpdatePreimageKind.Provide,
       preimage,
       slot,
@@ -10696,8 +10694,8 @@ declare class UpdatePreimage {
   }

   /** The preimage should be removed completely from the database. */
-  static remove({
-    return new UpdatePreimage(
+  static remove({ hash, length }: { hash: PreimageHash; length: U32 }) {
+    return new UpdatePreimage({
       kind: UpdatePreimageKind.Remove,
       hash,
       length,
@@ -10705,8 +10703,8 @@ declare class UpdatePreimage {
   }

   /** Update the lookup history of some preimage or add a new one (request). */
-  static updateOrAdd({
-    return new UpdatePreimage(
+  static updateOrAdd({ lookupHistory }: { lookupHistory: LookupHistoryItem }) {
+    return new UpdatePreimage({
       kind: UpdatePreimageKind.UpdateOrAdd,
       item: lookupHistory,
     });
@@ -10744,12 +10742,12 @@ declare enum UpdateServiceKind {
   /** Create a new `Service` instance. */
   Create = 1,
 }
+
 /**
- * Update service info
+ * Update service info or create a new one.
  */
 declare class UpdateService {
   private constructor(
-    public readonly serviceId: ServiceId,
     public readonly action:
       | {
           kind: UpdateServiceKind.Update;
@@ -10762,23 +10760,21 @@ declare class UpdateService {
         },
   ) {}

-  static update({
-    return new UpdateService(
+  static update({ serviceInfo }: { serviceInfo: ServiceAccountInfo }) {
+    return new UpdateService({
       kind: UpdateServiceKind.Update,
       account: serviceInfo,
     });
   }

   static create({
-    serviceId,
     serviceInfo,
     lookupHistory,
   }: {
-    serviceId: ServiceId;
     serviceInfo: ServiceAccountInfo;
     lookupHistory: LookupHistoryItem | null;
   }) {
-    return new UpdateService(
+    return new UpdateService({
       kind: UpdateServiceKind.Create,
       account: serviceInfo,
       lookupHistory,
@@ -10800,7 +10796,6 @@ declare enum UpdateStorageKind {
  */
 declare class UpdateStorage {
   private constructor(
-    public readonly serviceId: ServiceId,
     public readonly action:
       | {
           kind: UpdateStorageKind.Set;
@@ -10812,12 +10807,12 @@ declare class UpdateStorage {
         },
   ) {}

-  static set({
-    return new UpdateStorage(
+  static set({ storage }: { storage: StorageItem }) {
+    return new UpdateStorage({ kind: UpdateStorageKind.Set, storage });
   }

-  static remove({
-    return new UpdateStorage(
+  static remove({ key }: { key: StorageKey }) {
+    return new UpdateStorage({ kind: UpdateStorageKind.Remove, key });
   }

   get key() {
@@ -10835,16 +10830,17 @@ declare class UpdateStorage {
   }
 }

-// TODO [ToDr] This would be more convenient to use if the data was grouped by `ServiceId`.
 type ServicesUpdate = {
   /** Service ids to remove from state alongside all their data. */
-
-  /** Services
-
+  removed: ServiceId[];
+  /** Services newly created. */
+  created: ServiceId[];
+  /** Services to update. */
+  updated: Map<ServiceId, UpdateService>;
   /** Service preimages to update and potentially lookup history */
-  preimages: UpdatePreimage[]
+  preimages: Map<ServiceId, UpdatePreimage[]>;
   /** Service storage to update. */
-  storage: UpdateStorage[]
+  storage: Map<ServiceId, UpdateStorage[]>;
 };

 declare enum UpdateError {
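`ServicesUpdate` is now grouped by `ServiceId`: the per-item `serviceId` argument was dropped from the `UpdatePreimage`/`UpdateService`/`UpdateStorage` constructors and moved into the surrounding maps. A sketch of building such an update; the `declare const` placeholders are hypothetical values, only the shapes above are assumed:

```ts
// Hypothetical inputs, declared for illustration only.
declare const serviceId: ServiceId;
declare const newInfo: ServiceAccountInfo;
declare const preimageItem: PreimageItem;
declare const storageItem: StorageItem;
declare const slot: TimeSlot | null;

const servicesUpdate: ServicesUpdate = {
  removed: [],
  created: [],
  // one pending account update per service
  updated: new Map([[serviceId, UpdateService.update({ serviceInfo: newInfo })]]),
  // preimage and storage updates are lists per service
  preimages: new Map([[serviceId, [UpdatePreimage.provide({ preimage: preimageItem, slot })]]]),
  storage: new Map([[serviceId, [UpdateStorage.set({ storage: storageItem })]]]),
};
```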
@@ -11040,13 +11036,13 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
    * Modify the state and apply a single state update.
    */
   applyUpdate(update: Partial<State & ServicesUpdate>): Result$2<OK, UpdateError> {
-    const {
+    const { removed, created: _, updated, preimages, storage, ...rest } = update;
     // just assign all other variables
     Object.assign(this, rest);

     // and update the services state
     let result: Result<OK, UpdateError>;
-    result = this.updateServices(
+    result = this.updateServices(updated);
     if (result.isError) {
       return result;
     }
@@ -11058,7 +11054,7 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
     if (result.isError) {
       return result;
     }
-    this.removeServices(
+    this.removeServices(removed);

     return Result.ok(OK);
   }
@@ -11070,93 +11066,108 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
     }
   }

-  private updateStorage(
-
-
-
-
-
-
-
-  )
-
+  private updateStorage(storageUpdates: Map<ServiceId, UpdateStorage[]> | undefined): Result$2<OK, UpdateError> {
+    if (storageUpdates === undefined) {
+      return Result.ok(OK);
+    }
+    for (const [serviceId, updates] of storageUpdates.entries()) {
+      for (const update of updates) {
+        const { kind } = update.action;
+        const service = this.services.get(serviceId);
+        if (service === undefined) {
+          return Result.error(
+            UpdateError.NoService,
+            () => `Attempting to update storage of non-existing service: ${serviceId}`,
+          );
+        }

-
-
-
-
-
-
+        if (kind === UpdateStorageKind.Set) {
+          const { key, value } = update.action.storage;
+          service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
+        } else if (kind === UpdateStorageKind.Remove) {
+          const { key } = update.action;
+          check`
             ${service.data.storage.has(key.toString())}
-            Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+            Attempting to remove non-existing storage item at ${serviceId}: ${update.action.key}
           `;
-
-
-
+          service.data.storage.delete(key.toString());
+        } else {
+          assertNever(kind);
+        }
       }
     }
-
     return Result.ok(OK);
   }

-  private updatePreimages(
-
+  private updatePreimages(preimagesUpdates: Map<ServiceId, UpdatePreimage[]> | undefined): Result$2<OK, UpdateError> {
+    if (preimagesUpdates === undefined) {
+      return Result.ok(OK);
+    }
+    for (const [serviceId, updates] of preimagesUpdates.entries()) {
       const service = this.services.get(serviceId);
       if (service === undefined) {
         return Result.error(
           UpdateError.NoService,
-          `Attempting to update preimage of non-existing service: ${serviceId}`,
+          () => `Attempting to update preimage of non-existing service: ${serviceId}`,
         );
       }
-      const
-
-
-
-
-
-
-
-
-        const length = tryAsU32(preimage.blob.length);
-        const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
-        if (lookupHistory === undefined) {
-          // no lookup history for that preimage at all (edge case, should be requested)
-          service.data.lookupHistory.set(preimage.hash, [lookup]);
-        } else {
-          // insert or replace exiting entry
-          const index = lookupHistory.map((x) => x.length).indexOf(length);
-          lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
+      for (const update of updates) {
+        const { kind } = update.action;
+        if (kind === UpdatePreimageKind.Provide) {
+          const { preimage, slot } = update.action;
+          if (service.data.preimages.has(preimage.hash)) {
+            return Result.error(
+              UpdateError.PreimageExists,
+              () => `Overwriting existing preimage at ${serviceId}: ${preimage}`,
+            );
           }
+          service.data.preimages.set(preimage.hash, preimage);
+          if (slot !== null) {
+            const lookupHistory = service.data.lookupHistory.get(preimage.hash);
+            const length = tryAsU32(preimage.blob.length);
+            const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
+            if (lookupHistory === undefined) {
+              // no lookup history for that preimage at all (edge case, should be requested)
+              service.data.lookupHistory.set(preimage.hash, [lookup]);
+            } else {
+              // insert or replace exiting entry
+              const index = lookupHistory.map((x) => x.length).indexOf(length);
+              lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
+            }
+          }
+        } else if (kind === UpdatePreimageKind.Remove) {
+          const { hash, length } = update.action;
+          service.data.preimages.delete(hash);
+          const history = service.data.lookupHistory.get(hash) ?? [];
+          const idx = history.map((x) => x.length).indexOf(length);
+          if (idx !== -1) {
+            history.splice(idx, 1);
+          }
+        } else if (kind === UpdatePreimageKind.UpdateOrAdd) {
+          const { item } = update.action;
+          const history = service.data.lookupHistory.get(item.hash) ?? [];
+          const existingIdx = history.map((x) => x.length).indexOf(item.length);
+          const removeCount = existingIdx === -1 ? 0 : 1;
+          history.splice(existingIdx, removeCount, item);
+          service.data.lookupHistory.set(item.hash, history);
+        } else {
+          assertNever(kind);
         }
-      } else if (kind === UpdatePreimageKind.Remove) {
-        const { hash, length } = action;
-        service.data.preimages.delete(hash);
-        const history = service.data.lookupHistory.get(hash) ?? [];
-        const idx = history.map((x) => x.length).indexOf(length);
-        if (idx !== -1) {
-          history.splice(idx, 1);
-        }
-      } else if (kind === UpdatePreimageKind.UpdateOrAdd) {
-        const { item } = action;
-        const history = service.data.lookupHistory.get(item.hash) ?? [];
-        const existingIdx = history.map((x) => x.length).indexOf(item.length);
-        const removeCount = existingIdx === -1 ? 0 : 1;
-        history.splice(existingIdx, removeCount, item);
-        service.data.lookupHistory.set(item.hash, history);
-      } else {
-        assertNever(kind);
       }
     }
     return Result.ok(OK);
   }

-  private updateServices(servicesUpdates
-
-
+  private updateServices(servicesUpdates: Map<ServiceId, UpdateService> | undefined): Result$2<OK, UpdateError> {
+    if (servicesUpdates === undefined) {
+      return Result.ok(OK);
+    }
+    for (const [serviceId, update] of servicesUpdates.entries()) {
+      const { kind, account } = update.action;
       if (kind === UpdateServiceKind.Create) {
-        const { lookupHistory } = action;
+        const { lookupHistory } = update.action;
         if (this.services.has(serviceId)) {
-          return Result.error(UpdateError.DuplicateService, `${serviceId} already exists!`);
+          return Result.error(UpdateError.DuplicateService, () => `${serviceId} already exists!`);
         }
         this.services.set(
           serviceId,
@@ -11172,7 +11183,7 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
       } else if (kind === UpdateServiceKind.Update) {
         const existingService = this.services.get(serviceId);
         if (existingService === undefined) {
-          return Result.error(UpdateError.NoService, `Cannot update ${serviceId} because it does not exist.`);
+          return Result.error(UpdateError.NoService, () => `Cannot update ${serviceId} because it does not exist.`);
         }
         existingService.data.info = account;
       } else {
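`InMemoryState.applyUpdate` consumes the Map-keyed collections and surfaces failures through the lazy `details` thunk. A small sketch of calling it; `state`, `serviceId` and `storageItem` are hypothetical placeholders:

```ts
declare const state: InMemoryState;
declare const serviceId: ServiceId;
declare const storageItem: StorageItem;

const result = state.applyUpdate({
  storage: new Map([[serviceId, [UpdateStorage.set({ storage: storageItem })]]]),
});
if (result.isError) {
  // the error message is only rendered on the failure path
  throw new Error(`${result.error}: ${result.details()}`);
}
```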
@@ -11975,89 +11986,104 @@ declare function* serializeStateUpdate(
   const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);

   // then let's proceed with service updates
-  yield* serializeServiceUpdates(update.
+  yield* serializeServiceUpdates(update.updated, encode, blake2b);
   yield* serializePreimages(update.preimages, encode, blake2b);
   yield* serializeStorage(update.storage, blake2b);
-  yield* serializeRemovedServices(update.
+  yield* serializeRemovedServices(update.removed);
 }

 declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefined): Generator<StateEntryUpdate> {
-
+  if (servicesRemoved === undefined) {
+    return;
+  }
+  for (const serviceId of servicesRemoved) {
     // TODO [ToDr] what about all data associated with a service?
     const codec = serialize.serviceData(serviceId);
     yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
   }
 }

-declare function* serializeStorage(
-
-
-
-
-
-
-
-
-
-
-
-
-
+declare function* serializeStorage(
+  storageUpdates: Map<ServiceId, UpdateStorage[]> | undefined,
+  blake2b: Blake2b,
+): Generator<StateEntryUpdate> {
+  if (storageUpdates === undefined) {
+    return;
+  }
+  for (const [serviceId, updates] of storageUpdates.entries()) {
+    for (const { action } of updates) {
+      switch (action.kind) {
+        case UpdateStorageKind.Set: {
+          const key = action.storage.key;
+          const codec = serialize.serviceStorage(blake2b, serviceId, key);
+          yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
+          break;
+        }
+        case UpdateStorageKind.Remove: {
+          const key = action.key;
+          const codec = serialize.serviceStorage(blake2b, serviceId, key);
+          yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+          break;
+        }
       }
-      default:
-        assertNever(action);
     }
   }
 }

 declare function* serializePreimages(
-
+  preimagesUpdates: Map<ServiceId, UpdatePreimage[]> | undefined,
   encode: EncodeFun,
   blake2b: Blake2b,
 ): Generator<StateEntryUpdate> {
-
-
-
-
-
-
-
-
-      const
-      yield [
-
-
-
-
+  if (preimagesUpdates === undefined) {
+    return;
+  }
+  for (const [serviceId, updates] of preimagesUpdates.entries()) {
+    for (const { action } of updates) {
+      switch (action.kind) {
+        case UpdatePreimageKind.Provide: {
+          const { hash, blob } = action.preimage;
+          const codec = serialize.servicePreimages(blake2b, serviceId, hash);
+          yield [StateEntryUpdateAction.Insert, codec.key, blob];
+
+          if (action.slot !== null) {
+            const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
+            yield [
+              StateEntryUpdateAction.Insert,
+              codec2.key,
+              encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
+            ];
+          }
+          break;
         }
-
-
-
-
-
-
-
-
-
-
-        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
-        yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+        case UpdatePreimageKind.UpdateOrAdd: {
+          const { hash, length, slots } = action.item;
+          const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+          yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
+          break;
+        }
+        case UpdatePreimageKind.Remove: {
+          const { hash, length } = action;
+          const codec = serialize.servicePreimages(blake2b, serviceId, hash);
+          yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];

-
-
-
+          const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+          yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
+          break;
+        }
       }
-      default:
-        assertNever(action);
     }
   }
 }
 declare function* serializeServiceUpdates(
-  servicesUpdates: UpdateService
+  servicesUpdates: Map<ServiceId, UpdateService> | undefined,
   encode: EncodeFun,
   blake2b: Blake2b,
 ): Generator<StateEntryUpdate> {
-
+  if (servicesUpdates === undefined) {
+    return;
+  }
+  for (const [serviceId, { action }] of servicesUpdates.entries()) {
     // new service being created or updated
     const codec = serialize.serviceData(serviceId);
     yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, action.account)];
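The serialization generators now take the per-service maps (or `undefined` to skip a section) and keep yielding `StateEntryUpdate` entries. A hypothetical sink that drains such a generator, assuming the entries are the `[action, key, value]` triples the `yield` statements above produce and that the key object stringifies usefully:

```ts
// Illustrative consumer only; `db` stands in for whatever backend applies the updates.
function applyEntries(entries: Generator<StateEntryUpdate>, db: Map<string, unknown>): void {
  for (const [action, key, value] of entries) {
    if (action === StateEntryUpdateAction.Insert) {
      db.set(key.toString(), value);
    } else if (action === StateEntryUpdateAction.Remove) {
      db.delete(key.toString());
    }
  }
}
```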
@@ -12663,7 +12689,7 @@ declare class LeafDb implements SerializedStateBackend {
     if (blob.length % TRIE_NODE_BYTES !== 0) {
       return Result.error(
         LeafDbError.InvalidLeafData,
-        `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`,
+        () => `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`,
       );
     }

@@ -12671,7 +12697,7 @@ declare class LeafDb implements SerializedStateBackend {
     for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
       const node = new TrieNode(nodeData.raw);
       if (node.getNodeType() === NodeType.Branch) {
-        return Result.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
+        return Result.error(LeafDbError.InvalidLeafData, () => `Branch node detected: ${nodeData}`);
       }
       leaves.insert(node.asLeafNode());
     }
@@ -15483,7 +15509,7 @@ declare class Memory {
     const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);

     if (pagesResult.isError) {
-      return Result.error(pagesResult.error);
+      return Result.error(pagesResult.error, pagesResult.details);
     }

     const pages = pagesResult.ok;
@@ -15516,17 +15542,23 @@ declare class Memory {

     for (const pageNumber of pageRange) {
       if (pageNumber < RESERVED_NUMBER_OF_PAGES) {
-        return Result.error(
+        return Result.error(
+          PageFault.fromPageNumber(pageNumber, true),
+          () => `Page fault: attempted to access reserved page ${pageNumber}`,
+        );
       }

       const page = this.memory.get(pageNumber);

       if (page === undefined) {
-        return Result.error(PageFault.fromPageNumber(pageNumber));
+        return Result.error(PageFault.fromPageNumber(pageNumber), () => `Page fault: page ${pageNumber} not allocated`);
       }

       if (accessType === AccessType.WRITE && !page.isWriteable()) {
-        return Result.error(
+        return Result.error(
+          PageFault.fromPageNumber(pageNumber, true),
+          () => `Page fault: attempted to write to read-only page ${pageNumber}`,
+        );
       }

       pages.push(page);
@@ -15548,7 +15580,7 @@ declare class Memory {
     const pagesResult = this.getPages(startAddress, result.length, AccessType.READ);

     if (pagesResult.isError) {
-      return Result.error(pagesResult.error);
+      return Result.error(pagesResult.error, pagesResult.details);
     }

     const pages = pagesResult.ok;
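`Memory.getPages` now attaches a lazy page-fault description, and callers forward both the error and its `details` thunk untouched (`Result.error(pagesResult.error, pagesResult.details)` above). The same pattern one level up, as a sketch; the wrapper, `memory`, `buf` and `address` names are hypothetical, and `loadInto` returning a `Result` is inferred from the `HostCallMemory` code further below:

```ts
declare const memory: Memory;
declare const buf: Uint8Array;
declare const address: MemoryIndex; // assumed branded index type (see tryAsMemoryIndex below)

const res = memory.loadInto(buf, address);
if (res.isError) {
  // the page-fault message is rendered only when we actually report it
  throw new Error(`load failed: ${res.details()}`);
}
```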
@@ -17533,7 +17565,7 @@ declare class ProgramDecoder {
       return Result.ok(new ProgramDecoder(program));
     } catch (e) {
       logger.error`Invalid program: ${e}`;
-      return Result.error(ProgramDecoderError.InvalidProgramError);
+      return Result.error(ProgramDecoderError.InvalidProgramError, () => `Program decoder error: ${e}`);
     }
   }
 }
@@ -17891,7 +17923,10 @@ declare class HostCallMemory implements IHostCallMemory {
     }

     if (address + tryAsU64(bytes.length) > MEMORY_SIZE) {
-      return Result.error(
+      return Result.error(
+        new OutOfBounds(),
+        () => `Memory access out of bounds: address ${address} + length ${bytes.length} exceeds memory size`,
+      );
     }

     return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
@@ -17903,7 +17938,10 @@ declare class HostCallMemory implements IHostCallMemory {
     }

     if (startAddress + tryAsU64(result.length) > MEMORY_SIZE) {
-      return Result.error(
+      return Result.error(
+        new OutOfBounds(),
+        () => `Memory access out of bounds: address ${startAddress} + length ${result.length} exceeds memory size`,
+      );
     }

     return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
@@ -18375,6 +18413,17 @@ type InsufficientFundsError = typeof InsufficientFundsError;
 type ServiceStateUpdate = Partial<Pick<State, "privilegedServices" | "authQueues" | "designatedValidatorData">> &
   ServicesUpdate;

+/** Deep clone of a map with array. */
+declare function deepCloneMapWithArray<K, V>(map: Map<K, V[]>): Map<K, V[]> {
+  const cloned: [K, V[]][] = [];
+
+  for (const [k, v] of map.entries()) {
+    cloned.push([k, v.slice()]);
+  }
+
+  return new Map(cloned);
+}
+
 /**
  * State updates that currently accumulating service produced.
  *
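`deepCloneMapWithArray` copies the outer map and each per-service array while keeping the update objects themselves shared, which is exactly the depth `AccumulationStateUpdate.copyFrom` needs for its checkpoints below. A small usage sketch with hypothetical placeholders:

```ts
declare const serviceId: ServiceId;
declare const storageItem: StorageItem;

const storage = new Map<ServiceId, UpdateStorage[]>([
  [serviceId, [UpdateStorage.set({ storage: storageItem })]],
]);
const checkpoint = deepCloneMapWithArray(storage);

// mutating the live map does not touch the checkpoint...
storage.get(serviceId)?.push(UpdateStorage.remove({ key: storageItem.key }));
// ...but the UpdateStorage instances inside are still shared, not cloned.
```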
@@ -18402,10 +18451,11 @@ declare class AccumulationStateUpdate {
   static empty(): AccumulationStateUpdate {
     return new AccumulationStateUpdate(
       {
-
-
-
-
+        created: [],
+        updated: new Map(),
+        removed: [],
+        preimages: new Map(),
+        storage: new Map(),
       },
       [],
     );
@@ -18424,10 +18474,13 @@ declare class AccumulationStateUpdate {
   /** Create a copy of another `StateUpdate`. Used by checkpoints. */
   static copyFrom(from: AccumulationStateUpdate): AccumulationStateUpdate {
     const serviceUpdates: ServicesUpdate = {
-
-
-
-
+      // shallow copy
+      created: [...from.services.created],
+      updated: new Map(from.services.updated),
+      removed: [...from.services.removed],
+      // deep copy
+      preimages: deepCloneMapWithArray(from.services.preimages),
+      storage: deepCloneMapWithArray(from.services.storage),
     };
     const transfers = [...from.transfers];
     const update = new AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
@@ -18485,12 +18538,10 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
       return null;
     }

-    const
-      (update) => update.serviceId === destination,
-    );
+    const maybeUpdatedServiceInfo = this.stateUpdate.services.updated.get(destination);

-    if (
-      return
+    if (maybeUpdatedServiceInfo !== undefined) {
+      return maybeUpdatedServiceInfo.action.account;
     }

     const maybeService = this.state.getService(destination);
@@ -18502,7 +18553,8 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
   }

   getStorage(serviceId: ServiceId, rawKey: StorageKey): BytesBlob | null {
-    const
+    const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+    const item = storages.find((x) => x.key.isEqualTo(rawKey));
     if (item !== undefined) {
       return item.value;
     }
@@ -18519,10 +18571,11 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
    * the existence in `preimages` map.
    */
   hasPreimage(serviceId: ServiceId, hash: PreimageHash): boolean {
-    const
+    const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+    const providedPreimage = preimages.find(
       // we ignore the action here, since if there is <any> update on that
       // hash it means it has to exist, right?
-      (p) => p.
+      (p) => p.hash.isEqualTo(hash),
     );
     if (providedPreimage !== undefined) {
       return true;
@@ -18539,9 +18592,8 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {

   getPreimage(serviceId: ServiceId, hash: PreimageHash): BytesBlob | null {
     // TODO [ToDr] Should we verify availability here?
-    const
-
-    );
+    const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+    const freshlyProvided = preimages.find((x) => x.hash.isEqualTo(hash));
     if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
       return freshlyProvided.action.preimage.blob;
     }
@@ -18557,11 +18609,12 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
     hash: PreimageHash,
     length: U64,
   ): LookupHistoryItem | null {
+    const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
     // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
     // the same state update. We should however switch to proper "updated state"
     // representation soon.
-    const updatedPreimage =
-      (update) => update.
+    const updatedPreimage = preimages.findLast(
+      (update) => update.hash.isEqualTo(hash) && BigInt(update.length) === length,
     );

     const stateFallback = () => {
@@ -18604,21 +18657,19 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
   }

   /* State update functions. */
-
   updateStorage(serviceId: ServiceId, key: StorageKey, value: BytesBlob | null) {
     const update =
       value === null
-        ? UpdateStorage.remove({
+        ? UpdateStorage.remove({ key })
         : UpdateStorage.set({
-            serviceId,
             storage: StorageItem.create({ key, value }),
           });

-    const
-
-    );
+    const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+    const index = storages.findIndex((x) => x.key.isEqualTo(key));
     const count = index === -1 ? 0 : 1;
-
+    storages.splice(index, count, update);
+    this.stateUpdate.services.storage.set(serviceId, storages);
   }

   /**
@@ -18627,8 +18678,10 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
    * Note we store all previous entries as well, since there might be a sequence of:
    * `provide` -> `remove` and both should update the end state somehow.
    */
-  updatePreimage(newUpdate: UpdatePreimage) {
-    this.stateUpdate.services.preimages.
+  updatePreimage(serviceId: ServiceId, newUpdate: UpdatePreimage) {
+    const updatePreimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+    updatePreimages.push(newUpdate);
+    this.stateUpdate.services.preimages.set(serviceId, updatePreimages);
   }

   updateServiceStorageUtilisation(
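Because the pending updates are now grouped per service, `PartiallyUpdatedState.updatePreimage` takes the `serviceId` explicitly (as `updateStorage` already did). A usage sketch; all `declare const` values are hypothetical placeholders:

```ts
declare const partialState: PartiallyUpdatedState;
declare const serviceId: ServiceId;
declare const key: StorageKey;
declare const value: BytesBlob;
declare const preimageItem: PreimageItem;
declare const slot: TimeSlot | null;

partialState.updateStorage(serviceId, key, value); // pass null as value to remove the entry
partialState.updatePreimage(
  serviceId, // serviceId is now an explicit argument
  UpdatePreimage.provide({ preimage: preimageItem, slot }),
);
```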
@@ -18645,12 +18698,18 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {

     // TODO [ToDr] this is not specified in GP, but it seems sensible.
     if (overflowItems || overflowBytes) {
-      return Result.error(
+      return Result.error(
+        InsufficientFundsError,
+        () => `Storage utilisation overflow: items=${overflowItems}, bytes=${overflowBytes}`,
+      );
     }

     const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
     if (serviceInfo.balance < thresholdBalance) {
-      return Result.error(
+      return Result.error(
+        InsufficientFundsError,
+        () => `Service balance (${serviceInfo.balance}) below threshold (${thresholdBalance})`,
+      );
     }

     // Update service info with new details.
@@ -18666,34 +18725,38 @@ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
   }

   updateServiceInfo(serviceId: ServiceId, newInfo: ServiceAccountInfo) {
-    const
-
-
-
-
-    this.stateUpdate.services.servicesUpdates.splice(
-      idx,
-      toRemove,
+    const existingUpdate = this.stateUpdate.services.updated.get(serviceId);
+
+    if (existingUpdate?.action.kind === UpdateServiceKind.Create) {
+      this.stateUpdate.services.updated.set(
+        serviceId,
         UpdateService.create({
-          serviceId,
           serviceInfo: newInfo,
-          lookupHistory:
+          lookupHistory: existingUpdate.action.lookupHistory,
         }),
       );
-
       return;
     }

-    this.stateUpdate.services.
-
-      toRemove,
+    this.stateUpdate.services.updated.set(
+      serviceId,
       UpdateService.update({
-        serviceId,
         serviceInfo: newInfo,
       }),
     );
   }

+  createService(serviceId: ServiceId, newInfo: ServiceAccountInfo, newLookupHistory: LookupHistoryItem) {
+    this.stateUpdate.services.created.push(serviceId);
+    this.stateUpdate.services.updated.set(
+      serviceId,
+      UpdateService.create({
+        serviceInfo: newInfo,
+        lookupHistory: newLookupHistory,
+      }),
+    );
+  }
+
   getPrivilegedServices() {
     if (this.stateUpdate.privilegedServices !== null) {
       return this.stateUpdate.privilegedServices;
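The new `createService` helper records the id in `created` and the account in `updated` in one step. A sketch with hypothetical placeholders:

```ts
declare const partialState: PartiallyUpdatedState;
declare const newServiceId: ServiceId;
declare const newInfo: ServiceAccountInfo;
declare const lookupHistoryItem: LookupHistoryItem;

partialState.createService(newServiceId, newInfo, lookupHistoryItem);
// A later updateServiceInfo(newServiceId, ...) keeps the Create kind (and its
// lookup history) instead of downgrading the entry to a plain Update.
```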
@@ -18826,6 +18889,7 @@ declare const index$6_UpdatePrivilegesError: typeof UpdatePrivilegesError;
 type index$6_ZeroVoidError = ZeroVoidError;
 declare const index$6_ZeroVoidError: typeof ZeroVoidError;
 declare const index$6_clampU64ToU32: typeof clampU64ToU32;
+declare const index$6_deepCloneMapWithArray: typeof deepCloneMapWithArray;
 declare const index$6_getServiceId: typeof getServiceId;
 declare const index$6_getServiceIdOrCurrent: typeof getServiceIdOrCurrent;
 declare const index$6_preimageLenAsU32: typeof preimageLenAsU32;
@@ -18835,7 +18899,7 @@ declare const index$6_tryAsMachineId: typeof tryAsMachineId;
 declare const index$6_tryAsProgramCounter: typeof tryAsProgramCounter;
 declare const index$6_writeServiceIdAsLeBytes: typeof writeServiceIdAsLeBytes;
 declare namespace index$6 {
-  export { index$6_AccumulationStateUpdate as AccumulationStateUpdate, index$6_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$6_EjectError as EjectError, index$6_ForgetPreimageError as ForgetPreimageError, index$6_HostCallResult as HostCallResult, index$6_MAX_U32 as MAX_U32, index$6_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$6_MachineInstance as MachineInstance, index$6_MemoryOperation as MemoryOperation, index$6_NewServiceError as NewServiceError, index$6_PagesError as PagesError, index$6_PartiallyUpdatedState as PartiallyUpdatedState, index$6_PeekPokeError as PeekPokeError, index$6_PendingTransfer as PendingTransfer, index$6_PreimageStatusKind as PreimageStatusKind, index$6_ProvidePreimageError as ProvidePreimageError, index$6_RequestPreimageError as RequestPreimageError, index$6_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$6_TransferError as TransferError, index$6_UpdatePrivilegesError as UpdatePrivilegesError, index$6_ZeroVoidError as ZeroVoidError, index$6_clampU64ToU32 as clampU64ToU32, index$6_getServiceId as getServiceId, index$6_getServiceIdOrCurrent as getServiceIdOrCurrent, index$6_preimageLenAsU32 as preimageLenAsU32, index$6_slotsToPreimageStatus as slotsToPreimageStatus, index$6_toMemoryOperation as toMemoryOperation, index$6_tryAsMachineId as tryAsMachineId, index$6_tryAsProgramCounter as tryAsProgramCounter, index$6_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
+  export { index$6_AccumulationStateUpdate as AccumulationStateUpdate, index$6_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$6_EjectError as EjectError, index$6_ForgetPreimageError as ForgetPreimageError, index$6_HostCallResult as HostCallResult, index$6_MAX_U32 as MAX_U32, index$6_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$6_MachineInstance as MachineInstance, index$6_MemoryOperation as MemoryOperation, index$6_NewServiceError as NewServiceError, index$6_PagesError as PagesError, index$6_PartiallyUpdatedState as PartiallyUpdatedState, index$6_PeekPokeError as PeekPokeError, index$6_PendingTransfer as PendingTransfer, index$6_PreimageStatusKind as PreimageStatusKind, index$6_ProvidePreimageError as ProvidePreimageError, index$6_RequestPreimageError as RequestPreimageError, index$6_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$6_TransferError as TransferError, index$6_UpdatePrivilegesError as UpdatePrivilegesError, index$6_ZeroVoidError as ZeroVoidError, index$6_clampU64ToU32 as clampU64ToU32, index$6_deepCloneMapWithArray as deepCloneMapWithArray, index$6_getServiceId as getServiceId, index$6_getServiceIdOrCurrent as getServiceIdOrCurrent, index$6_preimageLenAsU32 as preimageLenAsU32, index$6_slotsToPreimageStatus as slotsToPreimageStatus, index$6_toMemoryOperation as toMemoryOperation, index$6_tryAsMachineId as tryAsMachineId, index$6_tryAsProgramCounter as tryAsProgramCounter, index$6_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
   export type { index$6_InsufficientFundsError as InsufficientFundsError, index$6_MachineId as MachineId, index$6_MachineResult as MachineResult, index$6_MachineStatus as MachineStatus, index$6_NoMachineError as NoMachineError, index$6_PartialState as PartialState, index$6_PreimageStatus as PreimageStatus, index$6_ProgramCounter as ProgramCounter, index$6_RefineExternalities as RefineExternalities, index$6_SegmentExportError as SegmentExportError, index$6_ServiceStateUpdate as ServiceStateUpdate, index$6_StateSlice as StateSlice, index$6_TRANSFER_MEMO_BYTES as TRANSFER_MEMO_BYTES, index$6_UnprivilegedError as UnprivilegedError };
 }

@@ -19236,6 +19300,7 @@ declare const index$3_check: typeof check;
 declare const index$3_clampU64ToU32: typeof clampU64ToU32;
 declare const index$3_createResults: typeof createResults;
 declare const index$3_decodeStandardProgram: typeof decodeStandardProgram;
+declare const index$3_deepCloneMapWithArray: typeof deepCloneMapWithArray;
 declare const index$3_extractCodeAndMetadata: typeof extractCodeAndMetadata;
 declare const index$3_getServiceId: typeof getServiceId;
 declare const index$3_getServiceIdOrCurrent: typeof getServiceIdOrCurrent;
@@ -19254,7 +19319,7 @@ declare const index$3_tryAsMachineId: typeof tryAsMachineId;
 declare const index$3_tryAsProgramCounter: typeof tryAsProgramCounter;
 declare const index$3_writeServiceIdAsLeBytes: typeof writeServiceIdAsLeBytes;
 declare namespace index$3 {
-  export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$l as block, index$s as bytes, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$p as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isBrowser as isBrowser, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$r as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
+  export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$l as block, index$s as bytes, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_deepCloneMapWithArray as deepCloneMapWithArray, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$p as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isBrowser as isBrowser, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$r as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
   export type { index$3_Args as Args, index$3_EnumMapping as EnumMapping, index$3_ErrorResult as ErrorResult, index$3_IHostCallMemory as IHostCallMemory, index$3_IHostCallRegisters as IHostCallRegisters, index$3_InsufficientFundsError as InsufficientFundsError, index$3_MachineId as MachineId, index$3_MachineResult as MachineResult, index$3_MachineStatus as MachineStatus, index$3_NoMachineError as NoMachineError, index$3_OK as OK, index$3_OkResult as OkResult, index$3_Opaque as Opaque, index$3_PartialState as PartialState, index$3_PreimageStatus as PreimageStatus, index$3_ProgramCounter as ProgramCounter, index$3_RefineExternalities as RefineExternalities, index$3_SegmentExportError as SegmentExportError, index$3_ServiceStateUpdate as ServiceStateUpdate, index$3_StateSlice as StateSlice, index$3_StringLiteral as StringLiteral, index$3_TRANSFER_MEMO_BYTES as TRANSFER_MEMO_BYTES, index$3_TaggedError as TaggedError, index$3_TokenOf as TokenOf, index$3_Uninstantiable as Uninstantiable, index$3_UnprivilegedError as UnprivilegedError, index$3_WithOpaque as WithOpaque };
 }

@@ -20084,12 +20149,15 @@ declare class Preimages {
         prevPreimage.requester > currPreimage.requester ||
         currPreimage.blob.compare(prevPreimage.blob).isLessOrEqual()
       ) {
-        return Result.error(
+        return Result.error(
+          PreimagesErrorCode.PreimagesNotSortedUnique,
+          () => `Preimages not sorted/unique at index ${i}`,
+        );
       }
     }

     const { preimages, slot } = input;
-    const pendingChanges
+    const pendingChanges = new Map<ServiceId, UpdatePreimage[]>();

     // select preimages for integration
     for (const preimage of preimages) {
@@ -20098,7 +20166,7 @@ declare class Preimages {

       const service = this.state.getService(requester);
       if (service === null) {
-        return Result.error(PreimagesErrorCode.AccountNotFound);
+        return Result.error(PreimagesErrorCode.AccountNotFound, () => `Service not found: ${requester}`);
       }

       const hasPreimage = service.hasPreimage(hash);
@@ -20106,17 +20174,22 @@ declare class Preimages {
       // https://graypaper.fluffylabs.dev/#/5f542d7/181800181900
       // https://graypaper.fluffylabs.dev/#/5f542d7/116f0011a500
       if (hasPreimage || slots === null || !LookupHistoryItem.isRequested(slots)) {
-        return Result.error(
+        return Result.error(
+          PreimagesErrorCode.PreimageUnneeded,
+          () =>
+            `Preimage unneeded: requester=${requester}, hash=${hash}, hasPreimage=${hasPreimage}, isRequested=${slots !== null && LookupHistoryItem.isRequested(slots)}`,
+        );
       }

       // https://graypaper.fluffylabs.dev/#/5f542d7/18c00018f300
-      pendingChanges.
+      const updates = pendingChanges.get(requester) ?? [];
+      updates.push(
         UpdatePreimage.provide({
-          serviceId: requester,
           preimage: PreimageItem.create({ hash, blob }),
           slot,
         }),
       );
+      pendingChanges.set(requester, updates);
     }

     return Result.ok({