@typeberry/lib 0.2.0-e767e74 → 0.2.0-f506473
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +252 -209
- package/index.d.ts +1025 -957
- package/index.js +252 -209
- package/package.json +1 -1
package/index.cjs
CHANGED
@@ -304,7 +304,7 @@ function resultToString(res) {
     if (res.isOk) {
         return `OK: ${typeof res.ok === "symbol" ? res.ok.toString() : res.ok}`;
     }
-    return `${res.details}\nError: ${maybeTaggedErrorToString(res.error)}`;
+    return `${res.details()}\nError: ${maybeTaggedErrorToString(res.error)}`;
 }
 /** An indication of two possible outcomes returned from a function. */
 const Result$1 = {
@@ -318,7 +318,7 @@ const Result$1 = {
         };
     },
     /** Create new [`Result`] with `Error` status. */
-    error: (error, details
+    error: (error, details) => {
         check `${error !== undefined} 'Error' type cannot be undefined.`;
         return {
             isOk: false,
@@ -431,7 +431,7 @@ function deepEqual(actual, expected, { context = [], errorsCollector, ignore = [
     }
     if (actual.isError && expected.isError) {
         deepEqual(actual.error, expected.error, { context: ctx.concat(["error"]), errorsCollector: errors, ignore });
-        deepEqual(actual.details, expected.details, {
+        deepEqual(actual.details(), expected.details(), {
             context: ctx.concat(["details"]),
            errorsCollector: errors,
            // display details when error does not match
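The three hunks above switch `Result` error details from an eager value to a lazy callback: the second argument of `Result$1.error` is now a function, and consumers read it by calling `res.details()` instead of accessing `res.details`. A minimal sketch of the change from a caller's point of view (`Result` stands for the public export of the bundled `Result$1`; `SomeError` and `id` are placeholders, not part of the package):

    // Before this release the details were passed as a plain value:
    //   Result.error(SomeError, `item ${id} not found`);
    // Now the message is only built when somebody actually asks for it:
    const res = Result.error(SomeError, () => `item ${id} not found`);
    if (!res.isOk) {
        console.log(res.details()); // resultToString does the same: `${res.details()}\nError: ...`
    }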
@@ -1122,8 +1122,8 @@ class Decoder {
     /**
      * Create a new [`Decoder`] instance given a raw array of bytes as a source.
      */
-    static fromBlob(source) {
-        return new Decoder(source);
+    static fromBlob(source, context) {
+        return new Decoder(source, undefined, context);
     }
     /**
      * Decode a single object from all of the source bytes.
@@ -1418,7 +1418,7 @@ class Decoder {
     ensureHasBytes(bytes) {
         check `${bytes >= 0} Negative number of bytes given.`;
         if (this.offset + bytes > this.source.length) {
-            throw new
+            throw new EndOfDataError(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
         }
     }
 }
@@ -1432,6 +1432,8 @@ function decodeVariableLengthExtraBytes(firstByte) {
     }
     return 0;
 }
+class EndOfDataError extends Error {
+}

 /** Wrapper for `Decoder` that can skip bytes of fields in the data buffer instead of decoding them. */
 class Skipper {
@@ -2448,6 +2450,9 @@ function forEachDescriptor(descriptors, f) {
             f(k, descriptors[k]);
         }
         catch (e) {
+            if (e instanceof EndOfDataError) {
+                throw new EndOfDataError(`${key}: ${e}`);
+            }
             throw new Error(`${key}: ${e}`);
         }
     }
@@ -2525,6 +2530,7 @@ var index$q = /*#__PURE__*/Object.freeze({
     Decoder: Decoder,
     Descriptor: Descriptor,
     Encoder: Encoder,
+    EndOfDataError: EndOfDataError,
     ObjectView: ObjectView,
     SequenceView: SequenceView,
     TYPICAL_DICTIONARY_LENGTH: TYPICAL_DICTIONARY_LENGTH,
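Taken together, these hunks add a dedicated `EndOfDataError` (thrown by `Decoder.ensureHasBytes`, re-thrown with the field name by `forEachDescriptor`, and now exported from the frozen codec namespace) and an optional second `context` argument on `Decoder.fromBlob` that is forwarded to the `Decoder` constructor. A sketch of how a consumer might tell a truncated input apart from other decoding failures; `codecNamespace` and `truncatedBytes` are hypothetical names, and only `Decoder`, `EndOfDataError` and the extra `fromBlob` argument come from this diff:

    const { Decoder, EndOfDataError } = codecNamespace;
    try {
        // the second argument is the new, optional decoding context
        const decoder = Decoder.fromBlob(truncatedBytes, "work-report");
        // ... reading past the end of `truncatedBytes` now throws EndOfDataError ...
    }
    catch (e) {
        if (e instanceof EndOfDataError) {
            // input ended early, e.g. "Attempting to decode more data than there is left. Need 32, left: 7."
        }
        else {
            throw e; // other decoding problems still surface as a plain Error
        }
    }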
@@ -7311,9 +7317,7 @@ var chain_spec$1 = {
     id: "typeberry-default",
     bootnodes: [
         "e3r2oc62zwfj3crnuifuvsxvbtlzetk4o5qyhetkhagsc2fgl2oka@127.0.0.1:40000",
-        "
-        "en5ejs5b2tybkfh4ym5vpfh7nynby73xhtfzmazumtvcijpcsz6ma@127.0.0.1:12346",
-        "ekwmt37xecoq6a7otkm4ux5gfmm4uwbat4bg5m223shckhaaxdpqa@127.0.0.1:12347"
+        "eyonydqt7gj7bjdek62lwdeuxdzr5q7nmxa2p5zwwtoijgamdnkka@127.0.0.1:12345"
     ],
genesis_header: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ee155ace9c40292074cb6aff8c9ccdd273c81648ff1149ef36bcea6ebb8a3e25bb30a42c1e62f0afda5f0a4e8a562f7a13a24cea00ee81917b86b89e801314aaff71c6c03ff88adb5ed52c9681de1629a54e702fc14729f6b50d2f0a76f185b34418fb8c85bb3985394a8c2756d3643457ce614546202a2f50b093d762499acedee6d555b82024f1ccf8a1e37e60fa60fd40b1958c4bb3006af78647950e1b91ad93247bd01307550ec7acd757ce6fb805fcf73db364063265b30a949e90d9339326edb21e5541717fde24ec085000b28709847b8aab1ac51f84e94b37ca1b66cab2b9ff25c2410fbe9b8a717abb298c716a03983c98ceb4def2087500b8e3410746846d17469fb2f95ef365efcab9f4e22fa1feb53111c995376be8019981ccf30aa5444688b3cab47697b37d5cac5707bb3289e986b19b17db437206931a8d151e5c8fe2b9d8a606966a79edd2f9e5db47e83947ce368ccba53bf6ba20a40b8b8c5d436f92ecf605421e873a99ec528761eb52a88a2f9a057b3b3003e6f32a2105650944fcd101621fd5bb3124c9fd191d114b7ad936c1d79d734f9f21392eab0084d01534b31c1dd87c81645fd762482a90027754041ca1b56133d0466c0600ffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
     genesis_state: {
@@ -7359,9 +7363,7 @@ var authorship = {
 var chain_spec = {
     id: "typeberry-dev",
     bootnodes: [
-        "
-        "en5ejs5b2tybkfh4ym5vpfh7nynby73xhtfzmazumtvcijpcsz6ma@127.0.0.1:12346",
-        "ekwmt37xecoq6a7otkm4ux5gfmm4uwbat4bg5m223shckhaaxdpqa@127.0.0.1:12347"
+        "eyonydqt7gj7bjdek62lwdeuxdzr5q7nmxa2p5zwwtoijgamdnkka@127.0.0.1:12345"
     ],
genesis_header: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ee155ace9c40292074cb6aff8c9ccdd273c81648ff1149ef36bcea6ebb8a3e25bb30a42c1e62f0afda5f0a4e8a562f7a13a24cea00ee81917b86b89e801314aaff71c6c03ff88adb5ed52c9681de1629a54e702fc14729f6b50d2f0a76f185b34418fb8c85bb3985394a8c2756d3643457ce614546202a2f50b093d762499acedee6d555b82024f1ccf8a1e37e60fa60fd40b1958c4bb3006af78647950e1b91ad93247bd01307550ec7acd757ce6fb805fcf73db364063265b30a949e90d9339326edb21e5541717fde24ec085000b28709847b8aab1ac51f84e94b37ca1b66cab2b9ff25c2410fbe9b8a717abb298c716a03983c98ceb4def2087500b8e3410746846d17469fb2f95ef365efcab9f4e22fa1feb53111c995376be8019981ccf30aa5444688b3cab47697b37d5cac5707bb3289e986b19b17db437206931a8d151e5c8fe2b9d8a606966a79edd2f9e5db47e83947ce368ccba53bf6ba20a40b8b8c5d436f92ecf605421e873a99ec528761eb52a88a2f9a057b3b3003e6f32a2105650944fcd101621fd5bb3124c9fd191d114b7ad936c1d79d734f9f21392eab0084d01534b31c1dd87c81645fd762482a90027754041ca1b56133d0466c0600ffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
     genesis_state: {
@@ -8072,6 +8074,8 @@ function accumulationOutputComparator(a, b) {
 const O = 8;
 /** `Q`: The number of items in the authorizations queue. */
 const Q = 80;
+/** `W_B`: The maximum size of the concatenated variable-size blobs, extrinsics and imported segments of a work-package, in octets */
+Compatibility.isGreaterOrEqual(GpVersion.V0_7_2) ? 13_791_360 : 13_794_305;
 /** `W_T`: The size of a transfer memo in octets. */
 const W_T = 128;
 /**
@@ -9062,31 +9066,29 @@ var UpdatePreimageKind;
  * 3. Update `LookupHistory` with given value.
  */
 class UpdatePreimage {
-    serviceId;
     action;
-    constructor(
-        this.serviceId = serviceId;
+    constructor(action) {
         this.action = action;
     }
     /** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
-    static provide({
-        return new UpdatePreimage(
+    static provide({ preimage, slot }) {
+        return new UpdatePreimage({
             kind: UpdatePreimageKind.Provide,
             preimage,
             slot,
         });
     }
     /** The preimage should be removed completely from the database. */
-    static remove({
-        return new UpdatePreimage(
+    static remove({ hash, length }) {
+        return new UpdatePreimage({
             kind: UpdatePreimageKind.Remove,
             hash,
             length,
         });
     }
     /** Update the lookup history of some preimage or add a new one (request). */
-    static updateOrAdd({
-        return new UpdatePreimage(
+    static updateOrAdd({ lookupHistory }) {
+        return new UpdatePreimage({
             kind: UpdatePreimageKind.UpdateOrAdd,
             item: lookupHistory,
         });
@@ -9123,23 +9125,21 @@ var UpdateServiceKind;
     UpdateServiceKind[UpdateServiceKind["Create"] = 1] = "Create";
 })(UpdateServiceKind || (UpdateServiceKind = {}));
 /**
- * Update service info
+ * Update service info or create a new one.
  */
 class UpdateService {
-    serviceId;
     action;
-    constructor(
-        this.serviceId = serviceId;
+    constructor(action) {
         this.action = action;
     }
-    static update({
-        return new UpdateService(
+    static update({ serviceInfo }) {
+        return new UpdateService({
             kind: UpdateServiceKind.Update,
             account: serviceInfo,
         });
     }
-    static create({
-        return new UpdateService(
+    static create({ serviceInfo, lookupHistory, }) {
+        return new UpdateService({
             kind: UpdateServiceKind.Create,
             account: serviceInfo,
             lookupHistory,
@@ -9160,17 +9160,15 @@ var UpdateStorageKind;
  * Can either create/modify an entry or remove it.
  */
 class UpdateStorage {
-    serviceId;
     action;
-    constructor(
-        this.serviceId = serviceId;
+    constructor(action) {
         this.action = action;
     }
-    static set({
-        return new UpdateStorage(
+    static set({ storage }) {
+        return new UpdateStorage({ kind: UpdateStorageKind.Set, storage });
     }
-    static remove({
-        return new UpdateStorage(
+    static remove({ key }) {
+        return new UpdateStorage({ kind: UpdateStorageKind.Remove, key });
     }
     get key() {
         if (this.action.kind === UpdateStorageKind.Remove) {
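In all three `Update*` classes above, the `serviceId` field and constructor argument are gone: an update now describes only its action, and (as the `InMemoryState` and serialization hunks further down show) updates are grouped per service in a `Map` keyed by service id. A rough sketch of the new shape; `serviceId`, `key`, `value` and `staleKey` are placeholder values:

    // Old style (removed): UpdateStorage.set({ serviceId, storage: ... })
    // New style: the service id lives on the Map, the update only carries the action.
    const storageUpdates = new Map();
    storageUpdates.set(serviceId, [
        UpdateStorage.set({ storage: StorageItem.create({ key, value }) }),
        UpdateStorage.remove({ key: staleKey }),
    ]);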
@@ -9355,12 +9353,12 @@ class InMemoryState extends WithDebug {
      * Modify the state and apply a single state update.
      */
     applyUpdate(update) {
-        const {
+        const { removed, created: _, updated, preimages, storage, ...rest } = update;
         // just assign all other variables
         Object.assign(this, rest);
         // and update the services state
         let result;
-        result = this.updateServices(
+        result = this.updateServices(updated);
         if (result.isError) {
             return result;
         }
@@ -9372,7 +9370,7 @@ class InMemoryState extends WithDebug {
         if (result.isError) {
             return result;
         }
-        this.removeServices(
+        this.removeServices(removed);
         return Result$1.ok(OK);
     }
     removeServices(servicesRemoved) {
@@ -9381,89 +9379,102 @@ class InMemoryState extends WithDebug {
             this.services.delete(serviceId);
         }
     }
-    updateStorage(
-
-
-
-
-
-
-
-
-
-
-
-
-
+    updateStorage(storageUpdates) {
+        if (storageUpdates === undefined) {
+            return Result$1.ok(OK);
+        }
+        for (const [serviceId, updates] of storageUpdates.entries()) {
+            for (const update of updates) {
+                const { kind } = update.action;
+                const service = this.services.get(serviceId);
+                if (service === undefined) {
+                    return Result$1.error(UpdateError.NoService, () => `Attempting to update storage of non-existing service: ${serviceId}`);
+                }
+                if (kind === UpdateStorageKind.Set) {
+                    const { key, value } = update.action.storage;
+                    service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
+                }
+                else if (kind === UpdateStorageKind.Remove) {
+                    const { key } = update.action;
+                    check `
           ${service.data.storage.has(key.toString())}
-          Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+          Attempting to remove non-existing storage item at ${serviceId}: ${update.action.key}
         `;
-
-
-
-
+                    service.data.storage.delete(key.toString());
+                }
+                else {
+                    assertNever(kind);
+                }
             }
         }
         return Result$1.ok(OK);
     }
-    updatePreimages(
-
+    updatePreimages(preimagesUpdates) {
+        if (preimagesUpdates === undefined) {
+            return Result$1.ok(OK);
+        }
+        for (const [serviceId, updates] of preimagesUpdates.entries()) {
             const service = this.services.get(serviceId);
             if (service === undefined) {
-                return Result$1.error(UpdateError.NoService, `Attempting to update preimage of non-existing service: ${serviceId}`);
+                return Result$1.error(UpdateError.NoService, () => `Attempting to update preimage of non-existing service: ${serviceId}`);
             }
-            const
-
-
-
-
-
-            service.data.preimages.set(preimage.hash, preimage);
-            if (slot !== null) {
-                const lookupHistory = service.data.lookupHistory.get(preimage.hash);
-                const length = tryAsU32(preimage.blob.length);
-                const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
-                if (lookupHistory === undefined) {
-                    // no lookup history for that preimage at all (edge case, should be requested)
-                    service.data.lookupHistory.set(preimage.hash, [lookup]);
+            for (const update of updates) {
+                const { kind } = update.action;
+                if (kind === UpdatePreimageKind.Provide) {
+                    const { preimage, slot } = update.action;
+                    if (service.data.preimages.has(preimage.hash)) {
+                        return Result$1.error(UpdateError.PreimageExists, () => `Overwriting existing preimage at ${serviceId}: ${preimage}`);
                     }
-
-
-            const
-
+                    service.data.preimages.set(preimage.hash, preimage);
+                    if (slot !== null) {
+                        const lookupHistory = service.data.lookupHistory.get(preimage.hash);
+                        const length = tryAsU32(preimage.blob.length);
+                        const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
+                        if (lookupHistory === undefined) {
+                            // no lookup history for that preimage at all (edge case, should be requested)
+                            service.data.lookupHistory.set(preimage.hash, [lookup]);
+                        }
+                        else {
+                            // insert or replace exiting entry
+                            const index = lookupHistory.map((x) => x.length).indexOf(length);
+                            lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
+                        }
                     }
                 }
-
-
-
-
-
-
-
-
+                else if (kind === UpdatePreimageKind.Remove) {
+                    const { hash, length } = update.action;
+                    service.data.preimages.delete(hash);
+                    const history = service.data.lookupHistory.get(hash) ?? [];
+                    const idx = history.map((x) => x.length).indexOf(length);
+                    if (idx !== -1) {
+                        history.splice(idx, 1);
+                    }
+                }
+                else if (kind === UpdatePreimageKind.UpdateOrAdd) {
+                    const { item } = update.action;
+                    const history = service.data.lookupHistory.get(item.hash) ?? [];
+                    const existingIdx = history.map((x) => x.length).indexOf(item.length);
+                    const removeCount = existingIdx === -1 ? 0 : 1;
+                    history.splice(existingIdx, removeCount, item);
+                    service.data.lookupHistory.set(item.hash, history);
+                }
+                else {
+                    assertNever(kind);
                 }
-            }
-            else if (kind === UpdatePreimageKind.UpdateOrAdd) {
-                const { item } = action;
-                const history = service.data.lookupHistory.get(item.hash) ?? [];
-                const existingIdx = history.map((x) => x.length).indexOf(item.length);
-                const removeCount = existingIdx === -1 ? 0 : 1;
-                history.splice(existingIdx, removeCount, item);
-                service.data.lookupHistory.set(item.hash, history);
-            }
-            else {
-                assertNever(kind);
             }
         }
         return Result$1.ok(OK);
     }
     updateServices(servicesUpdates) {
-
-
+        if (servicesUpdates === undefined) {
+            return Result$1.ok(OK);
+        }
+        for (const [serviceId, update] of servicesUpdates.entries()) {
+            const { kind, account } = update.action;
             if (kind === UpdateServiceKind.Create) {
-                const { lookupHistory } = action;
+                const { lookupHistory } = update.action;
                 if (this.services.has(serviceId)) {
-                    return Result$1.error(UpdateError.DuplicateService, `${serviceId} already exists!`);
+                    return Result$1.error(UpdateError.DuplicateService, () => `${serviceId} already exists!`);
                 }
                 this.services.set(serviceId, new InMemoryService(serviceId, {
                     info: account,
@@ -9475,7 +9486,7 @@ class InMemoryState extends WithDebug {
             else if (kind === UpdateServiceKind.Update) {
                 const existingService = this.services.get(serviceId);
                 if (existingService === undefined) {
-                    return Result$1.error(UpdateError.NoService, `Cannot update ${serviceId} because it does not exist.`);
+                    return Result$1.error(UpdateError.NoService, () => `Cannot update ${serviceId} because it does not exist.`);
                 }
                 existingService.data.info = account;
             }
@@ -10729,76 +10740,88 @@ function* serializeStateUpdate(spec, blake2b, update) {
     yield* serializeBasicKeys(spec, update);
     const encode = (codec, val) => Encoder.encodeObject(codec, val, spec);
     // then let's proceed with service updates
-    yield* serializeServiceUpdates(update.
+    yield* serializeServiceUpdates(update.updated, encode, blake2b);
     yield* serializePreimages(update.preimages, encode, blake2b);
     yield* serializeStorage(update.storage, blake2b);
-    yield* serializeRemovedServices(update.
+    yield* serializeRemovedServices(update.removed);
 }
 function* serializeRemovedServices(servicesRemoved) {
-
+    if (servicesRemoved === undefined) {
+        return;
+    }
+    for (const serviceId of servicesRemoved) {
         // TODO [ToDr] what about all data associated with a service?
         const codec = serialize.serviceData(serviceId);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
     }
 }
-function* serializeStorage(
-
-
-
-
-
-
-
-
-
-
-
-
+function* serializeStorage(storageUpdates, blake2b) {
+    if (storageUpdates === undefined) {
+        return;
+    }
+    for (const [serviceId, updates] of storageUpdates.entries()) {
+        for (const { action } of updates) {
+            switch (action.kind) {
+                case UpdateStorageKind.Set: {
+                    const key = action.storage.key;
+                    const codec = serialize.serviceStorage(blake2b, serviceId, key);
+                    yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
+                    break;
+                }
+                case UpdateStorageKind.Remove: {
+                    const key = action.key;
+                    const codec = serialize.serviceStorage(blake2b, serviceId, key);
+                    yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+                    break;
+                }
             }
-            default:
-                assertNever(action);
         }
     }
 }
-function* serializePreimages(
-
-
-
-
-
-
-
-    const
-
-
-
-
-
+function* serializePreimages(preimagesUpdates, encode, blake2b) {
+    if (preimagesUpdates === undefined) {
+        return;
+    }
+    for (const [serviceId, updates] of preimagesUpdates.entries()) {
+        for (const { action } of updates) {
+            switch (action.kind) {
+                case UpdatePreimageKind.Provide: {
+                    const { hash, blob } = action.preimage;
+                    const codec = serialize.servicePreimages(blake2b, serviceId, hash);
+                    yield [StateEntryUpdateAction.Insert, codec.key, blob];
+                    if (action.slot !== null) {
+                        const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
+                        yield [
+                            StateEntryUpdateAction.Insert,
+                            codec2.key,
+                            encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
+                        ];
+                    }
+                    break;
+                }
+                case UpdatePreimageKind.UpdateOrAdd: {
+                    const { hash, length, slots } = action.item;
+                    const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+                    yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
+                    break;
+                }
+                case UpdatePreimageKind.Remove: {
+                    const { hash, length } = action;
+                    const codec = serialize.servicePreimages(blake2b, serviceId, hash);
+                    yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+                    const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+                    yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
+                    break;
                 }
-                    break;
-                }
-                case UpdatePreimageKind.UpdateOrAdd: {
-                    const { hash, length, slots } = action.item;
-                    const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
-                    yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
-                    break;
-                }
-                case UpdatePreimageKind.Remove: {
-                    const { hash, length } = action;
-                    const codec = serialize.servicePreimages(blake2b, serviceId, hash);
-                    yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
-                    const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
-                    yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
-                    break;
             }
-            default:
-                assertNever(action);
         }
     }
 }
 function* serializeServiceUpdates(servicesUpdates, encode, blake2b) {
-
+    if (servicesUpdates === undefined) {
+        return;
+    }
+    for (const [serviceId, { action }] of servicesUpdates.entries()) {
         // new service being created or updated
         const codec = serialize.serviceData(serviceId);
         yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, action.account)];
@@ -11074,13 +11097,13 @@ class LeafDb {
      */
     static fromLeavesBlob(blob, db) {
         if (blob.length % TRIE_NODE_BYTES !== 0) {
-            return Result$1.error(LeafDbError.InvalidLeafData, `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
+            return Result$1.error(LeafDbError.InvalidLeafData, () => `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
         }
         const leaves = SortedSet.fromArray(leafComparator, []);
         for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
             const node = new TrieNode(nodeData.raw);
             if (node.getNodeType() === NodeType.Branch) {
-                return Result$1.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
+                return Result$1.error(LeafDbError.InvalidLeafData, () => `Branch node detected: ${nodeData}`);
             }
             leaves.insert(node.asLeafNode());
         }
@@ -12395,6 +12418,14 @@ const NoMachineError = Symbol("Machine index not found.");
 const SegmentExportError = Symbol("Too many segments already exported.");

 const InsufficientFundsError = "insufficient funds";
+/** Deep clone of a map with array. */
+function deepCloneMapWithArray(map) {
+    const cloned = [];
+    for (const [k, v] of map.entries()) {
+        cloned.push([k, v.slice()]);
+    }
+    return new Map(cloned);
+}
 /**
  * State updates that currently accumulating service produced.
  *
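The new `deepCloneMapWithArray` helper (used by `AccumulationStateUpdate.copyFrom` below) copies the `Map` and each of its array values, while the elements inside those arrays stay shared by reference. A small illustration of that copy depth; `itemA` and `itemB` are placeholders:

    const original = new Map([[1, [itemA]], [2, [itemB]]]);
    const copy = deepCloneMapWithArray(original);
    copy.get(1).push(itemB);               // does not modify original.get(1)
    console.log(copy.get(1)[0] === itemA); // true: the items themselves are not cloned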
@@ -12424,10 +12455,11 @@ class AccumulationStateUpdate {
     /** Create new empty state update. */
     static empty() {
         return new AccumulationStateUpdate({
-
-
-
-
+            created: [],
+            updated: new Map(),
+            removed: [],
+            preimages: new Map(),
+            storage: new Map(),
         }, []);
     }
     /** Create a state update with some existing, yet uncommited services updates. */
@@ -12439,10 +12471,13 @@ class AccumulationStateUpdate {
     /** Create a copy of another `StateUpdate`. Used by checkpoints. */
     static copyFrom(from) {
         const serviceUpdates = {
-
-
-
-
+            // shallow copy
+            created: [...from.services.created],
+            updated: new Map(from.services.updated),
+            removed: [...from.services.removed],
+            // deep copy
+            preimages: deepCloneMapWithArray(from.services.preimages),
+            storage: deepCloneMapWithArray(from.services.storage),
         };
         const transfers = [...from.transfers];
         const update = new AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
@@ -12490,9 +12525,9 @@ class PartiallyUpdatedState {
         if (destination === null) {
             return null;
         }
-        const
-        if (
-            return
+        const maybeUpdatedServiceInfo = this.stateUpdate.services.updated.get(destination);
+        if (maybeUpdatedServiceInfo !== undefined) {
+            return maybeUpdatedServiceInfo.action.account;
         }
         const maybeService = this.state.getService(destination);
         if (maybeService === null) {
@@ -12501,7 +12536,8 @@ class PartiallyUpdatedState {
         return maybeService.getInfo();
     }
     getStorage(serviceId, rawKey) {
-        const
+        const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+        const item = storages.find((x) => x.key.isEqualTo(rawKey));
         if (item !== undefined) {
             return item.value;
         }
@@ -12516,10 +12552,11 @@ class PartiallyUpdatedState {
      * the existence in `preimages` map.
      */
     hasPreimage(serviceId, hash) {
-        const
+        const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+        const providedPreimage = preimages.find(
        // we ignore the action here, since if there is <any> update on that
        // hash it means it has to exist, right?
-        (p) => p.
+        (p) => p.hash.isEqualTo(hash));
         if (providedPreimage !== undefined) {
             return true;
         }
@@ -12532,7 +12569,8 @@ class PartiallyUpdatedState {
     }
     getPreimage(serviceId, hash) {
         // TODO [ToDr] Should we verify availability here?
-        const
+        const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+        const freshlyProvided = preimages.find((x) => x.hash.isEqualTo(hash));
         if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
             return freshlyProvided.action.preimage.blob;
         }
@@ -12541,10 +12579,11 @@ class PartiallyUpdatedState {
     }
     /** Get status of a preimage of current service taking into account any updates. */
     getLookupHistory(currentTimeslot, serviceId, hash, length) {
+        const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
         // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
         // the same state update. We should however switch to proper "updated state"
         // representation soon.
-        const updatedPreimage =
+        const updatedPreimage = preimages.findLast((update) => update.hash.isEqualTo(hash) && BigInt(update.length) === length);
         const stateFallback = () => {
             // fallback to state lookup
             const service = this.state.getService(serviceId);
@@ -12581,14 +12620,15 @@ class PartiallyUpdatedState {
     /* State update functions. */
     updateStorage(serviceId, key, value) {
         const update = value === null
-            ? UpdateStorage.remove({
+            ? UpdateStorage.remove({ key })
             : UpdateStorage.set({
-                serviceId,
                 storage: StorageItem.create({ key, value }),
             });
-        const
+        const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+        const index = storages.findIndex((x) => x.key.isEqualTo(key));
         const count = index === -1 ? 0 : 1;
-
+        storages.splice(index, count, update);
+        this.stateUpdate.services.storage.set(serviceId, storages);
     }
     /**
      * Update a preimage.
@@ -12596,8 +12636,10 @@ class PartiallyUpdatedState {
      * Note we store all previous entries as well, since there might be a sequence of:
      * `provide` -> `remove` and both should update the end state somehow.
      */
-    updatePreimage(newUpdate) {
-        this.stateUpdate.services.preimages.
+    updatePreimage(serviceId, newUpdate) {
+        const updatePreimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+        updatePreimages.push(newUpdate);
+        this.stateUpdate.services.preimages.set(serviceId, updatePreimages);
     }
     updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
         check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
@@ -12606,11 +12648,11 @@ class PartiallyUpdatedState {
         const overflowBytes = !isU64(bytes);
         // TODO [ToDr] this is not specified in GP, but it seems sensible.
         if (overflowItems || overflowBytes) {
-            return Result$1.error(InsufficientFundsError);
+            return Result$1.error(InsufficientFundsError, () => `Storage utilisation overflow: items=${overflowItems}, bytes=${overflowBytes}`);
         }
         const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
         if (serviceInfo.balance < thresholdBalance) {
-            return Result$1.error(InsufficientFundsError);
+            return Result$1.error(InsufficientFundsError, () => `Service balance (${serviceInfo.balance}) below threshold (${thresholdBalance})`);
         }
         // Update service info with new details.
         this.updateServiceInfo(serviceId, ServiceAccountInfo.create({
@@ -12621,20 +12663,23 @@ class PartiallyUpdatedState {
         return Result$1.ok(OK);
     }
     updateServiceInfo(serviceId, newInfo) {
-        const
-
-
-        if (existingItem?.action.kind === UpdateServiceKind.Create) {
-            this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.create({
-                serviceId,
+        const existingUpdate = this.stateUpdate.services.updated.get(serviceId);
+        if (existingUpdate?.action.kind === UpdateServiceKind.Create) {
+            this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
                 serviceInfo: newInfo,
-                lookupHistory:
+                lookupHistory: existingUpdate.action.lookupHistory,
             }));
             return;
         }
-        this.stateUpdate.services.
-
+        this.stateUpdate.services.updated.set(serviceId, UpdateService.update({
+            serviceInfo: newInfo,
+        }));
+    }
+    createService(serviceId, newInfo, newLookupHistory) {
+        this.stateUpdate.services.created.push(serviceId);
+        this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
            serviceInfo: newInfo,
+            lookupHistory: newLookupHistory,
        }));
     }
     getPrivilegedServices() {
@@ -14255,7 +14300,7 @@ class ReadablePage extends MemoryPage {
     loadInto(result, startIndex, length) {
         const endIndex = startIndex + length;
         if (endIndex > PAGE_SIZE$1) {
-            return Result$1.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE$1));
+            return Result$1.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE$1), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE$1}`);
         }
         const bytes = this.data.subarray(startIndex, endIndex);
         // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -14264,7 +14309,7 @@ class ReadablePage extends MemoryPage {
         return Result$1.ok(OK);
     }
     storeFrom(_address, _data) {
-        return Result$1.error(PageFault.fromMemoryIndex(this.start, true));
+        return Result$1.error(PageFault.fromMemoryIndex(this.start, true), () => `Page fault: attempted to write to read-only page at ${this.start}`);
     }
     setData(pageIndex, data) {
         this.data.set(data, pageIndex);
@@ -14293,7 +14338,7 @@ class WriteablePage extends MemoryPage {
     loadInto(result, startIndex, length) {
         const endIndex = startIndex + length;
         if (endIndex > PAGE_SIZE$1) {
-            return Result$1.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE$1));
+            return Result$1.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE$1), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE$1}`);
         }
         const bytes = this.view.subarray(startIndex, endIndex);
         // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -14363,7 +14408,7 @@ class Memory {
         logger$3.insane `MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`;
         const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
         if (pagesResult.isError) {
-            return Result$1.error(pagesResult.error);
+            return Result$1.error(pagesResult.error, pagesResult.details);
         }
         const pages = pagesResult.ok;
         let currentPosition = address;
@@ -14388,14 +14433,14 @@ class Memory {
         const pages = [];
         for (const pageNumber of pageRange) {
             if (pageNumber < RESERVED_NUMBER_OF_PAGES) {
-                return Result$1.error(PageFault.fromPageNumber(pageNumber, true));
+                return Result$1.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to access reserved page ${pageNumber}`);
             }
             const page = this.memory.get(pageNumber);
             if (page === undefined) {
-                return Result$1.error(PageFault.fromPageNumber(pageNumber));
+                return Result$1.error(PageFault.fromPageNumber(pageNumber), () => `Page fault: page ${pageNumber} not allocated`);
             }
             if (accessType === AccessType.WRITE && !page.isWriteable()) {
-                return Result$1.error(PageFault.fromPageNumber(pageNumber, true));
+                return Result$1.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to write to read-only page ${pageNumber}`);
             }
             pages.push(page);
         }
@@ -14413,7 +14458,7 @@ class Memory {
         }
         const pagesResult = this.getPages(startAddress, result.length, AccessType.READ);
         if (pagesResult.isError) {
-            return Result$1.error(pagesResult.error);
+            return Result$1.error(pagesResult.error, pagesResult.details);
         }
         const pages = pagesResult.ok;
         let currentPosition = startAddress;
@@ -16218,7 +16263,7 @@ class ProgramDecoder {
         }
         catch (e) {
             logger$2.error `Invalid program: ${e}`;
-            return Result$1.error(ProgramDecoderError.InvalidProgramError);
+            return Result$1.error(ProgramDecoderError.InvalidProgramError, () => `Program decoder error: ${e}`);
         }
     }
 }
@@ -16499,7 +16544,7 @@ class HostCallMemory {
             return Result$1.ok(OK);
         }
         if (address + tryAsU64(bytes.length) > MEMORY_SIZE) {
-            return Result$1.error(new OutOfBounds());
+            return Result$1.error(new OutOfBounds(), () => `Memory access out of bounds: address ${address} + length ${bytes.length} exceeds memory size`);
         }
         return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
     }
@@ -16508,13 +16553,10 @@ class HostCallMemory {
             return Result$1.ok(OK);
         }
         if (startAddress + tryAsU64(result.length) > MEMORY_SIZE) {
-            return Result$1.error(new OutOfBounds());
+            return Result$1.error(new OutOfBounds(), () => `Memory access out of bounds: address ${startAddress} + length ${result.length} exceeds memory size`);
         }
         return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
     }
-    getMemory() {
-        return this.memory;
-    }
 }

 class HostCallRegisters {
@@ -17602,32 +17644,33 @@ class Preimages {
             }
             if (prevPreimage.requester > currPreimage.requester ||
                 currPreimage.blob.compare(prevPreimage.blob).isLessOrEqual()) {
-                return Result$1.error(PreimagesErrorCode.PreimagesNotSortedUnique);
+                return Result$1.error(PreimagesErrorCode.PreimagesNotSortedUnique, () => `Preimages not sorted/unique at index ${i}`);
             }
         }
         const { preimages, slot } = input;
-        const pendingChanges =
+        const pendingChanges = new Map();
         // select preimages for integration
         for (const preimage of preimages) {
             const { requester, blob } = preimage;
             const hash = this.blake2b.hashBytes(blob).asOpaque();
             const service = this.state.getService(requester);
             if (service === null) {
-                return Result$1.error(PreimagesErrorCode.AccountNotFound);
+                return Result$1.error(PreimagesErrorCode.AccountNotFound, () => `Service not found: ${requester}`);
             }
             const hasPreimage = service.hasPreimage(hash);
             const slots = service.getLookupHistory(hash, tryAsU32(blob.length));
             // https://graypaper.fluffylabs.dev/#/5f542d7/181800181900
             // https://graypaper.fluffylabs.dev/#/5f542d7/116f0011a500
             if (hasPreimage || slots === null || !LookupHistoryItem.isRequested(slots)) {
-                return Result$1.error(PreimagesErrorCode.PreimageUnneeded);
+                return Result$1.error(PreimagesErrorCode.PreimageUnneeded, () => `Preimage unneeded: requester=${requester}, hash=${hash}, hasPreimage=${hasPreimage}, isRequested=${slots !== null && LookupHistoryItem.isRequested(slots)}`);
             }
             // https://graypaper.fluffylabs.dev/#/5f542d7/18c00018f300
-            pendingChanges.
-
+            const updates = pendingChanges.get(requester) ?? [];
+            updates.push(UpdatePreimage.provide({
                 preimage: PreimageItem.create({ hash, blob }),
                 slot,
            }));
+            pendingChanges.set(requester, updates);
         }
         return Result$1.ok({
             preimages: pendingChanges,