@typeberry/lib 0.2.0-e767e74 → 0.2.0-f506473
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +252 -209
- package/index.d.ts +1025 -957
- package/index.js +252 -209
- package/package.json +1 -1
package/index.js
CHANGED
@@ -301,7 +301,7 @@ function resultToString(res) {
 if (res.isOk) {
 return `OK: ${typeof res.ok === "symbol" ? res.ok.toString() : res.ok}`;
 }
-return `${res.details}\nError: ${maybeTaggedErrorToString(res.error)}`;
+return `${res.details()}\nError: ${maybeTaggedErrorToString(res.error)}`;
 }
 /** An indication of two possible outcomes returned from a function. */
 const Result$1 = {
@@ -315,7 +315,7 @@ const Result$1 = {
 };
 },
 /** Create new [`Result`] with `Error` status. */
-error: (error, details
+error: (error, details) => {
 check `${error !== undefined} 'Error' type cannot be undefined.`;
 return {
 isOk: false,
@@ -428,7 +428,7 @@ function deepEqual(actual, expected, { context = [], errorsCollector, ignore = [
 }
 if (actual.isError && expected.isError) {
 deepEqual(actual.error, expected.error, { context: ctx.concat(["error"]), errorsCollector: errors, ignore });
-deepEqual(actual.details, expected.details, {
+deepEqual(actual.details(), expected.details(), {
 context: ctx.concat(["details"]),
 errorsCollector: errors,
 // display details when error does not match
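The hunks above turn the `details` of an error `Result` from a plain value into a lazily evaluated thunk, so the message is only built when `details()` is actually read. A minimal standalone sketch of that pattern (a stand-in mirroring the diffed `Result$1` shape, not the package's actual export surface):

    // Stand-in for illustration only; field names follow the diff above.
    const Result = {
      ok: (ok) => ({ isOk: true, isError: false, ok }),
      // `details` is now a zero-argument function.
      error: (error, details = () => "") => ({ isOk: false, isError: true, error, details }),
    };

    const res = Result.error("timeout", () => `gave up after ${Date.now()} ms`);
    if (res.isError) {
      // Callers invoke `details()` instead of reading a string property.
      console.log(`${res.details()}\nError: ${String(res.error)}`);
    }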
@@ -1119,8 +1119,8 @@ class Decoder {
 /**
 * Create a new [`Decoder`] instance given a raw array of bytes as a source.
 */
-static fromBlob(source) {
-return new Decoder(source);
+static fromBlob(source, context) {
+return new Decoder(source, undefined, context);
 }
 /**
 * Decode a single object from all of the source bytes.
@@ -1415,7 +1415,7 @@ class Decoder {
 ensureHasBytes(bytes) {
 check `${bytes >= 0} Negative number of bytes given.`;
 if (this.offset + bytes > this.source.length) {
-throw new
+throw new EndOfDataError(`Attempting to decode more data than there is left. Need ${bytes}, left: ${this.source.length - this.offset}.`);
 }
 }
 }
@@ -1429,6 +1429,8 @@ function decodeVariableLengthExtraBytes(firstByte) {
 }
 return 0;
 }
+class EndOfDataError extends Error {
+}
 
 /** Wrapper for `Decoder` that can skip bytes of fields in the data buffer instead of decoding them. */
 class Skipper {
@@ -2445,6 +2447,9 @@ function forEachDescriptor(descriptors, f) {
 f(k, descriptors[k]);
 }
 catch (e) {
+if (e instanceof EndOfDataError) {
+throw new EndOfDataError(`${key}: ${e}`);
+}
 throw new Error(`${key}: ${e}`);
 }
 }
@@ -2522,6 +2527,7 @@ var index$q = /*#__PURE__*/Object.freeze({
 Decoder: Decoder,
 Descriptor: Descriptor,
 Encoder: Encoder,
+EndOfDataError: EndOfDataError,
 ObjectView: ObjectView,
 SequenceView: SequenceView,
 TYPICAL_DICTIONARY_LENGTH: TYPICAL_DICTIONARY_LENGTH,
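The decoder now throws a dedicated `EndOfDataError` when input runs out, re-exports it from the codec namespace, and `Decoder.fromBlob` gains an optional `context` argument. A hedged sketch of how a caller might distinguish truncated input; the import location and the `decodeSomething` helper are assumptions for illustration only:

    // Assumes `Decoder` and `EndOfDataError` come from the package's codec exports,
    // as added to the frozen namespace in the hunk above.
    function tryDecode(bytes, context) {
      const decoder = Decoder.fromBlob(bytes, context); // new optional `context` argument
      try {
        return decodeSomething(decoder); // placeholder for descriptor-driven decoding
      } catch (e) {
        if (e instanceof EndOfDataError) {
          return null; // truncated input: the caller may wait for more bytes
        }
        throw e;
      }
    }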
@@ -7308,9 +7314,7 @@ var chain_spec$1 = {
 id: "typeberry-default",
 bootnodes: [
 "e3r2oc62zwfj3crnuifuvsxvbtlzetk4o5qyhetkhagsc2fgl2oka@127.0.0.1:40000",
-"
-"en5ejs5b2tybkfh4ym5vpfh7nynby73xhtfzmazumtvcijpcsz6ma@127.0.0.1:12346",
-"ekwmt37xecoq6a7otkm4ux5gfmm4uwbat4bg5m223shckhaaxdpqa@127.0.0.1:12347"
+"eyonydqt7gj7bjdek62lwdeuxdzr5q7nmxa2p5zwwtoijgamdnkka@127.0.0.1:12345"
 ],
 genesis_header: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ee155ace9c40292074cb6aff8c9ccdd273c81648ff1149ef36bcea6ebb8a3e25bb30a42c1e62f0afda5f0a4e8a562f7a13a24cea00ee81917b86b89e801314aaff71c6c03ff88adb5ed52c9681de1629a54e702fc14729f6b50d2f0a76f185b34418fb8c85bb3985394a8c2756d3643457ce614546202a2f50b093d762499acedee6d555b82024f1ccf8a1e37e60fa60fd40b1958c4bb3006af78647950e1b91ad93247bd01307550ec7acd757ce6fb805fcf73db364063265b30a949e90d9339326edb21e5541717fde24ec085000b28709847b8aab1ac51f84e94b37ca1b66cab2b9ff25c2410fbe9b8a717abb298c716a03983c98ceb4def2087500b8e3410746846d17469fb2f95ef365efcab9f4e22fa1feb53111c995376be8019981ccf30aa5444688b3cab47697b37d5cac5707bb3289e986b19b17db437206931a8d151e5c8fe2b9d8a606966a79edd2f9e5db47e83947ce368ccba53bf6ba20a40b8b8c5d436f92ecf605421e873a99ec528761eb52a88a2f9a057b3b3003e6f32a2105650944fcd101621fd5bb3124c9fd191d114b7ad936c1d79d734f9f21392eab0084d01534b31c1dd87c81645fd762482a90027754041ca1b56133d0466c0600ffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
 genesis_state: {
@@ -7356,9 +7360,7 @@ var authorship = {
 var chain_spec = {
 id: "typeberry-dev",
 bootnodes: [
-"
-"en5ejs5b2tybkfh4ym5vpfh7nynby73xhtfzmazumtvcijpcsz6ma@127.0.0.1:12346",
-"ekwmt37xecoq6a7otkm4ux5gfmm4uwbat4bg5m223shckhaaxdpqa@127.0.0.1:12347"
+"eyonydqt7gj7bjdek62lwdeuxdzr5q7nmxa2p5zwwtoijgamdnkka@127.0.0.1:12345"
 ],
 genesis_header: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ee155ace9c40292074cb6aff8c9ccdd273c81648ff1149ef36bcea6ebb8a3e25bb30a42c1e62f0afda5f0a4e8a562f7a13a24cea00ee81917b86b89e801314aaff71c6c03ff88adb5ed52c9681de1629a54e702fc14729f6b50d2f0a76f185b34418fb8c85bb3985394a8c2756d3643457ce614546202a2f50b093d762499acedee6d555b82024f1ccf8a1e37e60fa60fd40b1958c4bb3006af78647950e1b91ad93247bd01307550ec7acd757ce6fb805fcf73db364063265b30a949e90d9339326edb21e5541717fde24ec085000b28709847b8aab1ac51f84e94b37ca1b66cab2b9ff25c2410fbe9b8a717abb298c716a03983c98ceb4def2087500b8e3410746846d17469fb2f95ef365efcab9f4e22fa1feb53111c995376be8019981ccf30aa5444688b3cab47697b37d5cac5707bb3289e986b19b17db437206931a8d151e5c8fe2b9d8a606966a79edd2f9e5db47e83947ce368ccba53bf6ba20a40b8b8c5d436f92ecf605421e873a99ec528761eb52a88a2f9a057b3b3003e6f32a2105650944fcd101621fd5bb3124c9fd191d114b7ad936c1d79d734f9f21392eab0084d01534b31c1dd87c81645fd762482a90027754041ca1b56133d0466c0600ffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
 genesis_state: {
@@ -8069,6 +8071,8 @@ function accumulationOutputComparator(a, b) {
 const O = 8;
 /** `Q`: The number of items in the authorizations queue. */
 const Q = 80;
+/** `W_B`: The maximum size of the concatenated variable-size blobs, extrinsics and imported segments of a work-package, in octets */
+Compatibility.isGreaterOrEqual(GpVersion.V0_7_2) ? 13_791_360 : 13_794_305;
 /** `W_T`: The size of a transfer memo in octets. */
 const W_T = 128;
 /**
@@ -9059,31 +9063,29 @@ var UpdatePreimageKind;
 * 3. Update `LookupHistory` with given value.
 */
 class UpdatePreimage {
-serviceId;
 action;
-constructor(
-this.serviceId = serviceId;
+constructor(action) {
 this.action = action;
 }
 /** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
-static provide({
-return new UpdatePreimage(
+static provide({ preimage, slot }) {
+return new UpdatePreimage({
 kind: UpdatePreimageKind.Provide,
 preimage,
 slot,
 });
 }
 /** The preimage should be removed completely from the database. */
-static remove({
-return new UpdatePreimage(
+static remove({ hash, length }) {
+return new UpdatePreimage({
 kind: UpdatePreimageKind.Remove,
 hash,
 length,
 });
 }
 /** Update the lookup history of some preimage or add a new one (request). */
-static updateOrAdd({
-return new UpdatePreimage(
+static updateOrAdd({ lookupHistory }) {
+return new UpdatePreimage({
 kind: UpdatePreimageKind.UpdateOrAdd,
 item: lookupHistory,
 });
@@ -9120,23 +9122,21 @@ var UpdateServiceKind;
 UpdateServiceKind[UpdateServiceKind["Create"] = 1] = "Create";
 })(UpdateServiceKind || (UpdateServiceKind = {}));
 /**
-* Update service info
+* Update service info or create a new one.
 */
 class UpdateService {
-serviceId;
 action;
-constructor(
-this.serviceId = serviceId;
+constructor(action) {
 this.action = action;
 }
-static update({
-return new UpdateService(
+static update({ serviceInfo }) {
+return new UpdateService({
 kind: UpdateServiceKind.Update,
 account: serviceInfo,
 });
 }
-static create({
-return new UpdateService(
+static create({ serviceInfo, lookupHistory, }) {
+return new UpdateService({
 kind: UpdateServiceKind.Create,
 account: serviceInfo,
 lookupHistory,
@@ -9157,17 +9157,15 @@ var UpdateStorageKind;
 * Can either create/modify an entry or remove it.
 */
 class UpdateStorage {
-serviceId;
 action;
-constructor(
-this.serviceId = serviceId;
+constructor(action) {
 this.action = action;
 }
-static set({
-return new UpdateStorage(
+static set({ storage }) {
+return new UpdateStorage({ kind: UpdateStorageKind.Set, storage });
 }
-static remove({
-return new UpdateStorage(
+static remove({ key }) {
+return new UpdateStorage({ kind: UpdateStorageKind.Remove, key });
 }
 get key() {
 if (this.action.kind === UpdateStorageKind.Remove) {
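`UpdatePreimage`, `UpdateService` and `UpdateStorage` no longer carry a `serviceId` field; as the later hunks show, updates are now grouped into maps keyed by service id. A standalone sketch of the new grouping (plain objects stand in for the package's `StorageItem`/`PreimageItem`; values are placeholders):

    const serviceId = 42;
    const storageUpdates = new Map();  // Map<serviceId, UpdateStorage-like[]>
    const preimageUpdates = new Map(); // Map<serviceId, UpdatePreimage-like[]>

    // The update itself no longer mentions the service...
    const setItem = { action: { kind: "Set", storage: { key: "0x01", value: "0xff" } } };
    const provide = { action: { kind: "Provide", preimage: { hash: "0xab", blob: "0xcd" }, slot: 7 } };

    // ...the service id lives only in the map key.
    storageUpdates.set(serviceId, [setItem]);
    preimageUpdates.set(serviceId, [provide]);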
@@ -9352,12 +9350,12 @@ class InMemoryState extends WithDebug {
 * Modify the state and apply a single state update.
 */
 applyUpdate(update) {
-const {
+const { removed, created: _, updated, preimages, storage, ...rest } = update;
 // just assign all other variables
 Object.assign(this, rest);
 // and update the services state
 let result;
-result = this.updateServices(
+result = this.updateServices(updated);
 if (result.isError) {
 return result;
 }
@@ -9369,7 +9367,7 @@ class InMemoryState extends WithDebug {
 if (result.isError) {
 return result;
 }
-this.removeServices(
+this.removeServices(removed);
 return Result$1.ok(OK);
 }
 removeServices(servicesRemoved) {
@@ -9378,89 +9376,102 @@ class InMemoryState extends WithDebug {
 this.services.delete(serviceId);
 }
 }
-updateStorage(
-
-
-
-
-
-
-
-
-
-
-
-
-
+updateStorage(storageUpdates) {
+if (storageUpdates === undefined) {
+return Result$1.ok(OK);
+}
+for (const [serviceId, updates] of storageUpdates.entries()) {
+for (const update of updates) {
+const { kind } = update.action;
+const service = this.services.get(serviceId);
+if (service === undefined) {
+return Result$1.error(UpdateError.NoService, () => `Attempting to update storage of non-existing service: ${serviceId}`);
+}
+if (kind === UpdateStorageKind.Set) {
+const { key, value } = update.action.storage;
+service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
+}
+else if (kind === UpdateStorageKind.Remove) {
+const { key } = update.action;
+check `
 ${service.data.storage.has(key.toString())}
-Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
+Attempting to remove non-existing storage item at ${serviceId}: ${update.action.key}
 `;
-
-
-
-
+service.data.storage.delete(key.toString());
+}
+else {
+assertNever(kind);
+}
 }
 }
 return Result$1.ok(OK);
 }
-updatePreimages(
-
+updatePreimages(preimagesUpdates) {
+if (preimagesUpdates === undefined) {
+return Result$1.ok(OK);
+}
+for (const [serviceId, updates] of preimagesUpdates.entries()) {
 const service = this.services.get(serviceId);
 if (service === undefined) {
-return Result$1.error(UpdateError.NoService, `Attempting to update preimage of non-existing service: ${serviceId}`);
+return Result$1.error(UpdateError.NoService, () => `Attempting to update preimage of non-existing service: ${serviceId}`);
 }
-const
-
-
-
-
-
-service.data.preimages.set(preimage.hash, preimage);
-if (slot !== null) {
-const lookupHistory = service.data.lookupHistory.get(preimage.hash);
-const length = tryAsU32(preimage.blob.length);
-const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
-if (lookupHistory === undefined) {
-// no lookup history for that preimage at all (edge case, should be requested)
-service.data.lookupHistory.set(preimage.hash, [lookup]);
+for (const update of updates) {
+const { kind } = update.action;
+if (kind === UpdatePreimageKind.Provide) {
+const { preimage, slot } = update.action;
+if (service.data.preimages.has(preimage.hash)) {
+return Result$1.error(UpdateError.PreimageExists, () => `Overwriting existing preimage at ${serviceId}: ${preimage}`);
 }
-
-
-const
-
+service.data.preimages.set(preimage.hash, preimage);
+if (slot !== null) {
+const lookupHistory = service.data.lookupHistory.get(preimage.hash);
+const length = tryAsU32(preimage.blob.length);
+const lookup = new LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
+if (lookupHistory === undefined) {
+// no lookup history for that preimage at all (edge case, should be requested)
+service.data.lookupHistory.set(preimage.hash, [lookup]);
+}
+else {
+// insert or replace exiting entry
+const index = lookupHistory.map((x) => x.length).indexOf(length);
+lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
+}
 }
 }
-
-
-
-
-
-
-
-
+else if (kind === UpdatePreimageKind.Remove) {
+const { hash, length } = update.action;
+service.data.preimages.delete(hash);
+const history = service.data.lookupHistory.get(hash) ?? [];
+const idx = history.map((x) => x.length).indexOf(length);
+if (idx !== -1) {
+history.splice(idx, 1);
+}
+}
+else if (kind === UpdatePreimageKind.UpdateOrAdd) {
+const { item } = update.action;
+const history = service.data.lookupHistory.get(item.hash) ?? [];
+const existingIdx = history.map((x) => x.length).indexOf(item.length);
+const removeCount = existingIdx === -1 ? 0 : 1;
+history.splice(existingIdx, removeCount, item);
+service.data.lookupHistory.set(item.hash, history);
+}
+else {
+assertNever(kind);
 }
-}
-else if (kind === UpdatePreimageKind.UpdateOrAdd) {
-const { item } = action;
-const history = service.data.lookupHistory.get(item.hash) ?? [];
-const existingIdx = history.map((x) => x.length).indexOf(item.length);
-const removeCount = existingIdx === -1 ? 0 : 1;
-history.splice(existingIdx, removeCount, item);
-service.data.lookupHistory.set(item.hash, history);
-}
-else {
-assertNever(kind);
 }
 }
 return Result$1.ok(OK);
 }
 updateServices(servicesUpdates) {
-
-
+if (servicesUpdates === undefined) {
+return Result$1.ok(OK);
+}
+for (const [serviceId, update] of servicesUpdates.entries()) {
+const { kind, account } = update.action;
 if (kind === UpdateServiceKind.Create) {
-const { lookupHistory } = action;
+const { lookupHistory } = update.action;
 if (this.services.has(serviceId)) {
-return Result$1.error(UpdateError.DuplicateService, `${serviceId} already exists!`);
+return Result$1.error(UpdateError.DuplicateService, () => `${serviceId} already exists!`);
 }
 this.services.set(serviceId, new InMemoryService(serviceId, {
 info: account,
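`InMemoryState.applyUpdate` now expects the service changes split into `removed`, `created`, `updated`, `preimages` and `storage`, with the per-service collections keyed by service id. A minimal shape sketch, using only the field names visible in the destructuring above (the real update object is produced by the accumulation code later in this diff):

    // Shape sketch only.
    const update = {
      created: [],        // newly created service ids (destructured out, not assigned onto the state)
      removed: [],        // service ids to delete
      updated: new Map(), // Map<serviceId, UpdateService>
      preimages: new Map(), // Map<serviceId, UpdatePreimage[]>
      storage: new Map(),   // Map<serviceId, UpdateStorage[]>
      // ...any remaining fields are Object.assign-ed onto the state.
    };
    // const result = inMemoryState.applyUpdate(update); // Result.ok(OK) or an UpdateError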
@@ -10726,76 +10737,88 @@ function* serializeStateUpdate(spec, blake2b, update) {
 yield* serializeBasicKeys(spec, update);
 const encode = (codec, val) => Encoder.encodeObject(codec, val, spec);
 // then let's proceed with service updates
-yield* serializeServiceUpdates(update.
+yield* serializeServiceUpdates(update.updated, encode, blake2b);
 yield* serializePreimages(update.preimages, encode, blake2b);
 yield* serializeStorage(update.storage, blake2b);
-yield* serializeRemovedServices(update.
+yield* serializeRemovedServices(update.removed);
 }
 function* serializeRemovedServices(servicesRemoved) {
-
+if (servicesRemoved === undefined) {
+return;
+}
+for (const serviceId of servicesRemoved) {
 // TODO [ToDr] what about all data associated with a service?
 const codec = serialize.serviceData(serviceId);
 yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
 }
 }
-function* serializeStorage(
-
-
-
-
-
-
-
-
-
-
-
-
+function* serializeStorage(storageUpdates, blake2b) {
+if (storageUpdates === undefined) {
+return;
+}
+for (const [serviceId, updates] of storageUpdates.entries()) {
+for (const { action } of updates) {
+switch (action.kind) {
+case UpdateStorageKind.Set: {
+const key = action.storage.key;
+const codec = serialize.serviceStorage(blake2b, serviceId, key);
+yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
+break;
+}
+case UpdateStorageKind.Remove: {
+const key = action.key;
+const codec = serialize.serviceStorage(blake2b, serviceId, key);
+yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+break;
+}
 }
-default:
-assertNever(action);
 }
 }
 }
-function* serializePreimages(
-
-
-
-
-
-
-
-const
-
-
-
-
-
+function* serializePreimages(preimagesUpdates, encode, blake2b) {
+if (preimagesUpdates === undefined) {
+return;
+}
+for (const [serviceId, updates] of preimagesUpdates.entries()) {
+for (const { action } of updates) {
+switch (action.kind) {
+case UpdatePreimageKind.Provide: {
+const { hash, blob } = action.preimage;
+const codec = serialize.servicePreimages(blake2b, serviceId, hash);
+yield [StateEntryUpdateAction.Insert, codec.key, blob];
+if (action.slot !== null) {
+const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
+yield [
+StateEntryUpdateAction.Insert,
+codec2.key,
+encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
+];
+}
+break;
+}
+case UpdatePreimageKind.UpdateOrAdd: {
+const { hash, length, slots } = action.item;
+const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
+break;
+}
+case UpdatePreimageKind.Remove: {
+const { hash, length } = action;
+const codec = serialize.servicePreimages(blake2b, serviceId, hash);
+yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
+const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
+yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
+break;
 }
-break;
-}
-case UpdatePreimageKind.UpdateOrAdd: {
-const { hash, length, slots } = action.item;
-const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
-yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
-break;
-}
-case UpdatePreimageKind.Remove: {
-const { hash, length } = action;
-const codec = serialize.servicePreimages(blake2b, serviceId, hash);
-yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
-const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
-yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
-break;
 }
-default:
-assertNever(action);
 }
 }
 }
 function* serializeServiceUpdates(servicesUpdates, encode, blake2b) {
-
+if (servicesUpdates === undefined) {
+return;
+}
+for (const [serviceId, { action }] of servicesUpdates.entries()) {
 // new service being created or updated
 const codec = serialize.serviceData(serviceId);
 yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, action.account)];
@@ -11071,13 +11094,13 @@ class LeafDb {
 */
 static fromLeavesBlob(blob, db) {
 if (blob.length % TRIE_NODE_BYTES !== 0) {
-return Result$1.error(LeafDbError.InvalidLeafData, `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
+return Result$1.error(LeafDbError.InvalidLeafData, () => `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
 }
 const leaves = SortedSet.fromArray(leafComparator, []);
 for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
 const node = new TrieNode(nodeData.raw);
 if (node.getNodeType() === NodeType.Branch) {
-return Result$1.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
+return Result$1.error(LeafDbError.InvalidLeafData, () => `Branch node detected: ${nodeData}`);
 }
 leaves.insert(node.asLeafNode());
 }
@@ -12392,6 +12415,14 @@ const NoMachineError = Symbol("Machine index not found.");
 const SegmentExportError = Symbol("Too many segments already exported.");
 
 const InsufficientFundsError = "insufficient funds";
+/** Deep clone of a map with array. */
+function deepCloneMapWithArray(map) {
+const cloned = [];
+for (const [k, v] of map.entries()) {
+cloned.push([k, v.slice()]);
+}
+return new Map(cloned);
+}
 /**
 * State updates that currently accumulating service produced.
 *
@@ -12421,10 +12452,11 @@ class AccumulationStateUpdate {
 /** Create new empty state update. */
 static empty() {
 return new AccumulationStateUpdate({
-
-
-
-
+created: [],
+updated: new Map(),
+removed: [],
+preimages: new Map(),
+storage: new Map(),
 }, []);
 }
 /** Create a state update with some existing, yet uncommited services updates. */
@@ -12436,10 +12468,13 @@ class AccumulationStateUpdate {
 /** Create a copy of another `StateUpdate`. Used by checkpoints. */
 static copyFrom(from) {
 const serviceUpdates = {
-
-
-
-
+// shallow copy
+created: [...from.services.created],
+updated: new Map(from.services.updated),
+removed: [...from.services.removed],
+// deep copy
+preimages: deepCloneMapWithArray(from.services.preimages),
+storage: deepCloneMapWithArray(from.services.storage),
 };
 const transfers = [...from.transfers];
 const update = new AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
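`copyFrom` copies the flat collections shallowly but deep-clones the `Map<serviceId, updates[]>` collections, so pushing into a checkpointed copy cannot mutate the original. A small standalone illustration, reusing the same cloning helper as the hunk above:

    // Re-implementation of `deepCloneMapWithArray` from the diff, for illustration.
    function deepCloneMapWithArray(map) {
      const cloned = [];
      for (const [k, v] of map.entries()) {
        cloned.push([k, v.slice()]); // copy the array, keep the elements shared
      }
      return new Map(cloned);
    }

    const original = new Map([[1, ["a"]]]);
    const copy = deepCloneMapWithArray(original);
    copy.get(1).push("b");
    console.log(original.get(1)); // ["a"] — the checkpoint copy does not leak back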
@@ -12487,9 +12522,9 @@ class PartiallyUpdatedState {
 if (destination === null) {
 return null;
 }
-const
-if (
-return
+const maybeUpdatedServiceInfo = this.stateUpdate.services.updated.get(destination);
+if (maybeUpdatedServiceInfo !== undefined) {
+return maybeUpdatedServiceInfo.action.account;
 }
 const maybeService = this.state.getService(destination);
 if (maybeService === null) {
@@ -12498,7 +12533,8 @@ class PartiallyUpdatedState {
 return maybeService.getInfo();
 }
 getStorage(serviceId, rawKey) {
-const
+const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+const item = storages.find((x) => x.key.isEqualTo(rawKey));
 if (item !== undefined) {
 return item.value;
 }
@@ -12513,10 +12549,11 @@ class PartiallyUpdatedState {
 * the existence in `preimages` map.
 */
 hasPreimage(serviceId, hash) {
-const
+const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+const providedPreimage = preimages.find(
 // we ignore the action here, since if there is <any> update on that
 // hash it means it has to exist, right?
-(p) => p.
+(p) => p.hash.isEqualTo(hash));
 if (providedPreimage !== undefined) {
 return true;
 }
@@ -12529,7 +12566,8 @@ class PartiallyUpdatedState {
 }
 getPreimage(serviceId, hash) {
 // TODO [ToDr] Should we verify availability here?
-const
+const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+const freshlyProvided = preimages.find((x) => x.hash.isEqualTo(hash));
 if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
 return freshlyProvided.action.preimage.blob;
 }
@@ -12538,10 +12576,11 @@ class PartiallyUpdatedState {
 }
 /** Get status of a preimage of current service taking into account any updates. */
 getLookupHistory(currentTimeslot, serviceId, hash, length) {
+const preimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
 // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
 // the same state update. We should however switch to proper "updated state"
 // representation soon.
-const updatedPreimage =
+const updatedPreimage = preimages.findLast((update) => update.hash.isEqualTo(hash) && BigInt(update.length) === length);
 const stateFallback = () => {
 // fallback to state lookup
 const service = this.state.getService(serviceId);
@@ -12578,14 +12617,15 @@ class PartiallyUpdatedState {
 /* State update functions. */
 updateStorage(serviceId, key, value) {
 const update = value === null
-? UpdateStorage.remove({
+? UpdateStorage.remove({ key })
 : UpdateStorage.set({
-serviceId,
 storage: StorageItem.create({ key, value }),
 });
-const
+const storages = this.stateUpdate.services.storage.get(serviceId) ?? [];
+const index = storages.findIndex((x) => x.key.isEqualTo(key));
 const count = index === -1 ? 0 : 1;
-
+storages.splice(index, count, update);
+this.stateUpdate.services.storage.set(serviceId, storages);
 }
 /**
 * Update a preimage.
@@ -12593,8 +12633,10 @@ class PartiallyUpdatedState {
 * Note we store all previous entries as well, since there might be a sequence of:
 * `provide` -> `remove` and both should update the end state somehow.
 */
-updatePreimage(newUpdate) {
-this.stateUpdate.services.preimages.
+updatePreimage(serviceId, newUpdate) {
+const updatePreimages = this.stateUpdate.services.preimages.get(serviceId) ?? [];
+updatePreimages.push(newUpdate);
+this.stateUpdate.services.preimages.set(serviceId, updatePreimages);
 }
 updateServiceStorageUtilisation(serviceId, items, bytes, serviceInfo) {
 check `${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
@@ -12603,11 +12645,11 @@ class PartiallyUpdatedState {
 const overflowBytes = !isU64(bytes);
 // TODO [ToDr] this is not specified in GP, but it seems sensible.
 if (overflowItems || overflowBytes) {
-return Result$1.error(InsufficientFundsError);
+return Result$1.error(InsufficientFundsError, () => `Storage utilisation overflow: items=${overflowItems}, bytes=${overflowBytes}`);
 }
 const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
 if (serviceInfo.balance < thresholdBalance) {
-return Result$1.error(InsufficientFundsError);
+return Result$1.error(InsufficientFundsError, () => `Service balance (${serviceInfo.balance}) below threshold (${thresholdBalance})`);
 }
 // Update service info with new details.
 this.updateServiceInfo(serviceId, ServiceAccountInfo.create({
@@ -12618,20 +12660,23 @@ class PartiallyUpdatedState {
 return Result$1.ok(OK);
 }
 updateServiceInfo(serviceId, newInfo) {
-const
-
-
-if (existingItem?.action.kind === UpdateServiceKind.Create) {
-this.stateUpdate.services.servicesUpdates.splice(idx, toRemove, UpdateService.create({
-serviceId,
+const existingUpdate = this.stateUpdate.services.updated.get(serviceId);
+if (existingUpdate?.action.kind === UpdateServiceKind.Create) {
+this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
 serviceInfo: newInfo,
-lookupHistory:
+lookupHistory: existingUpdate.action.lookupHistory,
 }));
 return;
 }
-this.stateUpdate.services.
-
+this.stateUpdate.services.updated.set(serviceId, UpdateService.update({
+serviceInfo: newInfo,
+}));
+}
+createService(serviceId, newInfo, newLookupHistory) {
+this.stateUpdate.services.created.push(serviceId);
+this.stateUpdate.services.updated.set(serviceId, UpdateService.create({
 serviceInfo: newInfo,
+lookupHistory: newLookupHistory,
 }));
 }
 getPrivilegedServices() {
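Several of the `PartiallyUpdatedState` methods above share the same upsert pattern for the `Map<serviceId, updates[]>` collections: fetch the per-service array (or a fresh one), push or splice the new update, and write the array back. A standalone sketch of that pattern (the generic helper is illustrative, not a package API):

    // Mirrors the bookkeeping in `updatePreimage`/`updateStorage` above.
    function pushServiceUpdate(collection, serviceId, update) {
      const updates = collection.get(serviceId) ?? [];
      updates.push(update);
      collection.set(serviceId, updates);
      return collection;
    }

    const preimages = new Map();
    pushServiceUpdate(preimages, 7, { action: { kind: "Provide" } });
    pushServiceUpdate(preimages, 7, { action: { kind: "Remove" } });
    console.log(preimages.get(7).length); // 2 — both updates are kept per service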
@@ -14252,7 +14297,7 @@ class ReadablePage extends MemoryPage {
 loadInto(result, startIndex, length) {
 const endIndex = startIndex + length;
 if (endIndex > PAGE_SIZE$1) {
-return Result$1.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE$1));
+return Result$1.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE$1), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE$1}`);
 }
 const bytes = this.data.subarray(startIndex, endIndex);
 // we zero the bytes, since data might not yet be initialized at `endIndex`.
@@ -14261,7 +14306,7 @@ class ReadablePage extends MemoryPage {
 return Result$1.ok(OK);
 }
 storeFrom(_address, _data) {
-return Result$1.error(PageFault.fromMemoryIndex(this.start, true));
+return Result$1.error(PageFault.fromMemoryIndex(this.start, true), () => `Page fault: attempted to write to read-only page at ${this.start}`);
 }
 setData(pageIndex, data) {
 this.data.set(data, pageIndex);
@@ -14290,7 +14335,7 @@ class WriteablePage extends MemoryPage {
 loadInto(result, startIndex, length) {
 const endIndex = startIndex + length;
 if (endIndex > PAGE_SIZE$1) {
-return Result$1.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE$1));
+return Result$1.error(PageFault.fromMemoryIndex(this.start + PAGE_SIZE$1), () => `Page fault: read beyond page boundary at ${this.start + PAGE_SIZE$1}`);
 }
 const bytes = this.view.subarray(startIndex, endIndex);
 // we zero the bytes, since the view might not yet be initialized at `endIndex`.
@@ -14360,7 +14405,7 @@ class Memory {
 logger$3.insane `MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`;
 const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
 if (pagesResult.isError) {
-return Result$1.error(pagesResult.error);
+return Result$1.error(pagesResult.error, pagesResult.details);
 }
 const pages = pagesResult.ok;
 let currentPosition = address;
@@ -14385,14 +14430,14 @@ class Memory {
 const pages = [];
 for (const pageNumber of pageRange) {
 if (pageNumber < RESERVED_NUMBER_OF_PAGES) {
-return Result$1.error(PageFault.fromPageNumber(pageNumber, true));
+return Result$1.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to access reserved page ${pageNumber}`);
 }
 const page = this.memory.get(pageNumber);
 if (page === undefined) {
-return Result$1.error(PageFault.fromPageNumber(pageNumber));
+return Result$1.error(PageFault.fromPageNumber(pageNumber), () => `Page fault: page ${pageNumber} not allocated`);
 }
 if (accessType === AccessType.WRITE && !page.isWriteable()) {
-return Result$1.error(PageFault.fromPageNumber(pageNumber, true));
+return Result$1.error(PageFault.fromPageNumber(pageNumber, true), () => `Page fault: attempted to write to read-only page ${pageNumber}`);
 }
 pages.push(page);
 }
@@ -14410,7 +14455,7 @@ class Memory {
 }
 const pagesResult = this.getPages(startAddress, result.length, AccessType.READ);
 if (pagesResult.isError) {
-return Result$1.error(pagesResult.error);
+return Result$1.error(pagesResult.error, pagesResult.details);
 }
 const pages = pagesResult.ok;
 let currentPosition = startAddress;
@@ -16215,7 +16260,7 @@ class ProgramDecoder {
 }
 catch (e) {
 logger$2.error `Invalid program: ${e}`;
-return Result$1.error(ProgramDecoderError.InvalidProgramError);
+return Result$1.error(ProgramDecoderError.InvalidProgramError, () => `Program decoder error: ${e}`);
 }
 }
 }
@@ -16496,7 +16541,7 @@ class HostCallMemory {
 return Result$1.ok(OK);
 }
 if (address + tryAsU64(bytes.length) > MEMORY_SIZE) {
-return Result$1.error(new OutOfBounds());
+return Result$1.error(new OutOfBounds(), () => `Memory access out of bounds: address ${address} + length ${bytes.length} exceeds memory size`);
 }
 return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
 }
@@ -16505,13 +16550,10 @@ class HostCallMemory {
 return Result$1.ok(OK);
 }
 if (startAddress + tryAsU64(result.length) > MEMORY_SIZE) {
-return Result$1.error(new OutOfBounds());
+return Result$1.error(new OutOfBounds(), () => `Memory access out of bounds: address ${startAddress} + length ${result.length} exceeds memory size`);
 }
 return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
 }
-getMemory() {
-return this.memory;
-}
 }
 
 class HostCallRegisters {
@@ -17599,32 +17641,33 @@ class Preimages {
 }
 if (prevPreimage.requester > currPreimage.requester ||
 currPreimage.blob.compare(prevPreimage.blob).isLessOrEqual()) {
-return Result$1.error(PreimagesErrorCode.PreimagesNotSortedUnique);
+return Result$1.error(PreimagesErrorCode.PreimagesNotSortedUnique, () => `Preimages not sorted/unique at index ${i}`);
 }
 }
 const { preimages, slot } = input;
-const pendingChanges =
+const pendingChanges = new Map();
 // select preimages for integration
 for (const preimage of preimages) {
 const { requester, blob } = preimage;
 const hash = this.blake2b.hashBytes(blob).asOpaque();
 const service = this.state.getService(requester);
 if (service === null) {
-return Result$1.error(PreimagesErrorCode.AccountNotFound);
+return Result$1.error(PreimagesErrorCode.AccountNotFound, () => `Service not found: ${requester}`);
 }
 const hasPreimage = service.hasPreimage(hash);
 const slots = service.getLookupHistory(hash, tryAsU32(blob.length));
 // https://graypaper.fluffylabs.dev/#/5f542d7/181800181900
 // https://graypaper.fluffylabs.dev/#/5f542d7/116f0011a500
 if (hasPreimage || slots === null || !LookupHistoryItem.isRequested(slots)) {
-return Result$1.error(PreimagesErrorCode.PreimageUnneeded);
+return Result$1.error(PreimagesErrorCode.PreimageUnneeded, () => `Preimage unneeded: requester=${requester}, hash=${hash}, hasPreimage=${hasPreimage}, isRequested=${slots !== null && LookupHistoryItem.isRequested(slots)}`);
 }
 // https://graypaper.fluffylabs.dev/#/5f542d7/18c00018f300
-pendingChanges.
-
+const updates = pendingChanges.get(requester) ?? [];
+updates.push(UpdatePreimage.provide({
 preimage: PreimageItem.create({ hash, blob }),
 slot,
 }));
+pendingChanges.set(requester, updates);
 }
 return Result$1.ok({
 preimages: pendingChanges,