@typeberry/jam 0.1.3-ca63b35 → 0.2.0-0a3dfd4

This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
@@ -2870,7 +2870,7 @@ var GpVersion;
2870
2870
  (function (GpVersion) {
2871
2871
  GpVersion["V0_6_7"] = "0.6.7";
2872
2872
  GpVersion["V0_7_0"] = "0.7.0";
2873
- GpVersion["V0_7_1"] = "0.7.1-preview";
2873
+ GpVersion["V0_7_1"] = "0.7.1";
2874
2874
  GpVersion["V0_7_2"] = "0.7.2-preview";
2875
2875
  })(GpVersion || (GpVersion = {}));
2876
2876
  var TestSuite;
@@ -2879,11 +2879,11 @@ var TestSuite;
2879
2879
  TestSuite["JAMDUNA"] = "jamduna";
2880
2880
  })(TestSuite || (TestSuite = {}));
2881
2881
  const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
2882
- const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
2882
+ const DEFAULT_VERSION = GpVersion.V0_7_1;
2883
2883
  const env = typeof process === "undefined" ? {} : process.env;
2884
- const DEFAULT_VERSION = GpVersion.V0_7_0;
2885
2884
  let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
2886
2885
  let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
2886
+ const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
2887
2887
  function parseCurrentVersion(env) {
2888
2888
  if (env === undefined) {
2889
2889
  return undefined;
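
An illustrative sketch of how the active Gray Paper version is resolved after this hunk; the default moves from 0.7.0 to the now non-preview 0.7.1, and the internals of parseCurrentVersion beyond the undefined check shown above are assumed:

// Sketch only: GP_VERSION still overrides the default, which is now 0.7.1 instead of 0.7.0.
const requested = env.GP_VERSION; // e.g. "0.7.2-preview"
const parsed = Object.values(GpVersion).find((v) => v === requested);
const active = parsed ?? GpVersion.V0_7_1; // DEFAULT_VERSION after this change
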
@@ -3170,7 +3170,7 @@ function resultToString(res) {
3170
3170
  if (res.isOk) {
3171
3171
  return `OK: ${typeof res.ok === "symbol" ? res.ok.toString() : res.ok}`;
3172
3172
  }
3173
- return `${res.details}\nError: ${maybeTaggedErrorToString(res.error)}`;
3173
+ return `${res.details()}\nError: ${maybeTaggedErrorToString(res.error)}`;
3174
3174
  }
3175
3175
  /** An indication of two possible outcomes returned from a function. */
3176
3176
  const result_Result = {
@@ -3184,7 +3184,7 @@ const result_Result = {
3184
3184
  };
3185
3185
  },
3186
3186
  /** Create new [`Result`] with `Error` status. */
3187
- error: (error, details = "") => {
3187
+ error: (error, details) => {
3188
3188
  debug_check `${error !== undefined} 'Error' type cannot be undefined.`;
3189
3189
  return {
3190
3190
  isOk: false,
@@ -3303,7 +3303,7 @@ function deepEqual(actual, expected, { context = [], errorsCollector, ignore = [
3303
3303
  }
3304
3304
  if (actual.isError && expected.isError) {
3305
3305
  deepEqual(actual.error, expected.error, { context: ctx.concat(["error"]), errorsCollector: errors, ignore });
3306
- deepEqual(actual.details, expected.details, {
3306
+ deepEqual(actual.details(), expected.details(), {
3307
3307
  context: ctx.concat(["details"]),
3308
3308
  errorsCollector: errors,
3309
3309
  // display details when error does not match
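
These hunks change `details` on an error Result from an eagerly built string (previously defaulted to "") into a function, so both resultToString and deepEqual now call res.details(). An illustrative sketch of the new shape (the error value and slot are placeholders):

// Sketch only: details is a lazily evaluated thunk, rendered only when the error is printed.
const res = result_Result.error("invalid-slot", () => `slot=${slot} is out of range`);
if (!res.isOk) {
  console.log(resultToString(res)); // "slot=... is out of range\nError: invalid-slot"
}
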
@@ -4940,6 +4940,9 @@ class ObjectView {
4940
4940
  toString() {
4941
4941
  return `View<${this.materializedConstructor.name}>(cache: ${this.cache.size})`;
4942
4942
  }
4943
+ [TEST_COMPARE_USING]() {
4944
+ return this.materialize();
4945
+ }
4943
4946
  }
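
The added [TEST_COMPARE_USING]() hook lets test equality helpers compare a lazy ObjectView by its materialized value rather than by the view wrapper itself; whether the bundled deepEqual consults this symbol directly is an assumption based on its name. Illustrative intent:

// Sketch only: comparing a view against a plain materialized object.
// someObjectView[TEST_COMPARE_USING]() === someObjectView.materialize()
deepEqual(someObjectView, expectedMaterializedObject);
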
4944
4947
  /**
4945
4948
  * A lazy-evaluated decoder of a sequence.
@@ -5072,7 +5075,7 @@ const TYPICAL_DICTIONARY_LENGTH = 32;
5072
5075
  * It's not true in a general case, but should be good enough for us.
5073
5076
  *
5074
5077
  */
5075
- function readonlyArray(desc) {
5078
+ function descriptors_readonlyArray(desc) {
5076
5079
  return desc.convert((x) => {
5077
5080
  debug_check `
5078
5081
  ${Array.isArray(x)}
@@ -5234,7 +5237,15 @@ var descriptors_codec;
5234
5237
  /** Custom encoding / decoding logic. */
5235
5238
  codec.custom = ({ name, sizeHint = { bytes: 0, isExact: false }, }, encode, decode, skip) => Descriptor.new(name, sizeHint, encode, decode, skip);
5236
5239
  /** Choose a descriptor depending on the encoding/decoding context. */
5237
- codec.select = ({ name, sizeHint, }, chooser) => Descriptor.withView(name, sizeHint, (e, x) => chooser(e.getContext()).encode(e, x), (d) => chooser(d.getContext()).decode(d), (s) => chooser(s.decoder.getContext()).skip(s), chooser(null).View);
5240
+ codec.select = ({ name, sizeHint, }, chooser) => {
5241
+ const Self = chooser(null);
5242
+ return Descriptor.withView(name, sizeHint, (e, x) => chooser(e.getContext()).encode(e, x), (d) => chooser(d.getContext()).decode(d), (s) => chooser(s.decoder.getContext()).skip(s), hasUniqueView(Self)
5243
+ ? codec.select({
5244
+ name: Self.View.name,
5245
+ sizeHint: Self.View.sizeHint,
5246
+ }, (ctx) => chooser(ctx).View)
5247
+ : Self.View);
5248
+ };
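
The reworked codec.select derives its View the same way it derives encode/decode: when the context-free choice (chooser(null)) exposes its own distinct View, a nested select is built over the per-context Views instead of always reusing that single View. An illustrative call-shape sketch (CodecA, CodecB and isLegacyContext are placeholders):

// Sketch only: pick a descriptor from the encoding/decoding context; the View now follows the same chooser.
const selected = descriptors_codec.select(
  { name: "maybeLegacy", sizeHint: CodecA.sizeHint },
  (ctx) => (ctx !== null && isLegacyContext(ctx) ? CodecB : CodecA),
);
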
5238
5249
  /**
5239
5250
  * A descriptor for a more complex POJO.
5240
5251
  *
@@ -6056,7 +6067,7 @@ const BANDERSNATCH_KEY_BYTES = 32;
6056
6067
  /** Bandersnatch VRF signature size */
6057
6068
  const bandersnatch_BANDERSNATCH_VRF_SIGNATURE_BYTES = 96;
6058
6069
  /** Bandersnatch ring commitment size */
6059
- const BANDERSNATCH_RING_ROOT_BYTES = 144;
6070
+ const bandersnatch_BANDERSNATCH_RING_ROOT_BYTES = 144;
6060
6071
  /** Bandersnatch proof size */
6061
6072
  const BANDERSNATCH_PROOF_BYTES = 784;
6062
6073
  /** BLS public key size. */
@@ -7184,9 +7195,9 @@ function codecWithContext(chooser) {
7184
7195
  /** Codec for a known-size array with length validation. */
7185
7196
  const codecKnownSizeArray = (val, options, _id) => {
7186
7197
  if ("fixedLength" in options) {
7187
- return readonlyArray(descriptors_codec.sequenceFixLen(val, options.fixedLength)).convert(seeThrough, sized_array_asKnownSize);
7198
+ return descriptors_readonlyArray(descriptors_codec.sequenceFixLen(val, options.fixedLength)).convert(seeThrough, sized_array_asKnownSize);
7188
7199
  }
7189
- return readonlyArray(descriptors_codec.sequenceVarLen(val, options)).convert(seeThrough, sized_array_asKnownSize);
7200
+ return descriptors_readonlyArray(descriptors_codec.sequenceVarLen(val, options)).convert(seeThrough, sized_array_asKnownSize);
7190
7201
  };
7191
7202
  /** Codec for a fixed-size array with length validation. */
7192
7203
  const codecFixedSizeArray = (val, len) => {
@@ -7323,7 +7334,7 @@ function tryAsPerValidator(array, spec) {
7323
7334
  `;
7324
7335
  return sized_array_asKnownSize(array);
7325
7336
  }
7326
- const codecPerValidator = (val) => codecWithContext((context) => {
7337
+ const common_codecPerValidator = (val) => codecWithContext((context) => {
7327
7338
  return codecKnownSizeArray(val, {
7328
7339
  fixedLength: context.validatorsCount,
7329
7340
  });
@@ -7450,7 +7461,7 @@ class Verdict extends WithDebug {
7450
7461
  workReportHash: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
7451
7462
  votesEpoch: descriptors_codec.u32.asOpaque(),
7452
7463
  votes: codecWithContext((context) => {
7453
- return readonlyArray(descriptors_codec.sequenceFixLen(Judgement.Codec, context.validatorsSuperMajority)).convert(seeThrough, sized_array_asKnownSize);
7464
+ return descriptors_readonlyArray(descriptors_codec.sequenceFixLen(Judgement.Codec, context.validatorsSuperMajority)).convert(seeThrough, sized_array_asKnownSize);
7454
7465
  }),
7455
7466
  });
7456
7467
  static create({ workReportHash, votesEpoch, votes }) {
@@ -8142,7 +8153,7 @@ const WorkReportCodec = descriptors_codec.Class(WorkReportNoCodec, {
8142
8153
  authorizerHash: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
8143
8154
  authorizationGasUsed: descriptors_codec.varU64.asOpaque(),
8144
8155
  authorizationOutput: descriptors_codec.blob,
8145
- segmentRootLookup: readonlyArray(descriptors_codec.sequenceVarLen(WorkPackageInfo.Codec)),
8156
+ segmentRootLookup: descriptors_readonlyArray(descriptors_codec.sequenceVarLen(WorkPackageInfo.Codec)),
8146
8157
  results: descriptors_codec.sequenceVarLen(WorkResult.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
8147
8158
  });
8148
8159
  const WorkReportCodecPre070 = descriptors_codec.Class(WorkReportNoCodec, {
@@ -8156,7 +8167,7 @@ const WorkReportCodecPre070 = descriptors_codec.Class(WorkReportNoCodec, {
8156
8167
  }),
8157
8168
  authorizerHash: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
8158
8169
  authorizationOutput: descriptors_codec.blob,
8159
- segmentRootLookup: readonlyArray(descriptors_codec.sequenceVarLen(WorkPackageInfo.Codec)),
8170
+ segmentRootLookup: descriptors_readonlyArray(descriptors_codec.sequenceVarLen(WorkPackageInfo.Codec)),
8160
8171
  results: descriptors_codec.sequenceVarLen(WorkResult.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
8161
8172
  authorizationGasUsed: descriptors_codec.varU64.asOpaque(),
8162
8173
  });
@@ -8274,16 +8285,16 @@ class SignedTicket extends WithDebug {
8274
8285
  }
8275
8286
  }
8276
8287
  /** Anonymous? entry into the ticket contest. */
8277
- class Ticket extends WithDebug {
8288
+ class tickets_Ticket extends WithDebug {
8278
8289
  id;
8279
8290
  attempt;
8280
- static Codec = descriptors_codec.Class(Ticket, {
8291
+ static Codec = descriptors_codec.Class(tickets_Ticket, {
8281
8292
  id: descriptors_codec.bytes(hash_HASH_SIZE),
8282
8293
  // TODO [ToDr] we should verify that attempt is either 0|1|2.
8283
8294
  attempt: descriptors_codec.u8.asOpaque(),
8284
8295
  });
8285
8296
  static create({ id, attempt }) {
8286
- return new Ticket(id, attempt);
8297
+ return new tickets_Ticket(id, attempt);
8287
8298
  }
8288
8299
  constructor(
8289
8300
  /**
@@ -8352,7 +8363,7 @@ class ValidatorKeys extends WithDebug {
8352
8363
  class TicketsMarker extends WithDebug {
8353
8364
  tickets;
8354
8365
  static Codec = descriptors_codec.Class(TicketsMarker, {
8355
- tickets: codecPerEpochBlock(Ticket.Codec),
8366
+ tickets: codecPerEpochBlock(tickets_Ticket.Codec),
8356
8367
  });
8357
8368
  static create({ tickets }) {
8358
8369
  return new TicketsMarker(tickets);
@@ -8376,7 +8387,7 @@ class EpochMarker extends WithDebug {
8376
8387
  static Codec = descriptors_codec.Class(EpochMarker, {
8377
8388
  entropy: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
8378
8389
  ticketsEntropy: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
8379
- validators: codecPerValidator(ValidatorKeys.Codec),
8390
+ validators: common_codecPerValidator(ValidatorKeys.Codec),
8380
8391
  });
8381
8392
  static create({ entropy, ticketsEntropy, validators }) {
8382
8393
  return new EpochMarker(entropy, ticketsEntropy, validators);
@@ -8648,6 +8659,19 @@ function emptyBlock(slot = tryAsTimeSlot(0)) {
8648
8659
  });
8649
8660
  }
8650
8661
 
8662
+ ;// CONCATENATED MODULE: ./packages/jam/block/utils.ts
8663
+
8664
+ /**
8665
+ * Take an input data and re-encode that data as view.
8666
+ *
8667
+ * NOTE: this function should NEVER be used in any production code,
8668
+ * it's only a test helper.
8669
+ */
8670
+ function reencodeAsView(codec, object, chainSpec) {
8671
+ const encoded = encoder_Encoder.encodeObject(codec, object, chainSpec);
8672
+ return decoder_Decoder.decodeObject(codec.View, encoded, chainSpec);
8673
+ }
8674
+
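
An illustrative use of the new test helper above (Header stands in for any codec class in the bundle that exposes a View):

// Sketch only: round-trip an in-memory object through its codec to obtain the lazy View.
const headerView = reencodeAsView(Header.Codec, header, chainSpec);
// which, per the function body above, is equivalent to:
// const bytes = encoder_Encoder.encodeObject(Header.Codec, header, chainSpec);
// const view = decoder_Decoder.decodeObject(Header.Codec.View, bytes, chainSpec);
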
8651
8675
  ;// CONCATENATED MODULE: ./packages/jam/block/index.ts
8652
8676
 
8653
8677
 
@@ -8666,6 +8690,7 @@ function emptyBlock(slot = tryAsTimeSlot(0)) {
8666
8690
 
8667
8691
 
8668
8692
 
8693
+
8669
8694
  ;// CONCATENATED MODULE: ./packages/jam/database-lmdb/blocks.ts
8670
8695
 
8671
8696
 
@@ -9017,10 +9042,129 @@ function accumulationOutputComparator(a, b) {
9017
9042
  return Ordering.Equal;
9018
9043
  }
9019
9044
 
9045
+ ;// CONCATENATED MODULE: ./packages/jam/block/gp-constants.ts
9046
+
9047
+
9048
+ /**
9049
+ * This file lists all of the constants defined in the GrayPaper appendix.
9050
+ *
9051
+ * NOTE: Avoid using the constants directly, prefer "named" constants defined
9052
+ * in a semantical proximity to where they are used.
9053
+ *
9054
+ * NOTE: This file will most likely be removed in the future. The constants
9055
+ * here are only temporarily for convenience. When we figure out better names
9056
+ * and places for these this file will be eradicated.
9057
+ *
9058
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/442300442300?v=0.7.2
9059
+ */
9060
+ /** `G_I`: The gas allocated to invoke a work-package’s Is-Authorized logic. */
9061
+ const G_I = 50_000_000;
9062
+ /** `I`: Maximum number of work items in a package. */
9063
+ const I = (/* unused pure expression or super */ null && (MAX_NUMBER_OF_WORK_ITEMS));
9064
+ /** `O`: Maximum number of items in the authorizations pool. */
9065
+ const O = 8;
9066
+ /** `Q`: The number of items in the authorizations queue. */
9067
+ const Q = 80;
9068
+ /** `S`: The maximum number of entries in the accumulation queue. */
9069
+ const S = 1024;
9070
+ /** `T`: The maximum number of extrinsics in a work-package. */
9071
+ const T = 128;
9072
+ /** `W_A`: The maximum size of is-authorized code in octets. */
9073
+ const W_A = 64_000;
9074
+ /** `W_B`: The maximum size of the concatenated variable-size blobs, extrinsics and imported segments of a work-package, in octets */
9075
+ const W_B = Compatibility.isGreaterOrEqual(GpVersion.V0_7_2) ? 13_791_360 : 13_794_305;
9076
+ /** `W_C`: The maximum size of service code in octets. */
9077
+ const W_C = 4_000_000;
9078
+ /** `W_M`: The maximum number of imports in a work-package. */
9079
+ const W_M = 3_072;
9080
+ /** `W_R`: The maximum total size of all output blobs in a work-report, in octets. */
9081
+ const W_R = 49_152;
9082
+ /** `W_T`: The size of a transfer memo in octets. */
9083
+ const W_T = 128;
9084
+ /** `W_M`: The maximum number of exports in a work-package. */
9085
+ const W_X = 3_072;
9086
+ // TODO [ToDr] Not sure where these should live yet :(
9087
+ /**
9088
+ * `S`: The minimum public service index.
9089
+ * Services of indices below these may only be created by the Registrar.
9090
+ *
9091
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/447a00447a00?v=0.7.2
9092
+ */
9093
+ const MIN_PUBLIC_SERVICE_INDEX = (/* unused pure expression or super */ null && (2 ** 16));
9094
+ /**
9095
+ * `J`: The maximum sum of dependency items in a work-report.
9096
+ *
9097
+ * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
9098
+ */
9099
+ const MAX_REPORT_DEPENDENCIES = 8;
9100
+
9101
+ ;// CONCATENATED MODULE: ./packages/jam/state/accumulation-queue.ts
9102
+
9103
+
9104
+
9105
+
9106
+
9107
+
9108
+
9109
+ /**
9110
+ * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
9111
+ *
9112
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
9113
+ */
9114
+ class NotYetAccumulatedReport extends WithDebug {
9115
+ report;
9116
+ dependencies;
9117
+ static Codec = descriptors_codec.Class(NotYetAccumulatedReport, {
9118
+ report: WorkReport.Codec,
9119
+ dependencies: codecKnownSizeArray(descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(), {
9120
+ typicalLength: MAX_REPORT_DEPENDENCIES / 2,
9121
+ maxLength: MAX_REPORT_DEPENDENCIES,
9122
+ minLength: 0,
9123
+ }),
9124
+ });
9125
+ static create({ report, dependencies }) {
9126
+ return new NotYetAccumulatedReport(report, dependencies);
9127
+ }
9128
+ constructor(
9129
+ /**
9130
+ * Each of these were made available at most one epoch ago
9131
+ * but have or had unfulfilled dependencies.
9132
+ */
9133
+ report,
9134
+ /**
9135
+ * Alongside the work-report itself, we retain its un-accumulated
9136
+ * dependencies, a set of work-package hashes.
9137
+ *
9138
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
9139
+ */
9140
+ dependencies) {
9141
+ super();
9142
+ this.report = report;
9143
+ this.dependencies = dependencies;
9144
+ }
9145
+ }
9146
+ const accumulationQueueCodec = codecPerEpochBlock(descriptors_readonlyArray(descriptors_codec.sequenceVarLen(NotYetAccumulatedReport.Codec)));
9147
+
9148
+ ;// CONCATENATED MODULE: ./packages/jam/state/common.ts
9149
+
9150
+
9151
+ /** Check if given array has correct length before casting to the opaque type. */
9152
+ function tryAsPerCore(array, spec) {
9153
+ debug_check `
9154
+ ${array.length === spec.coresCount}
9155
+ Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
9156
+ `;
9157
+ return opaque_asOpaqueType(array);
9158
+ }
9159
+ const codecPerCore = (val) => codecWithContext((context) => {
9160
+ return codecKnownSizeArray(val, { fixedLength: context.coresCount });
9161
+ });
9162
+
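
An illustrative sketch of the relocated per-core helpers (the two-core spec object is a placeholder; the real chain spec carries more fields):

// Sketch only: a per-core array must match spec.coresCount before being cast to the opaque type.
const spec = { coresCount: 2 };
const perCore = tryAsPerCore([null, null], spec); // ok: length 2 matches coresCount
const codec = codecPerCore(descriptors_codec.optional(AvailabilityAssignment.Codec)); // as used by availabilityAssignmentsCodec below
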
9020
9163
  ;// CONCATENATED MODULE: ./packages/jam/state/assurances.ts
9021
9164
 
9022
9165
 
9023
9166
 
9167
+
9024
9168
  /**
9025
9169
  * Assignment of particular work report to a core.
9026
9170
  *
@@ -9049,27 +9193,30 @@ class AvailabilityAssignment extends WithDebug {
9049
9193
  this.timeout = timeout;
9050
9194
  }
9051
9195
  }
9196
+ const availabilityAssignmentsCodec = codecPerCore(descriptors_codec.optional(AvailabilityAssignment.Codec));
9052
9197
 
9053
- ;// CONCATENATED MODULE: ./packages/jam/state/common.ts
9198
+ ;// CONCATENATED MODULE: ./packages/jam/state/auth.ts
9054
9199
 
9055
9200
 
9056
- /** Check if given array has correct length before casting to the opaque type. */
9057
- function tryAsPerCore(array, spec) {
9058
- debug_check `
9059
- ${array.length === spec.coresCount}
9060
- Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
9061
- `;
9062
- return opaque_asOpaqueType(array);
9063
- }
9064
- const codecPerCore = (val) => codecWithContext((context) => {
9065
- return codecKnownSizeArray(val, { fixedLength: context.coresCount });
9066
- });
9201
+
9202
+
9203
+
9204
+ /** `O`: Maximal authorization pool size. */
9205
+ const MAX_AUTH_POOL_SIZE = O;
9206
+ /** `Q`: Size of the authorization queue. */
9207
+ const AUTHORIZATION_QUEUE_SIZE = Q;
9208
+ const authPoolsCodec = codecPerCore(codecKnownSizeArray(descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(), {
9209
+ minLength: 0,
9210
+ maxLength: MAX_AUTH_POOL_SIZE,
9211
+ typicalLength: MAX_AUTH_POOL_SIZE,
9212
+ }));
9213
+ const authQueuesCodec = codecPerCore(codecFixedSizeArray(descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE));
9067
9214
 
9068
9215
  ;// CONCATENATED MODULE: ./packages/jam/state/disputes.ts
9069
9216
 
9070
9217
 
9071
9218
 
9072
- const sortedSetCodec = () => readonlyArray(descriptors_codec.sequenceVarLen(descriptors_codec.bytes(hash_HASH_SIZE))).convert((input) => input.array, (output) => {
9219
+ const sortedSetCodec = () => descriptors_readonlyArray(descriptors_codec.sequenceVarLen(descriptors_codec.bytes(hash_HASH_SIZE))).convert((input) => input.array, (output) => {
9073
9220
  const typed = output.map((x) => x.asOpaque());
9074
9221
  return SortedSet.fromSortedArray(hashComparator, typed);
9075
9222
  });
@@ -9131,65 +9278,6 @@ function hashComparator(a, b) {
9131
9278
  return a.compare(b);
9132
9279
  }
9133
9280
 
9134
- ;// CONCATENATED MODULE: ./packages/jam/block/gp-constants.ts
9135
-
9136
- /**
9137
- * This file lists all of the constants defined in the GrayPaper appendix.
9138
- *
9139
- * NOTE: Avoid using the constants directly, prefer "named" constants defined
9140
- * in a semantical proximity to where they are used.
9141
- *
9142
- * NOTE: This file will most likely be removed in the future. The constants
9143
- * here are only temporarily for convenience. When we figure out better names
9144
- * and places for these this file will be eradicated.
9145
- *
9146
- * https://graypaper.fluffylabs.dev/#/579bd12/413000413000
9147
- */
9148
- /** `G_I`: The gas allocated to invoke a work-package’s Is-Authorized logic. */
9149
- const G_I = 50_000_000;
9150
- /** `I`: Maximum number of work items in a package. */
9151
- const I = (/* unused pure expression or super */ null && (MAX_NUMBER_OF_WORK_ITEMS));
9152
- /** `O`: Maximum number of items in the authorizations pool. */
9153
- const O = 8;
9154
- /** `Q`: The number of items in the authorizations queue. */
9155
- const Q = 80;
9156
- /** `S`: The maximum number of entries in the accumulation queue. */
9157
- const S = 1024;
9158
- /** `T`: The maximum number of extrinsics in a work-package. */
9159
- const T = 128;
9160
- /** `W_A`: The maximum size of is-authorized code in octets. */
9161
- const W_A = 64_000;
9162
- /** `W_B`: The maximum size of an encoded work-package with extrinsic data and imports. */
9163
- const W_B = 13_794_305;
9164
- /** `W_C`: The maximum size of service code in octets. */
9165
- const W_C = 4_000_000;
9166
- /** `W_M`: The maximum number of imports in a work-package. */
9167
- const W_M = 3_072;
9168
- /** `W_R`: The maximum total size of all output blobs in a work-report, in octets. */
9169
- const W_R = 49_152;
9170
- /** `W_T`: The size of a transfer memo in octets. */
9171
- const W_T = 128;
9172
- /** `W_M`: The maximum number of exports in a work-package. */
9173
- const W_X = 3_072;
9174
- // TODO [ToDr] Not sure where these should live yet :(
9175
- /**
9176
- * `S`: The minimum public service index.
9177
- * Services of indices below these may only be created by the Registrar.
9178
- *
9179
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/447a00447a00?v=0.7.2
9180
- */
9181
- const MIN_PUBLIC_SERVICE_INDEX = (/* unused pure expression or super */ null && (2 ** 16));
9182
- /**
9183
- * `J`: The maximum sum of dependency items in a work-report.
9184
- *
9185
- * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
9186
- */
9187
- const MAX_REPORT_DEPENDENCIES = 8;
9188
- /** `Q`: Size of the authorization queue. */
9189
- const AUTHORIZATION_QUEUE_SIZE = Q;
9190
- /** `O`: Maximal authorization pool size. */
9191
- const MAX_AUTH_POOL_SIZE = O;
9192
-
9193
9281
  ;// CONCATENATED MODULE: ./packages/core/pvm-interpreter/ops/math-consts.ts
9194
9282
  const MAX_VALUE = 4294967295;
9195
9283
  const math_consts_MAX_VALUE_U64 = (/* unused pure expression or super */ null && (2n ** 63n));
@@ -9197,7 +9285,7 @@ const MIN_VALUE = (/* unused pure expression or super */ null && (-(2 ** 31)));
9197
9285
  const MAX_SHIFT_U32 = 32;
9198
9286
  const MAX_SHIFT_U64 = 64n;
9199
9287
 
9200
- ;// CONCATENATED MODULE: ./packages/jam/state/service.ts
9288
+ ;// CONCATENATED MODULE: ./packages/jam/state/recent-blocks.ts
9201
9289
 
9202
9290
 
9203
9291
 
@@ -9205,26 +9293,270 @@ const MAX_SHIFT_U64 = 64n;
9205
9293
 
9206
9294
 
9207
9295
  /**
9208
- * `B_S`: The basic minimum balance which all services require.
9209
- *
9210
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
9211
- */
9212
- const BASE_SERVICE_BALANCE = 100n;
9213
- /**
9214
- * `B_I`: The additional minimum balance required per item of elective service state.
9296
+ * `H = 8`: The size of recent history, in blocks.
9215
9297
  *
9216
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
9298
+ * https://graypaper.fluffylabs.dev/#/579bd12/416300416500
9217
9299
  */
9218
- const ELECTIVE_ITEM_BALANCE = 10n;
9300
+ const MAX_RECENT_HISTORY = 8;
9301
+ /** Recent history of a single block. */
9302
+ class BlockState extends WithDebug {
9303
+ headerHash;
9304
+ accumulationResult;
9305
+ postStateRoot;
9306
+ reported;
9307
+ static Codec = descriptors_codec.Class(BlockState, {
9308
+ headerHash: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
9309
+ accumulationResult: descriptors_codec.bytes(hash_HASH_SIZE),
9310
+ postStateRoot: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
9311
+ reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
9312
+ });
9313
+ static create({ headerHash, accumulationResult, postStateRoot, reported }) {
9314
+ return new BlockState(headerHash, accumulationResult, postStateRoot, reported);
9315
+ }
9316
+ constructor(
9317
+ /** Header hash. */
9318
+ headerHash,
9319
+ /** Merkle mountain belt of accumulation result. */
9320
+ accumulationResult,
9321
+ /** Posterior state root filled in with a 1-block delay. */
9322
+ postStateRoot,
9323
+ /** Reported work packages (no more than number of cores). */
9324
+ reported) {
9325
+ super();
9326
+ this.headerHash = headerHash;
9327
+ this.accumulationResult = accumulationResult;
9328
+ this.postStateRoot = postStateRoot;
9329
+ this.reported = reported;
9330
+ }
9331
+ }
9219
9332
  /**
9220
- * `B_L`: The additional minimum balance required per octet of elective service state.
9333
+ * Recent history of blocks.
9221
9334
  *
9222
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
9335
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
9223
9336
  */
9224
- const ELECTIVE_BYTE_BALANCE = 1n;
9225
- const zeroSizeHint = {
9226
- bytes: 0,
9227
- isExact: true,
9337
+ class RecentBlocks extends WithDebug {
9338
+ blocks;
9339
+ accumulationLog;
9340
+ static Codec = descriptors_codec.Class(RecentBlocks, {
9341
+ blocks: codecKnownSizeArray(BlockState.Codec, {
9342
+ minLength: 0,
9343
+ maxLength: MAX_RECENT_HISTORY,
9344
+ typicalLength: MAX_RECENT_HISTORY,
9345
+ }),
9346
+ accumulationLog: descriptors_codec.object({
9347
+ peaks: descriptors_readonlyArray(descriptors_codec.sequenceVarLen(descriptors_codec.optional(descriptors_codec.bytes(hash_HASH_SIZE)))),
9348
+ }),
9349
+ });
9350
+ static empty() {
9351
+ return new RecentBlocks(sized_array_asKnownSize([]), {
9352
+ peaks: [],
9353
+ });
9354
+ }
9355
+ static create(a) {
9356
+ return new RecentBlocks(a.blocks, a.accumulationLog);
9357
+ }
9358
+ constructor(
9359
+ /**
9360
+ * Most recent blocks.
9361
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fea010fea01?v=0.6.7
9362
+ */
9363
+ blocks,
9364
+ /**
9365
+ * Accumulation output log.
9366
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0f02020f0202?v=0.6.7
9367
+ */
9368
+ accumulationLog) {
9369
+ super();
9370
+ this.blocks = blocks;
9371
+ this.accumulationLog = accumulationLog;
9372
+ }
9373
+ }
9374
+
9375
+ ;// CONCATENATED MODULE: ./packages/jam/state/recently-accumulated.ts
9376
+
9377
+
9378
+
9379
+
9380
+ const recentlyAccumulatedCodec = codecPerEpochBlock(descriptors_codec.sequenceVarLen(descriptors_codec.bytes(hash_HASH_SIZE).asOpaque()).convert((x) => Array.from(x), (x) => HashSet.from(x)));
9381
+
9382
+ ;// CONCATENATED MODULE: ./packages/jam/state/validator-data.ts
9383
+
9384
+
9385
+
9386
+
9387
+ /**
9388
+ * Fixed size of validator metadata.
9389
+ *
9390
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
9391
+ */
9392
+ const VALIDATOR_META_BYTES = 128;
9393
+ /**
9394
+ * Details about validators' identity.
9395
+ *
9396
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
9397
+ */
9398
+ class validator_data_ValidatorData extends WithDebug {
9399
+ bandersnatch;
9400
+ ed25519;
9401
+ bls;
9402
+ metadata;
9403
+ static Codec = descriptors_codec.Class(validator_data_ValidatorData, {
9404
+ bandersnatch: descriptors_codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
9405
+ ed25519: descriptors_codec.bytes(ED25519_KEY_BYTES).asOpaque(),
9406
+ bls: descriptors_codec.bytes(BLS_KEY_BYTES).asOpaque(),
9407
+ metadata: descriptors_codec.bytes(VALIDATOR_META_BYTES),
9408
+ });
9409
+ static create({ ed25519, bandersnatch, bls, metadata }) {
9410
+ return new validator_data_ValidatorData(bandersnatch, ed25519, bls, metadata);
9411
+ }
9412
+ constructor(
9413
+ /** Bandersnatch public key. */
9414
+ bandersnatch,
9415
+ /** ED25519 key data. */
9416
+ ed25519,
9417
+ /** BLS public key. */
9418
+ bls,
9419
+ /** Validator-defined additional metdata. */
9420
+ metadata) {
9421
+ super();
9422
+ this.bandersnatch = bandersnatch;
9423
+ this.ed25519 = ed25519;
9424
+ this.bls = bls;
9425
+ this.metadata = metadata;
9426
+ }
9427
+ }
9428
+ const validatorsDataCodec = common_codecPerValidator(validator_data_ValidatorData.Codec);
9429
+
9430
+ ;// CONCATENATED MODULE: ./packages/jam/state/safrole-data.ts
9431
+
9432
+
9433
+
9434
+
9435
+
9436
+
9437
+
9438
+
9439
+
9440
+
9441
+
9442
+ var SafroleSealingKeysKind;
9443
+ (function (SafroleSealingKeysKind) {
9444
+ SafroleSealingKeysKind[SafroleSealingKeysKind["Tickets"] = 0] = "Tickets";
9445
+ SafroleSealingKeysKind[SafroleSealingKeysKind["Keys"] = 1] = "Keys";
9446
+ })(SafroleSealingKeysKind || (SafroleSealingKeysKind = {}));
9447
+ const codecBandersnatchKey = descriptors_codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque();
9448
+ class safrole_data_SafroleSealingKeysData extends WithDebug {
9449
+ kind;
9450
+ keys;
9451
+ tickets;
9452
+ static Codec = codecWithContext((context) => {
9453
+ return descriptors_codec.custom({
9454
+ name: "SafroleSealingKeys",
9455
+ sizeHint: { bytes: 1 + hash_HASH_SIZE * context.epochLength, isExact: false },
9456
+ }, (e, x) => {
9457
+ e.varU32(numbers_tryAsU32(x.kind));
9458
+ if (x.kind === SafroleSealingKeysKind.Keys) {
9459
+ e.sequenceFixLen(codecBandersnatchKey, x.keys);
9460
+ }
9461
+ else {
9462
+ e.sequenceFixLen(tickets_Ticket.Codec, x.tickets);
9463
+ }
9464
+ }, (d) => {
9465
+ const epochLength = context.epochLength;
9466
+ const kind = d.varU32();
9467
+ if (kind === SafroleSealingKeysKind.Keys) {
9468
+ const keys = d.sequenceFixLen(codecBandersnatchKey, epochLength);
9469
+ return safrole_data_SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
9470
+ }
9471
+ if (kind === SafroleSealingKeysKind.Tickets) {
9472
+ const tickets = d.sequenceFixLen(tickets_Ticket.Codec, epochLength);
9473
+ return safrole_data_SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
9474
+ }
9475
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9476
+ }, (s) => {
9477
+ const kind = s.decoder.varU32();
9478
+ if (kind === SafroleSealingKeysKind.Keys) {
9479
+ s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
9480
+ return;
9481
+ }
9482
+ if (kind === SafroleSealingKeysKind.Tickets) {
9483
+ s.sequenceFixLen(tickets_Ticket.Codec, context.epochLength);
9484
+ return;
9485
+ }
9486
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9487
+ });
9488
+ });
9489
+ static keys(keys) {
9490
+ return new safrole_data_SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined);
9491
+ }
9492
+ static tickets(tickets) {
9493
+ return new safrole_data_SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets);
9494
+ }
9495
+ constructor(kind, keys, tickets) {
9496
+ super();
9497
+ this.kind = kind;
9498
+ this.keys = keys;
9499
+ this.tickets = tickets;
9500
+ }
9501
+ }
9502
+ class SafroleData {
9503
+ nextValidatorData;
9504
+ epochRoot;
9505
+ sealingKeySeries;
9506
+ ticketsAccumulator;
9507
+ static Codec = descriptors_codec.Class(SafroleData, {
9508
+ nextValidatorData: common_codecPerValidator(validator_data_ValidatorData.Codec),
9509
+ epochRoot: descriptors_codec.bytes(bandersnatch_BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
9510
+ sealingKeySeries: safrole_data_SafroleSealingKeysData.Codec,
9511
+ ticketsAccumulator: descriptors_readonlyArray(descriptors_codec.sequenceVarLen(tickets_Ticket.Codec)).convert(seeThrough, sized_array_asKnownSize),
9512
+ });
9513
+ static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }) {
9514
+ return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
9515
+ }
9516
+ constructor(
9517
+ /** gamma_k */
9518
+ nextValidatorData,
9519
+ /** gamma_z */
9520
+ epochRoot,
9521
+ /** gamma_s */
9522
+ sealingKeySeries,
9523
+ /** gamma_a */
9524
+ ticketsAccumulator) {
9525
+ this.nextValidatorData = nextValidatorData;
9526
+ this.epochRoot = epochRoot;
9527
+ this.sealingKeySeries = sealingKeySeries;
9528
+ this.ticketsAccumulator = ticketsAccumulator;
9529
+ }
9530
+ }
9531
+
9532
+ ;// CONCATENATED MODULE: ./packages/jam/state/service.ts
9533
+
9534
+
9535
+
9536
+
9537
+
9538
+
9539
+ /**
9540
+ * `B_S`: The basic minimum balance which all services require.
9541
+ *
9542
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
9543
+ */
9544
+ const BASE_SERVICE_BALANCE = 100n;
9545
+ /**
9546
+ * `B_I`: The additional minimum balance required per item of elective service state.
9547
+ *
9548
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
9549
+ */
9550
+ const ELECTIVE_ITEM_BALANCE = 10n;
9551
+ /**
9552
+ * `B_L`: The additional minimum balance required per octet of elective service state.
9553
+ *
9554
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
9555
+ */
9556
+ const ELECTIVE_BYTE_BALANCE = 1n;
9557
+ const zeroSizeHint = {
9558
+ bytes: 0,
9559
+ isExact: true,
9228
9560
  };
9229
9561
  /** 0-byte read, return given default value */
9230
9562
  const ignoreValueWithDefault = (defaultValue) => Descriptor.new("ignoreValue", zeroSizeHint, (_e, _v) => { }, (_d) => defaultValue, (_s) => { });
@@ -9387,358 +9719,418 @@ class service_LookupHistoryItem {
9387
9719
  }
9388
9720
  }
9389
9721
 
9390
- ;// CONCATENATED MODULE: ./packages/jam/state/privileged-services.ts
9722
+ ;// CONCATENATED MODULE: ./packages/jam/state/statistics.ts
9391
9723
 
9392
9724
 
9393
9725
 
9394
9726
 
9395
9727
 
9396
- /** Dictionary entry of services that auto-accumulate every block. */
9397
- class AutoAccumulate {
9398
- service;
9399
- gasLimit;
9400
- static Codec = descriptors_codec.Class(AutoAccumulate, {
9401
- service: descriptors_codec.u32.asOpaque(),
9402
- gasLimit: descriptors_codec.u64.asOpaque(),
9728
+
9729
+ const codecServiceId = Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
9730
+ ? descriptors_codec.u32.asOpaque()
9731
+ : descriptors_codec.varU32.convert((s) => numbers_tryAsU32(s), (i) => tryAsServiceId(i));
9732
+ /**
9733
+ * Activity Record of a single validator.
9734
+ *
9735
+ * https://graypaper.fluffylabs.dev/#/579bd12/183701183701
9736
+ */
9737
+ class ValidatorStatistics {
9738
+ blocks;
9739
+ tickets;
9740
+ preImages;
9741
+ preImagesSize;
9742
+ guarantees;
9743
+ assurances;
9744
+ static Codec = descriptors_codec.Class(ValidatorStatistics, {
9745
+ blocks: descriptors_codec.u32,
9746
+ tickets: descriptors_codec.u32,
9747
+ preImages: descriptors_codec.u32,
9748
+ preImagesSize: descriptors_codec.u32,
9749
+ guarantees: descriptors_codec.u32,
9750
+ assurances: descriptors_codec.u32,
9403
9751
  });
9404
- static create({ service, gasLimit }) {
9405
- return new AutoAccumulate(service, gasLimit);
9752
+ static create({ blocks, tickets, preImages, preImagesSize, guarantees, assurances, }) {
9753
+ return new ValidatorStatistics(blocks, tickets, preImages, preImagesSize, guarantees, assurances);
9406
9754
  }
9407
9755
  constructor(
9408
- /** Service id that auto-accumulates. */
9409
- service,
9410
- /** Gas limit for auto-accumulation. */
9411
- gasLimit) {
9412
- this.service = service;
9413
- this.gasLimit = gasLimit;
9414
- }
9415
- }
9416
- /**
9417
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
9418
- */
9419
- class PrivilegedServices {
9420
- manager;
9421
- delegator;
9422
- registrar;
9423
- assigners;
9424
- autoAccumulateServices;
9425
- /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
9426
- static Codec = descriptors_codec.Class(PrivilegedServices, {
9427
- manager: descriptors_codec.u32.asOpaque(),
9428
- assigners: codecPerCore(descriptors_codec.u32.asOpaque()),
9429
- delegator: descriptors_codec.u32.asOpaque(),
9430
- registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
9431
- ? descriptors_codec.u32.asOpaque()
9432
- : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
9433
- autoAccumulateServices: readonlyArray(descriptors_codec.sequenceVarLen(AutoAccumulate.Codec)),
9434
- });
9435
- static create(a) {
9436
- return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
9756
+ /** The number of blocks produced by the validator. */
9757
+ blocks,
9758
+ /** The number of tickets introduced by the validator. */
9759
+ tickets,
9760
+ /** The number of preimages introduced by the validator. */
9761
+ preImages,
9762
+ /** The total number of octets across all preimages introduced by the validator. */
9763
+ preImagesSize,
9764
+ /** The number of reports guaranteed by the validator. */
9765
+ guarantees,
9766
+ /** The number of availability assurances made by the validator. */
9767
+ assurances) {
9768
+ this.blocks = blocks;
9769
+ this.tickets = tickets;
9770
+ this.preImages = preImages;
9771
+ this.preImagesSize = preImagesSize;
9772
+ this.guarantees = guarantees;
9773
+ this.assurances = assurances;
9437
9774
  }
9438
- constructor(
9439
- /**
9440
- * `χ_M`: Manages alteration of χ from block to block,
9441
- * as well as bestow services with storage deposit credits.
9442
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
9443
- */
9444
- manager,
9445
- /** `χ_V`: Managers validator keys. */
9446
- delegator,
9447
- /**
9448
- * `χ_R`: Manages the creation of services in protected range.
9449
- *
9450
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
9451
- */
9452
- registrar,
9453
- /** `χ_A`: Manages authorization queue one for each core. */
9454
- assigners,
9455
- /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
9456
- autoAccumulateServices) {
9457
- this.manager = manager;
9458
- this.delegator = delegator;
9459
- this.registrar = registrar;
9460
- this.assigners = assigners;
9461
- this.autoAccumulateServices = autoAccumulateServices;
9775
+ static empty() {
9776
+ const zero = numbers_tryAsU32(0);
9777
+ return new ValidatorStatistics(zero, zero, zero, zero, zero, zero);
9462
9778
  }
9463
9779
  }
9464
-
9465
- ;// CONCATENATED MODULE: ./packages/jam/state/recent-blocks.ts
9466
-
9467
-
9468
-
9469
-
9470
-
9471
-
9780
+ const codecVarU16 = descriptors_codec.varU32.convert((i) => numbers_tryAsU32(i), (o) => numbers_tryAsU16(o));
9781
+ /** Encode/decode unsigned gas. */
9782
+ const codecVarGas = descriptors_codec.varU64.convert((g) => tryAsU64(g), (i) => tryAsServiceGas(i));
9472
9783
  /**
9473
- * `H = 8`: The size of recent history, in blocks.
9784
+ * Single core statistics.
9785
+ * Updated per block, based on incoming work reports (`w`).
9474
9786
  *
9475
- * https://graypaper.fluffylabs.dev/#/579bd12/416300416500
9787
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/18f10318f103?v=0.6.4
9788
+ * https://github.com/gavofyork/graypaper/blob/9bffb08f3ea7b67832019176754df4fb36b9557d/text/statistics.tex#L65
9476
9789
  */
9477
- const MAX_RECENT_HISTORY = 8;
9478
- /** Recent history of a single block. */
9479
- class BlockState extends WithDebug {
9480
- headerHash;
9481
- accumulationResult;
9482
- postStateRoot;
9483
- reported;
9484
- static Codec = descriptors_codec.Class(BlockState, {
9485
- headerHash: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
9486
- accumulationResult: descriptors_codec.bytes(hash_HASH_SIZE),
9487
- postStateRoot: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
9488
- reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
9489
- });
9490
- static create({ headerHash, accumulationResult, postStateRoot, reported }) {
9491
- return new BlockState(headerHash, accumulationResult, postStateRoot, reported);
9790
+ class CoreStatistics {
9791
+ dataAvailabilityLoad;
9792
+ popularity;
9793
+ imports;
9794
+ exports;
9795
+ extrinsicSize;
9796
+ extrinsicCount;
9797
+ bundleSize;
9798
+ gasUsed;
9799
+ static Codec = Compatibility.isGreaterOrEqual(GpVersion.V0_7_0)
9800
+ ? descriptors_codec.Class(CoreStatistics, {
9801
+ dataAvailabilityLoad: descriptors_codec.varU32,
9802
+ popularity: codecVarU16,
9803
+ imports: codecVarU16,
9804
+ extrinsicCount: codecVarU16,
9805
+ extrinsicSize: descriptors_codec.varU32,
9806
+ exports: codecVarU16,
9807
+ bundleSize: descriptors_codec.varU32,
9808
+ gasUsed: codecVarGas,
9809
+ })
9810
+ : descriptors_codec.Class(CoreStatistics, {
9811
+ dataAvailabilityLoad: descriptors_codec.varU32,
9812
+ popularity: codecVarU16,
9813
+ imports: codecVarU16,
9814
+ exports: codecVarU16,
9815
+ extrinsicSize: descriptors_codec.varU32,
9816
+ extrinsicCount: codecVarU16,
9817
+ bundleSize: descriptors_codec.varU32,
9818
+ gasUsed: codecVarGas,
9819
+ });
9820
+ static create(v) {
9821
+ return new CoreStatistics(v.dataAvailabilityLoad, v.popularity, v.imports, v.exports, v.extrinsicSize, v.extrinsicCount, v.bundleSize, v.gasUsed);
9492
9822
  }
9493
9823
  constructor(
9494
- /** Header hash. */
9495
- headerHash,
9496
- /** Merkle mountain belt of accumulation result. */
9497
- accumulationResult,
9498
- /** Posterior state root filled in with a 1-block delay. */
9499
- postStateRoot,
9500
- /** Reported work packages (no more than number of cores). */
9501
- reported) {
9502
- super();
9503
- this.headerHash = headerHash;
9504
- this.accumulationResult = accumulationResult;
9505
- this.postStateRoot = postStateRoot;
9506
- this.reported = reported;
9507
- }
9508
- }
9509
- class RecentBlocks extends WithDebug {
9510
- blocks;
9511
- accumulationLog;
9512
- static Codec = descriptors_codec.Class(RecentBlocks, {
9513
- blocks: codecKnownSizeArray(BlockState.Codec, {
9514
- minLength: 0,
9515
- maxLength: MAX_RECENT_HISTORY,
9516
- typicalLength: MAX_RECENT_HISTORY,
9517
- }),
9518
- accumulationLog: descriptors_codec.object({
9519
- peaks: readonlyArray(descriptors_codec.sequenceVarLen(descriptors_codec.optional(descriptors_codec.bytes(hash_HASH_SIZE)))),
9520
- }),
9521
- });
9522
- static create(a) {
9523
- return new RecentBlocks(a.blocks, a.accumulationLog);
9824
+ /** `d` */
9825
+ dataAvailabilityLoad,
9826
+ /** `p` */
9827
+ popularity,
9828
+ /** `i` */
9829
+ imports,
9830
+ /** `e` */
9831
+ exports,
9832
+ /** `z` */
9833
+ extrinsicSize,
9834
+ /** `x` */
9835
+ extrinsicCount,
9836
+ /** `b` */
9837
+ bundleSize,
9838
+ /** `u` */
9839
+ gasUsed) {
9840
+ this.dataAvailabilityLoad = dataAvailabilityLoad;
9841
+ this.popularity = popularity;
9842
+ this.imports = imports;
9843
+ this.exports = exports;
9844
+ this.extrinsicSize = extrinsicSize;
9845
+ this.extrinsicCount = extrinsicCount;
9846
+ this.bundleSize = bundleSize;
9847
+ this.gasUsed = gasUsed;
9524
9848
  }
9525
- constructor(
9526
- /**
9527
- * Most recent blocks.
9528
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fea010fea01?v=0.6.7
9529
- */
9530
- blocks,
9531
- /**
9532
- * Accumulation output log.
9533
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0f02020f0202?v=0.6.7
9534
- */
9535
- accumulationLog) {
9536
- super();
9537
- this.blocks = blocks;
9538
- this.accumulationLog = accumulationLog;
9849
+ static empty() {
9850
+ const zero = numbers_tryAsU32(0);
9851
+ const zero16 = numbers_tryAsU16(0);
9852
+ const zeroGas = tryAsServiceGas(0);
9853
+ return new CoreStatistics(zero, zero16, zero16, zero16, zero, zero16, zero, zeroGas);
9539
9854
  }
9540
9855
  }
9541
9856
  /**
9542
- * Recent history of blocks.
9857
+ * Service statistics.
9858
+ * Updated per block, based on available work reports (`W`).
9543
9859
  *
9544
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
9860
+ * https://graypaper.fluffylabs.dev/#/1c979cb/199802199802?v=0.7.1
9545
9861
  */
9546
- class RecentBlocksHistory extends WithDebug {
9547
- current;
9548
- static Codec = Descriptor.new("RecentBlocksHistory", RecentBlocks.Codec.sizeHint, (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()), (decoder) => {
9549
- const recentBlocks = RecentBlocks.Codec.decode(decoder);
9550
- return RecentBlocksHistory.create(recentBlocks);
9551
- }, (skip) => {
9552
- return RecentBlocks.Codec.skip(skip);
9862
+ class ServiceStatistics {
9863
+ providedCount;
9864
+ providedSize;
9865
+ refinementCount;
9866
+ refinementGasUsed;
9867
+ imports;
9868
+ exports;
9869
+ extrinsicSize;
9870
+ extrinsicCount;
9871
+ accumulateCount;
9872
+ accumulateGasUsed;
9873
+ onTransfersCount;
9874
+ onTransfersGasUsed;
9875
+ static Codec = Compatibility.selectIfGreaterOrEqual({
9876
+ fallback: descriptors_codec.Class(ServiceStatistics, {
9877
+ providedCount: codecVarU16,
9878
+ providedSize: descriptors_codec.varU32,
9879
+ refinementCount: descriptors_codec.varU32,
9880
+ refinementGasUsed: codecVarGas,
9881
+ imports: codecVarU16,
9882
+ exports: codecVarU16,
9883
+ extrinsicSize: descriptors_codec.varU32,
9884
+ extrinsicCount: codecVarU16,
9885
+ accumulateCount: descriptors_codec.varU32,
9886
+ accumulateGasUsed: codecVarGas,
9887
+ onTransfersCount: descriptors_codec.varU32,
9888
+ onTransfersGasUsed: codecVarGas,
9889
+ }),
9890
+ versions: {
9891
+ [GpVersion.V0_7_0]: descriptors_codec.Class(ServiceStatistics, {
9892
+ providedCount: codecVarU16,
9893
+ providedSize: descriptors_codec.varU32,
9894
+ refinementCount: descriptors_codec.varU32,
9895
+ refinementGasUsed: codecVarGas,
9896
+ imports: codecVarU16,
9897
+ extrinsicCount: codecVarU16,
9898
+ extrinsicSize: descriptors_codec.varU32,
9899
+ exports: codecVarU16,
9900
+ accumulateCount: descriptors_codec.varU32,
9901
+ accumulateGasUsed: codecVarGas,
9902
+ onTransfersCount: descriptors_codec.varU32,
9903
+ onTransfersGasUsed: codecVarGas,
9904
+ }),
9905
+ [GpVersion.V0_7_1]: descriptors_codec.Class(ServiceStatistics, {
9906
+ providedCount: codecVarU16,
9907
+ providedSize: descriptors_codec.varU32,
9908
+ refinementCount: descriptors_codec.varU32,
9909
+ refinementGasUsed: codecVarGas,
9910
+ imports: codecVarU16,
9911
+ extrinsicCount: codecVarU16,
9912
+ extrinsicSize: descriptors_codec.varU32,
9913
+ exports: codecVarU16,
9914
+ accumulateCount: descriptors_codec.varU32,
9915
+ accumulateGasUsed: codecVarGas,
9916
+ onTransfersCount: ignoreValueWithDefault(numbers_tryAsU32(0)),
9917
+ onTransfersGasUsed: ignoreValueWithDefault(tryAsServiceGas(0)),
9918
+ }),
9919
+ },
9553
9920
  });
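
The block above version-gates the ServiceStatistics layout: the 0.7.0 entry reorders the extrinsic/export fields, and the 0.7.1 entry encodes the deprecated onTransfers counters as ignored defaults. An illustrative sketch of the selection pattern (legacyCodec, v070Codec and v071Codec are placeholders):

// Sketch only: presumably resolves to the entry for the highest listed version the active GP version satisfies, else the fallback.
const layout = Compatibility.selectIfGreaterOrEqual({
  fallback: legacyCodec,
  versions: {
    [GpVersion.V0_7_0]: v070Codec,
    [GpVersion.V0_7_1]: v071Codec,
  },
});
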
9554
- static create(recentBlocks) {
9555
- return new RecentBlocksHistory(recentBlocks);
9921
+ static create(v) {
9922
+ return new ServiceStatistics(v.providedCount, v.providedSize, v.refinementCount, v.refinementGasUsed, v.imports, v.exports, v.extrinsicSize, v.extrinsicCount, v.accumulateCount, v.accumulateGasUsed, v.onTransfersCount, v.onTransfersGasUsed);
9923
+ }
9924
+ constructor(
9925
+ /** `p.0` */
9926
+ providedCount,
9927
+ /** `p.1` */
9928
+ providedSize,
9929
+ /** `r.0` */
9930
+ refinementCount,
9931
+ /** `r.1` */
9932
+ refinementGasUsed,
9933
+ /** `i` */
9934
+ imports,
9935
+ /** `e` */
9936
+ exports,
9937
+ /** `z` */
9938
+ extrinsicSize,
9939
+ /** `x` */
9940
+ extrinsicCount,
9941
+ /** `a.0` */
9942
+ accumulateCount,
9943
+ /** `a.1` */
9944
+ accumulateGasUsed,
9945
+ /** `t.0` @deprecated since 0.7.1 */
9946
+ onTransfersCount,
9947
+ /** `t.1` @deprecated since 0.7.1 */
9948
+ onTransfersGasUsed) {
9949
+ this.providedCount = providedCount;
9950
+ this.providedSize = providedSize;
9951
+ this.refinementCount = refinementCount;
9952
+ this.refinementGasUsed = refinementGasUsed;
9953
+ this.imports = imports;
9954
+ this.exports = exports;
9955
+ this.extrinsicSize = extrinsicSize;
9956
+ this.extrinsicCount = extrinsicCount;
9957
+ this.accumulateCount = accumulateCount;
9958
+ this.accumulateGasUsed = accumulateGasUsed;
9959
+ this.onTransfersCount = onTransfersCount;
9960
+ this.onTransfersGasUsed = onTransfersGasUsed;
9556
9961
  }
9557
9962
  static empty() {
9558
- return RecentBlocksHistory.create(RecentBlocks.create({
9559
- blocks: sized_array_asKnownSize([]),
9560
- accumulationLog: { peaks: [] },
9561
- }));
9963
+ const zero = numbers_tryAsU32(0);
9964
+ const zero16 = numbers_tryAsU16(0);
9965
+ const zeroGas = tryAsServiceGas(0);
9966
+ return new ServiceStatistics(zero16, zero, zero, zeroGas, zero16, zero16, zero, zero16, zero, zeroGas, zero, zeroGas);
9562
9967
  }
9563
- /**
9564
- * Returns the block's BEEFY super peak.
9565
- */
9566
- static accumulationResult(block) {
9567
- return block.accumulationResult;
9968
+ }
9969
+ /** `pi`: Statistics of each validator, cores statistics and services statistics. */
9970
+ class StatisticsData {
9971
+ current;
9972
+ previous;
9973
+ cores;
9974
+ services;
9975
+ static Codec = descriptors_codec.Class(StatisticsData, {
9976
+ current: common_codecPerValidator(ValidatorStatistics.Codec),
9977
+ previous: common_codecPerValidator(ValidatorStatistics.Codec),
9978
+ cores: codecPerCore(CoreStatistics.Codec),
9979
+ services: descriptors_codec.dictionary(codecServiceId, ServiceStatistics.Codec, {
9980
+ sortKeys: (a, b) => a - b,
9981
+ }),
9982
+ });
9983
+ static create(v) {
9984
+ return new StatisticsData(v.current, v.previous, v.cores, v.services);
9568
9985
  }
9569
- constructor(current) {
9570
- super();
9986
+ constructor(current, previous, cores, services) {
9571
9987
  this.current = current;
9572
- }
9573
- /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
9574
- get blocks() {
9575
- if (this.current !== null) {
9576
- return this.current.blocks;
9577
- }
9578
- throw new Error("RecentBlocksHistory is in invalid state");
9579
- }
9580
- asCurrent() {
9581
- if (this.current === null) {
9582
- throw new Error("Cannot access current RecentBlocks format");
9583
- }
9584
- return this.current;
9585
- }
9586
- updateBlocks(blocks) {
9587
- if (this.current !== null) {
9588
- return RecentBlocksHistory.create(RecentBlocks.create({
9589
- ...this.current,
9590
- blocks: opaque_asOpaqueType(blocks),
9591
- }));
9592
- }
9593
- throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
9988
+ this.previous = previous;
9989
+ this.cores = cores;
9990
+ this.services = services;
9594
9991
  }
9595
9992
  }
9596
9993
 
9597
- ;// CONCATENATED MODULE: ./packages/jam/state/validator-data.ts
9994
+ ;// CONCATENATED MODULE: ./packages/jam/state/in-memory-state-view.ts
9598
9995
 
9599
9996
 
9600
9997
 
9601
- /**
9602
- * Fixed size of validator metadata.
9603
- *
9604
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
9605
- */
9606
- const VALIDATOR_META_BYTES = 128;
9607
- /**
9608
- * Details about validators' identity.
9609
- *
9610
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
9611
- */
9612
- class ValidatorData extends WithDebug {
9613
- bandersnatch;
9614
- ed25519;
9615
- bls;
9616
- metadata;
9617
- static Codec = descriptors_codec.Class(ValidatorData, {
9618
- bandersnatch: descriptors_codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
9619
- ed25519: descriptors_codec.bytes(ED25519_KEY_BYTES).asOpaque(),
9620
- bls: descriptors_codec.bytes(BLS_KEY_BYTES).asOpaque(),
9621
- metadata: descriptors_codec.bytes(VALIDATOR_META_BYTES),
9622
- });
9623
- static create({ ed25519, bandersnatch, bls, metadata }) {
9624
- return new ValidatorData(bandersnatch, ed25519, bls, metadata);
9625
- }
9626
- constructor(
9627
- /** Bandersnatch public key. */
9628
- bandersnatch,
9629
- /** ED25519 key data. */
9630
- ed25519,
9631
- /** BLS public key. */
9632
- bls,
9633
- /** Validator-defined additional metdata. */
9634
- metadata) {
9635
- super();
9636
- this.bandersnatch = bandersnatch;
9637
- this.ed25519 = ed25519;
9638
- this.bls = bls;
9639
- this.metadata = metadata;
9640
- }
9641
- }
9642
9998
 
9643
- ;// CONCATENATED MODULE: ./packages/jam/state/safrole-data.ts
9644
9999
 
9645
10000
 
9646
10001
 
9647
10002
 
9648
10003
 
9649
10004
 
10005
+ class InMemoryStateView {
10006
+ chainSpec;
10007
+ state;
10008
+ constructor(chainSpec, state) {
10009
+ this.chainSpec = chainSpec;
10010
+ this.state = state;
10011
+ }
10012
+ availabilityAssignmentView() {
10013
+ return reencodeAsView(availabilityAssignmentsCodec, this.state.availabilityAssignment, this.chainSpec);
10014
+ }
10015
+ designatedValidatorDataView() {
10016
+ return reencodeAsView(validatorsDataCodec, this.state.designatedValidatorData, this.chainSpec);
10017
+ }
10018
+ currentValidatorDataView() {
10019
+ return reencodeAsView(validatorsDataCodec, this.state.currentValidatorData, this.chainSpec);
10020
+ }
10021
+ previousValidatorDataView() {
10022
+ return reencodeAsView(validatorsDataCodec, this.state.previousValidatorData, this.chainSpec);
10023
+ }
10024
+ authPoolsView() {
10025
+ return reencodeAsView(authPoolsCodec, this.state.authPools, this.chainSpec);
10026
+ }
10027
+ authQueuesView() {
10028
+ return reencodeAsView(authQueuesCodec, this.state.authQueues, this.chainSpec);
10029
+ }
10030
+ recentBlocksView() {
10031
+ return reencodeAsView(RecentBlocks.Codec, this.state.recentBlocks, this.chainSpec);
10032
+ }
10033
+ statisticsView() {
10034
+ return reencodeAsView(StatisticsData.Codec, this.state.statistics, this.chainSpec);
10035
+ }
10036
+ accumulationQueueView() {
10037
+ return reencodeAsView(accumulationQueueCodec, this.state.accumulationQueue, this.chainSpec);
10038
+ }
10039
+ recentlyAccumulatedView() {
10040
+ return reencodeAsView(recentlyAccumulatedCodec, this.state.recentlyAccumulated, this.chainSpec);
10041
+ }
10042
+ safroleDataView() {
10043
+ // TODO [ToDr] Consider exposting `safrole` from state
10044
+ // instead of individual fields
10045
+ const safrole = SafroleData.create({
10046
+ nextValidatorData: this.state.nextValidatorData,
10047
+ epochRoot: this.state.epochRoot,
10048
+ sealingKeySeries: this.state.sealingKeySeries,
10049
+ ticketsAccumulator: this.state.ticketsAccumulator,
10050
+ });
10051
+ return reencodeAsView(SafroleData.Codec, safrole, this.chainSpec);
10052
+ }
10053
+ getServiceInfoView(id) {
10054
+ const service = this.state.getService(id);
10055
+ if (service === null) {
10056
+ return null;
10057
+ }
10058
+ return reencodeAsView(ServiceAccountInfo.Codec, service.getInfo(), this.chainSpec);
10059
+ }
10060
+ }
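
An illustrative use of the new InMemoryStateView (chainSpec, state and id are whatever the caller already has; the accessors are the ones defined above):

// Sketch only: wrap an in-memory state and read codec-backed views of individual components.
const view = new InMemoryStateView(chainSpec, state);
const statistics = view.statisticsView(); // re-encoded through StatisticsData.Codec
const serviceInfo = view.getServiceInfoView(id); // null when the service does not exist
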
9650
10061
 
10062
+ ;// CONCATENATED MODULE: ./packages/jam/state/privileged-services.ts
9651
10063
 
9652
10064
 
9653
10065
 
9654
10066
 
9655
- var SafroleSealingKeysKind;
9656
- (function (SafroleSealingKeysKind) {
9657
- SafroleSealingKeysKind[SafroleSealingKeysKind["Tickets"] = 0] = "Tickets";
9658
- SafroleSealingKeysKind[SafroleSealingKeysKind["Keys"] = 1] = "Keys";
9659
- })(SafroleSealingKeysKind || (SafroleSealingKeysKind = {}));
9660
- const codecBandersnatchKey = descriptors_codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque();
9661
- class SafroleSealingKeysData extends WithDebug {
9662
- kind;
9663
- keys;
9664
- tickets;
9665
- static Codec = codecWithContext((context) => {
9666
- return descriptors_codec.custom({
9667
- name: "SafroleSealingKeys",
9668
- sizeHint: { bytes: 1 + hash_HASH_SIZE * context.epochLength, isExact: false },
9669
- }, (e, x) => {
9670
- e.varU32(numbers_tryAsU32(x.kind));
9671
- if (x.kind === SafroleSealingKeysKind.Keys) {
9672
- e.sequenceFixLen(codecBandersnatchKey, x.keys);
9673
- }
9674
- else {
9675
- e.sequenceFixLen(Ticket.Codec, x.tickets);
9676
- }
9677
- }, (d) => {
9678
- const epochLength = context.epochLength;
9679
- const kind = d.varU32();
9680
- if (kind === SafroleSealingKeysKind.Keys) {
9681
- const keys = d.sequenceFixLen(codecBandersnatchKey, epochLength);
9682
- return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
9683
- }
9684
- if (kind === SafroleSealingKeysKind.Tickets) {
9685
- const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
9686
- return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
9687
- }
9688
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9689
- }, (s) => {
9690
- const kind = s.decoder.varU32();
9691
- if (kind === SafroleSealingKeysKind.Keys) {
9692
- s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
9693
- return;
9694
- }
9695
- if (kind === SafroleSealingKeysKind.Tickets) {
9696
- s.sequenceFixLen(Ticket.Codec, context.epochLength);
9697
- return;
9698
- }
9699
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9700
- });
10067
+
10068
+ /** Dictionary entry of services that auto-accumulate every block. */
10069
+ class AutoAccumulate {
10070
+ service;
10071
+ gasLimit;
10072
+ static Codec = descriptors_codec.Class(AutoAccumulate, {
10073
+ service: descriptors_codec.u32.asOpaque(),
10074
+ gasLimit: descriptors_codec.u64.asOpaque(),
9701
10075
  });
9702
- static keys(keys) {
9703
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined);
9704
- }
9705
- static tickets(tickets) {
9706
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets);
10076
+ static create({ service, gasLimit }) {
10077
+ return new AutoAccumulate(service, gasLimit);
9707
10078
  }
9708
- constructor(kind, keys, tickets) {
9709
- super();
9710
- this.kind = kind;
9711
- this.keys = keys;
9712
- this.tickets = tickets;
10079
+ constructor(
10080
+ /** Service id that auto-accumulates. */
10081
+ service,
10082
+ /** Gas limit for auto-accumulation. */
10083
+ gasLimit) {
10084
+ this.service = service;
10085
+ this.gasLimit = gasLimit;
9713
10086
  }
9714
10087
  }
9715
- class SafroleData {
9716
- nextValidatorData;
9717
- epochRoot;
9718
- sealingKeySeries;
9719
- ticketsAccumulator;
9720
- static Codec = descriptors_codec.Class(SafroleData, {
9721
- nextValidatorData: codecPerValidator(ValidatorData.Codec),
9722
- epochRoot: descriptors_codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
9723
- sealingKeySeries: SafroleSealingKeysData.Codec,
9724
- ticketsAccumulator: readonlyArray(descriptors_codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, sized_array_asKnownSize),
10088
+ /**
10089
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
10090
+ */
10091
+ class PrivilegedServices {
10092
+ manager;
10093
+ delegator;
10094
+ registrar;
10095
+ assigners;
10096
+ autoAccumulateServices;
10097
+ /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
10098
+ static Codec = descriptors_codec.Class(PrivilegedServices, {
10099
+ manager: descriptors_codec.u32.asOpaque(),
10100
+ assigners: codecPerCore(descriptors_codec.u32.asOpaque()),
10101
+ delegator: descriptors_codec.u32.asOpaque(),
10102
+ registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
10103
+ ? descriptors_codec.u32.asOpaque()
10104
+ : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
10105
+ autoAccumulateServices: descriptors_readonlyArray(descriptors_codec.sequenceVarLen(AutoAccumulate.Codec)),
9725
10106
  });
9726
- static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }) {
9727
- return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
10107
+ static create(a) {
10108
+ return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
9728
10109
  }
9729
10110
  constructor(
9730
- /** gamma_k */
9731
- nextValidatorData,
9732
- /** gamma_z */
9733
- epochRoot,
9734
- /** gamma_s */
9735
- sealingKeySeries,
9736
- /** gamma_a */
9737
- ticketsAccumulator) {
9738
- this.nextValidatorData = nextValidatorData;
9739
- this.epochRoot = epochRoot;
9740
- this.sealingKeySeries = sealingKeySeries;
9741
- this.ticketsAccumulator = ticketsAccumulator;
10111
+ /**
10112
+ * `χ_M`: Manages alteration of χ from block to block,
10113
+ * as well as bestowing storage deposit credits on services.
10114
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
10115
+ */
10116
+ manager,
10117
+ /** `χ_V`: Manages validator keys. */
10118
+ delegator,
10119
+ /**
10120
+ * `χ_R`: Manages the creation of services in the protected range.
10121
+ *
10122
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
10123
+ */
10124
+ registrar,
10125
+ /** `χ_A`: Manages the authorization queue, one for each core. */
10126
+ assigners,
10127
+ /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
10128
+ autoAccumulateServices) {
10129
+ this.manager = manager;
10130
+ this.delegator = delegator;
10131
+ this.registrar = registrar;
10132
+ this.assigners = assigners;
10133
+ this.autoAccumulateServices = autoAccumulateServices;
9742
10134
  }
9743
10135
  }
9744
10136
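The sketch below is not part of the diff; it is a minimal illustration of how the new PrivilegedServices/AutoAccumulate shape fits together, reusing helpers that appear elsewhere in this bundle (tryAsServiceId, tryAsServiceGas, tryAsPerCore). The concrete ids, the gas value and the chainSpec variable are placeholders.

// Sketch only — the ids, gas value and `chainSpec` are placeholders.
const privileged = PrivilegedServices.create({
  manager: tryAsServiceId(0),
  delegator: tryAsServiceId(1),
  // New field in this release; for Gray Paper versions below 0.7.1 the codec
  // ignores it and substitutes the 2 ** 32 - 1 sentinel on decode.
  registrar: tryAsServiceId(2),
  assigners: tryAsPerCore([tryAsServiceId(0)], chainSpec),
  autoAccumulateServices: [
    AutoAccumulate.create({ service: tryAsServiceId(5), gasLimit: tryAsServiceGas(10000) }),
  ],
});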
 
@@ -9779,31 +10171,29 @@ var UpdatePreimageKind;
9779
10171
  * 3. Update `LookupHistory` with given value.
9780
10172
  */
9781
10173
  class state_update_UpdatePreimage {
9782
- serviceId;
9783
10174
  action;
9784
- constructor(serviceId, action) {
9785
- this.serviceId = serviceId;
10175
+ constructor(action) {
9786
10176
  this.action = action;
9787
10177
  }
9788
10178
  /** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
9789
- static provide({ serviceId, preimage, slot, }) {
9790
- return new state_update_UpdatePreimage(serviceId, {
10179
+ static provide({ preimage, slot }) {
10180
+ return new state_update_UpdatePreimage({
9791
10181
  kind: UpdatePreimageKind.Provide,
9792
10182
  preimage,
9793
10183
  slot,
9794
10184
  });
9795
10185
  }
9796
10186
  /** The preimage should be removed completely from the database. */
9797
- static remove({ serviceId, hash, length }) {
9798
- return new state_update_UpdatePreimage(serviceId, {
10187
+ static remove({ hash, length }) {
10188
+ return new state_update_UpdatePreimage({
9799
10189
  kind: UpdatePreimageKind.Remove,
9800
10190
  hash,
9801
10191
  length,
9802
10192
  });
9803
10193
  }
9804
10194
  /** Update the lookup history of some preimage or add a new one (request). */
9805
- static updateOrAdd({ serviceId, lookupHistory }) {
9806
- return new state_update_UpdatePreimage(serviceId, {
10195
+ static updateOrAdd({ lookupHistory }) {
10196
+ return new state_update_UpdatePreimage({
9807
10197
  kind: UpdatePreimageKind.UpdateOrAdd,
9808
10198
  item: lookupHistory,
9809
10199
  });
@@ -9834,327 +10224,68 @@ class state_update_UpdatePreimage {
9834
10224
  /** The type of service update. */
9835
10225
  var UpdateServiceKind;
9836
10226
  (function (UpdateServiceKind) {
9837
- /** Just update the `ServiceAccountInfo`. */
9838
- UpdateServiceKind[UpdateServiceKind["Update"] = 0] = "Update";
9839
- /** Create a new `Service` instance. */
9840
- UpdateServiceKind[UpdateServiceKind["Create"] = 1] = "Create";
9841
- })(UpdateServiceKind || (UpdateServiceKind = {}));
9842
- /**
9843
- * Update service info of a particular `ServiceId` or create a new one.
9844
- */
9845
- class UpdateService {
9846
- serviceId;
9847
- action;
9848
- constructor(serviceId, action) {
9849
- this.serviceId = serviceId;
9850
- this.action = action;
9851
- }
9852
- static update({ serviceId, serviceInfo }) {
9853
- return new UpdateService(serviceId, {
9854
- kind: UpdateServiceKind.Update,
9855
- account: serviceInfo,
9856
- });
9857
- }
9858
- static create({ serviceId, serviceInfo, lookupHistory, }) {
9859
- return new UpdateService(serviceId, {
9860
- kind: UpdateServiceKind.Create,
9861
- account: serviceInfo,
9862
- lookupHistory,
9863
- });
9864
- }
9865
- }
9866
- /** Update service storage kind. */
9867
- var UpdateStorageKind;
9868
- (function (UpdateStorageKind) {
9869
- /** Set a storage value. */
9870
- UpdateStorageKind[UpdateStorageKind["Set"] = 0] = "Set";
9871
- /** Remove a storage value. */
9872
- UpdateStorageKind[UpdateStorageKind["Remove"] = 1] = "Remove";
9873
- })(UpdateStorageKind || (UpdateStorageKind = {}));
9874
- /**
9875
- * Update service storage item.
9876
- *
9877
- * Can either create/modify an entry or remove it.
9878
- */
9879
- class UpdateStorage {
9880
- serviceId;
9881
- action;
9882
- constructor(serviceId, action) {
9883
- this.serviceId = serviceId;
9884
- this.action = action;
9885
- }
9886
- static set({ serviceId, storage }) {
9887
- return new UpdateStorage(serviceId, { kind: UpdateStorageKind.Set, storage });
9888
- }
9889
- static remove({ serviceId, key }) {
9890
- return new UpdateStorage(serviceId, { kind: UpdateStorageKind.Remove, key });
9891
- }
9892
- get key() {
9893
- if (this.action.kind === UpdateStorageKind.Remove) {
9894
- return this.action.key;
9895
- }
9896
- return this.action.storage.key;
9897
- }
9898
- get value() {
9899
- if (this.action.kind === UpdateStorageKind.Remove) {
9900
- return null;
9901
- }
9902
- return this.action.storage.value;
9903
- }
9904
- }
9905
-
9906
- ;// CONCATENATED MODULE: ./packages/jam/state/statistics.ts
9907
-
9908
-
9909
-
9910
-
9911
-
9912
- const codecServiceId = Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
9913
- ? descriptors_codec.u32.asOpaque()
9914
- : descriptors_codec.varU32.convert((s) => numbers_tryAsU32(s), (i) => tryAsServiceId(i));
9915
- /**
9916
- * Activity Record of a single validator.
9917
- *
9918
- * https://graypaper.fluffylabs.dev/#/579bd12/183701183701
9919
- */
9920
- class ValidatorStatistics {
9921
- blocks;
9922
- tickets;
9923
- preImages;
9924
- preImagesSize;
9925
- guarantees;
9926
- assurances;
9927
- static Codec = descriptors_codec.Class(ValidatorStatistics, {
9928
- blocks: descriptors_codec.u32,
9929
- tickets: descriptors_codec.u32,
9930
- preImages: descriptors_codec.u32,
9931
- preImagesSize: descriptors_codec.u32,
9932
- guarantees: descriptors_codec.u32,
9933
- assurances: descriptors_codec.u32,
9934
- });
9935
- static create({ blocks, tickets, preImages, preImagesSize, guarantees, assurances, }) {
9936
- return new ValidatorStatistics(blocks, tickets, preImages, preImagesSize, guarantees, assurances);
9937
- }
9938
- constructor(
9939
- /** The number of blocks produced by the validator. */
9940
- blocks,
9941
- /** The number of tickets introduced by the validator. */
9942
- tickets,
9943
- /** The number of preimages introduced by the validator. */
9944
- preImages,
9945
- /** The total number of octets across all preimages introduced by the validator. */
9946
- preImagesSize,
9947
- /** The number of reports guaranteed by the validator. */
9948
- guarantees,
9949
- /** The number of availability assurances made by the validator. */
9950
- assurances) {
9951
- this.blocks = blocks;
9952
- this.tickets = tickets;
9953
- this.preImages = preImages;
9954
- this.preImagesSize = preImagesSize;
9955
- this.guarantees = guarantees;
9956
- this.assurances = assurances;
9957
- }
9958
- static empty() {
9959
- const zero = numbers_tryAsU32(0);
9960
- return new ValidatorStatistics(zero, zero, zero, zero, zero, zero);
9961
- }
9962
- }
9963
- const codecVarU16 = descriptors_codec.varU32.convert((i) => numbers_tryAsU32(i), (o) => numbers_tryAsU16(o));
9964
- /** Encode/decode unsigned gas. */
9965
- const codecVarGas = descriptors_codec.varU64.convert((g) => tryAsU64(g), (i) => tryAsServiceGas(i));
9966
- /**
9967
- * Single core statistics.
9968
- * Updated per block, based on incoming work reports (`w`).
9969
- *
9970
- * https://graypaper.fluffylabs.dev/#/68eaa1f/18f10318f103?v=0.6.4
9971
- * https://github.com/gavofyork/graypaper/blob/9bffb08f3ea7b67832019176754df4fb36b9557d/text/statistics.tex#L65
9972
- */
9973
- class CoreStatistics {
9974
- dataAvailabilityLoad;
9975
- popularity;
9976
- imports;
9977
- exports;
9978
- extrinsicSize;
9979
- extrinsicCount;
9980
- bundleSize;
9981
- gasUsed;
9982
- static Codec = Compatibility.isGreaterOrEqual(GpVersion.V0_7_0)
9983
- ? descriptors_codec.Class(CoreStatistics, {
9984
- dataAvailabilityLoad: descriptors_codec.varU32,
9985
- popularity: codecVarU16,
9986
- imports: codecVarU16,
9987
- extrinsicCount: codecVarU16,
9988
- extrinsicSize: descriptors_codec.varU32,
9989
- exports: codecVarU16,
9990
- bundleSize: descriptors_codec.varU32,
9991
- gasUsed: codecVarGas,
9992
- })
9993
- : descriptors_codec.Class(CoreStatistics, {
9994
- dataAvailabilityLoad: descriptors_codec.varU32,
9995
- popularity: codecVarU16,
9996
- imports: codecVarU16,
9997
- exports: codecVarU16,
9998
- extrinsicSize: descriptors_codec.varU32,
9999
- extrinsicCount: codecVarU16,
10000
- bundleSize: descriptors_codec.varU32,
10001
- gasUsed: codecVarGas,
10002
- });
10003
- static create(v) {
10004
- return new CoreStatistics(v.dataAvailabilityLoad, v.popularity, v.imports, v.exports, v.extrinsicSize, v.extrinsicCount, v.bundleSize, v.gasUsed);
10005
- }
10006
- constructor(
10007
- /** `d` */
10008
- dataAvailabilityLoad,
10009
- /** `p` */
10010
- popularity,
10011
- /** `i` */
10012
- imports,
10013
- /** `e` */
10014
- exports,
10015
- /** `z` */
10016
- extrinsicSize,
10017
- /** `x` */
10018
- extrinsicCount,
10019
- /** `b` */
10020
- bundleSize,
10021
- /** `u` */
10022
- gasUsed) {
10023
- this.dataAvailabilityLoad = dataAvailabilityLoad;
10024
- this.popularity = popularity;
10025
- this.imports = imports;
10026
- this.exports = exports;
10027
- this.extrinsicSize = extrinsicSize;
10028
- this.extrinsicCount = extrinsicCount;
10029
- this.bundleSize = bundleSize;
10030
- this.gasUsed = gasUsed;
10031
- }
10032
- static empty() {
10033
- const zero = numbers_tryAsU32(0);
10034
- const zero16 = numbers_tryAsU16(0);
10035
- const zeroGas = tryAsServiceGas(0);
10036
- return new CoreStatistics(zero, zero16, zero16, zero16, zero, zero16, zero, zeroGas);
10037
- }
10038
- }
10039
- /**
10040
- * Service statistics.
10041
- * Updated per block, based on available work reports (`W`).
10042
- *
10043
- * https://graypaper.fluffylabs.dev/#/68eaa1f/185104185104?v=0.6.4
10044
- * https://github.com/gavofyork/graypaper/blob/9bffb08f3ea7b67832019176754df4fb36b9557d/text/statistics.tex#L77
10045
- */
10046
- class ServiceStatistics {
10047
- providedCount;
10048
- providedSize;
10049
- refinementCount;
10050
- refinementGasUsed;
10051
- imports;
10052
- exports;
10053
- extrinsicSize;
10054
- extrinsicCount;
10055
- accumulateCount;
10056
- accumulateGasUsed;
10057
- onTransfersCount;
10058
- onTransfersGasUsed;
10059
- static Codec = Compatibility.isGreaterOrEqual(GpVersion.V0_7_0)
10060
- ? descriptors_codec.Class(ServiceStatistics, {
10061
- providedCount: codecVarU16,
10062
- providedSize: descriptors_codec.varU32,
10063
- refinementCount: descriptors_codec.varU32,
10064
- refinementGasUsed: codecVarGas,
10065
- imports: codecVarU16,
10066
- extrinsicCount: codecVarU16,
10067
- extrinsicSize: descriptors_codec.varU32,
10068
- exports: codecVarU16,
10069
- accumulateCount: descriptors_codec.varU32,
10070
- accumulateGasUsed: codecVarGas,
10071
- onTransfersCount: descriptors_codec.varU32,
10072
- onTransfersGasUsed: codecVarGas,
10073
- })
10074
- : descriptors_codec.Class(ServiceStatistics, {
10075
- providedCount: codecVarU16,
10076
- providedSize: descriptors_codec.varU32,
10077
- refinementCount: descriptors_codec.varU32,
10078
- refinementGasUsed: codecVarGas,
10079
- imports: codecVarU16,
10080
- exports: codecVarU16,
10081
- extrinsicSize: descriptors_codec.varU32,
10082
- extrinsicCount: codecVarU16,
10083
- accumulateCount: descriptors_codec.varU32,
10084
- accumulateGasUsed: codecVarGas,
10085
- onTransfersCount: descriptors_codec.varU32,
10086
- onTransfersGasUsed: codecVarGas,
10087
- });
10088
- static create(v) {
10089
- return new ServiceStatistics(v.providedCount, v.providedSize, v.refinementCount, v.refinementGasUsed, v.imports, v.exports, v.extrinsicSize, v.extrinsicCount, v.accumulateCount, v.accumulateGasUsed, v.onTransfersCount, v.onTransfersGasUsed);
10227
+ /** Just update the `ServiceAccountInfo`. */
10228
+ UpdateServiceKind[UpdateServiceKind["Update"] = 0] = "Update";
10229
+ /** Create a new `Service` instance. */
10230
+ UpdateServiceKind[UpdateServiceKind["Create"] = 1] = "Create";
10231
+ })(UpdateServiceKind || (UpdateServiceKind = {}));
10232
+ /**
10233
+ * Update service info or create a new one.
10234
+ */
10235
+ class UpdateService {
10236
+ action;
10237
+ constructor(action) {
10238
+ this.action = action;
10090
10239
  }
10091
- constructor(
10092
- /** `p.0` */
10093
- providedCount,
10094
- /** `p.1` */
10095
- providedSize,
10096
- /** `r.0` */
10097
- refinementCount,
10098
- /** `r.1` */
10099
- refinementGasUsed,
10100
- /** `i` */
10101
- imports,
10102
- /** `e` */
10103
- exports,
10104
- /** `z` */
10105
- extrinsicSize,
10106
- /** `x` */
10107
- extrinsicCount,
10108
- /** `a.0` */
10109
- accumulateCount,
10110
- /** `a.1` */
10111
- accumulateGasUsed,
10112
- /** `t.0` */
10113
- onTransfersCount,
10114
- /** `t.1` */
10115
- onTransfersGasUsed) {
10116
- this.providedCount = providedCount;
10117
- this.providedSize = providedSize;
10118
- this.refinementCount = refinementCount;
10119
- this.refinementGasUsed = refinementGasUsed;
10120
- this.imports = imports;
10121
- this.exports = exports;
10122
- this.extrinsicSize = extrinsicSize;
10123
- this.extrinsicCount = extrinsicCount;
10124
- this.accumulateCount = accumulateCount;
10125
- this.accumulateGasUsed = accumulateGasUsed;
10126
- this.onTransfersCount = onTransfersCount;
10127
- this.onTransfersGasUsed = onTransfersGasUsed;
10240
+ static update({ serviceInfo }) {
10241
+ return new UpdateService({
10242
+ kind: UpdateServiceKind.Update,
10243
+ account: serviceInfo,
10244
+ });
10128
10245
  }
10129
- static empty() {
10130
- const zero = numbers_tryAsU32(0);
10131
- const zero16 = numbers_tryAsU16(0);
10132
- const zeroGas = tryAsServiceGas(0);
10133
- return new ServiceStatistics(zero16, zero, zero, zeroGas, zero16, zero16, zero, zero16, zero, zeroGas, zero, zeroGas);
10246
+ static create({ serviceInfo, lookupHistory, }) {
10247
+ return new UpdateService({
10248
+ kind: UpdateServiceKind.Create,
10249
+ account: serviceInfo,
10250
+ lookupHistory,
10251
+ });
10134
10252
  }
10135
10253
  }
10136
- /** `pi`: Statistics of each validator, cores statistics and services statistics. */
10137
- class StatisticsData {
10138
- current;
10139
- previous;
10140
- cores;
10141
- services;
10142
- static Codec = descriptors_codec.Class(StatisticsData, {
10143
- current: codecPerValidator(ValidatorStatistics.Codec),
10144
- previous: codecPerValidator(ValidatorStatistics.Codec),
10145
- cores: codecPerCore(CoreStatistics.Codec),
10146
- services: descriptors_codec.dictionary(codecServiceId, ServiceStatistics.Codec, {
10147
- sortKeys: (a, b) => a - b,
10148
- }),
10149
- });
10150
- static create(v) {
10151
- return new StatisticsData(v.current, v.previous, v.cores, v.services);
10254
+ /** Update service storage kind. */
10255
+ var UpdateStorageKind;
10256
+ (function (UpdateStorageKind) {
10257
+ /** Set a storage value. */
10258
+ UpdateStorageKind[UpdateStorageKind["Set"] = 0] = "Set";
10259
+ /** Remove a storage value. */
10260
+ UpdateStorageKind[UpdateStorageKind["Remove"] = 1] = "Remove";
10261
+ })(UpdateStorageKind || (UpdateStorageKind = {}));
10262
+ /**
10263
+ * Update service storage item.
10264
+ *
10265
+ * Can either create/modify an entry or remove it.
10266
+ */
10267
+ class UpdateStorage {
10268
+ action;
10269
+ constructor(action) {
10270
+ this.action = action;
10152
10271
  }
10153
- constructor(current, previous, cores, services) {
10154
- this.current = current;
10155
- this.previous = previous;
10156
- this.cores = cores;
10157
- this.services = services;
10272
+ static set({ storage }) {
10273
+ return new UpdateStorage({ kind: UpdateStorageKind.Set, storage });
10274
+ }
10275
+ static remove({ key }) {
10276
+ return new UpdateStorage({ kind: UpdateStorageKind.Remove, key });
10277
+ }
10278
+ get key() {
10279
+ if (this.action.kind === UpdateStorageKind.Remove) {
10280
+ return this.action.key;
10281
+ }
10282
+ return this.action.storage.key;
10283
+ }
10284
+ get value() {
10285
+ if (this.action.kind === UpdateStorageKind.Remove) {
10286
+ return null;
10287
+ }
10288
+ return this.action.storage.value;
10158
10289
  }
10159
10290
  }
10160
10291
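For orientation (not part of the diff): with the service id removed from the update constructors, a batch of per-service storage updates is now keyed by id in a map, roughly as sketched below; serviceId, key, value and staleKey are placeholders.

// Sketch only — `serviceId`, `key`, `value` and `staleKey` are placeholders.
const storageUpdates = new Map([
  [
    serviceId,
    [
      UpdateStorage.set({ storage: StorageItem.create({ key, value }) }),
      UpdateStorage.remove({ key: staleKey }),
    ],
  ],
]);
// The `.key` / `.value` getters on each entry resolve against the wrapped
// action, so consumers no longer need the service id carried per update.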
 
@@ -10179,6 +10310,7 @@ class StatisticsData {
10179
10310
 
10180
10311
 
10181
10312
 
10313
+
10182
10314
 
10183
10315
 
10184
10316
  var in_memory_state_UpdateError;
@@ -10279,10 +10411,11 @@ class InMemoryService extends WithDebug {
10279
10411
  /**
10280
10412
  * A special version of state, stored fully in-memory.
10281
10413
  */
10282
- class InMemoryState extends WithDebug {
10414
+ class in_memory_state_InMemoryState extends WithDebug {
10415
+ chainSpec;
10283
10416
  /** Create a new `InMemoryState` by providing all required fields. */
10284
- static create(state) {
10285
- return new InMemoryState(state);
10417
+ static new(chainSpec, state) {
10418
+ return new in_memory_state_InMemoryState(chainSpec, state);
10286
10419
  }
10287
10420
  /**
10288
10421
  * Create a new `InMemoryState` with a partial state override.
@@ -10291,14 +10424,14 @@ class InMemoryState extends WithDebug {
10291
10424
  * not-necessarily coherent values.
10292
10425
  */
10293
10426
  static partial(spec, partial) {
10294
- const state = InMemoryState.empty(spec);
10427
+ const state = in_memory_state_InMemoryState.empty(spec);
10295
10428
  Object.assign(state, partial);
10296
10429
  return state;
10297
10430
  }
10298
10431
  /**
10299
10432
  * Create a new `InMemoryState` from some other state object.
10300
10433
  */
10301
- static copyFrom(other, servicesData) {
10434
+ static copyFrom(chainSpec, other, servicesData) {
10302
10435
  const services = new Map();
10303
10436
  for (const [id, entries] of servicesData.entries()) {
10304
10437
  const service = other.getService(id);
@@ -10308,7 +10441,7 @@ class InMemoryState extends WithDebug {
10308
10441
  const inMemService = InMemoryService.copyFrom(service, entries);
10309
10442
  services.set(id, inMemService);
10310
10443
  }
10311
- return InMemoryState.create({
10444
+ return in_memory_state_InMemoryState.new(chainSpec, {
10312
10445
  availabilityAssignment: other.availabilityAssignment,
10313
10446
  accumulationQueue: other.accumulationQueue,
10314
10447
  designatedValidatorData: other.designatedValidatorData,
@@ -10349,12 +10482,12 @@ class InMemoryState extends WithDebug {
10349
10482
  * Modify the state and apply a single state update.
10350
10483
  */
10351
10484
  applyUpdate(update) {
10352
- const { servicesRemoved, servicesUpdates, preimages, storage, ...rest } = update;
10485
+ const { removed, created: _, updated, preimages, storage, ...rest } = update;
10353
10486
  // just assign all other variables
10354
10487
  Object.assign(this, rest);
10355
10488
  // and update the services state
10356
10489
  let result;
10357
- result = this.updateServices(servicesUpdates);
10490
+ result = this.updateServices(updated);
10358
10491
  if (result.isError) {
10359
10492
  return result;
10360
10493
  }
@@ -10366,7 +10499,7 @@ class InMemoryState extends WithDebug {
10366
10499
  if (result.isError) {
10367
10500
  return result;
10368
10501
  }
10369
- this.removeServices(servicesRemoved);
10502
+ this.removeServices(removed);
10370
10503
  return result_Result.ok(result_OK);
10371
10504
  }
10372
10505
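A minimal sketch (not part of the diff) of the renamed update fields consumed by applyUpdate above; state stands for an in_memory_state_InMemoryState instance and the empty collections are placeholders for real per-service maps.

// Sketch only — `state` is a placeholder instance; collections are empty for brevity.
const result = state.applyUpdate({
  removed: [],          // service ids to delete
  created: new Map(),   // ignored here (destructured away above)
  updated: new Map(),   // service id -> UpdateService
  preimages: new Map(), // service id -> UpdatePreimage[]
  storage: new Map(),   // service id -> UpdateStorage[]
});
if (result.isError) {
  throw new Error(resultToString(result));
}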
  removeServices(servicesRemoved) {
@@ -10375,89 +10508,102 @@ class InMemoryState extends WithDebug {
10375
10508
  this.services.delete(serviceId);
10376
10509
  }
10377
10510
  }
10378
- updateStorage(storage) {
10379
- for (const { serviceId, action } of storage ?? []) {
10380
- const { kind } = action;
10381
- const service = this.services.get(serviceId);
10382
- if (service === undefined) {
10383
- return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update storage of non-existing service: ${serviceId}`);
10384
- }
10385
- if (kind === UpdateStorageKind.Set) {
10386
- const { key, value } = action.storage;
10387
- service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
10388
- }
10389
- else if (kind === UpdateStorageKind.Remove) {
10390
- const { key } = action;
10391
- debug_check `
10511
+ updateStorage(storageUpdates) {
10512
+ if (storageUpdates === undefined) {
10513
+ return result_Result.ok(result_OK);
10514
+ }
10515
+ for (const [serviceId, updates] of storageUpdates.entries()) {
10516
+ for (const update of updates) {
10517
+ const { kind } = update.action;
10518
+ const service = this.services.get(serviceId);
10519
+ if (service === undefined) {
10520
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update storage of non-existing service: ${serviceId}`);
10521
+ }
10522
+ if (kind === UpdateStorageKind.Set) {
10523
+ const { key, value } = update.action.storage;
10524
+ service.data.storage.set(key.toString(), StorageItem.create({ key, value }));
10525
+ }
10526
+ else if (kind === UpdateStorageKind.Remove) {
10527
+ const { key } = update.action;
10528
+ debug_check `
10392
10529
  ${service.data.storage.has(key.toString())}
10393
- Attempting to remove non-existing storage item at ${serviceId}: ${action.key}
10530
+ Attempting to remove non-existing storage item at ${serviceId}: ${update.action.key}
10394
10531
  `;
10395
- service.data.storage.delete(key.toString());
10396
- }
10397
- else {
10398
- debug_assertNever(kind);
10532
+ service.data.storage.delete(key.toString());
10533
+ }
10534
+ else {
10535
+ debug_assertNever(kind);
10536
+ }
10399
10537
  }
10400
10538
  }
10401
10539
  return result_Result.ok(result_OK);
10402
10540
  }
10403
- updatePreimages(preimages) {
10404
- for (const { serviceId, action } of preimages ?? []) {
10541
+ updatePreimages(preimagesUpdates) {
10542
+ if (preimagesUpdates === undefined) {
10543
+ return result_Result.ok(result_OK);
10544
+ }
10545
+ for (const [serviceId, updates] of preimagesUpdates.entries()) {
10405
10546
  const service = this.services.get(serviceId);
10406
10547
  if (service === undefined) {
10407
- return result_Result.error(in_memory_state_UpdateError.NoService, `Attempting to update preimage of non-existing service: ${serviceId}`);
10408
- }
10409
- const { kind } = action;
10410
- if (kind === UpdatePreimageKind.Provide) {
10411
- const { preimage, slot } = action;
10412
- if (service.data.preimages.has(preimage.hash)) {
10413
- return result_Result.error(in_memory_state_UpdateError.PreimageExists, `Overwriting existing preimage at ${serviceId}: ${preimage}`);
10414
- }
10415
- service.data.preimages.set(preimage.hash, preimage);
10416
- if (slot !== null) {
10417
- const lookupHistory = service.data.lookupHistory.get(preimage.hash);
10418
- const length = numbers_tryAsU32(preimage.blob.length);
10419
- const lookup = new service_LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
10420
- if (lookupHistory === undefined) {
10421
- // no lookup history for that preimage at all (edge case, should be requested)
10422
- service.data.lookupHistory.set(preimage.hash, [lookup]);
10548
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Attempting to update preimage of non-existing service: ${serviceId}`);
10549
+ }
10550
+ for (const update of updates) {
10551
+ const { kind } = update.action;
10552
+ if (kind === UpdatePreimageKind.Provide) {
10553
+ const { preimage, slot } = update.action;
10554
+ if (service.data.preimages.has(preimage.hash)) {
10555
+ return result_Result.error(in_memory_state_UpdateError.PreimageExists, () => `Overwriting existing preimage at ${serviceId}: ${preimage}`);
10423
10556
  }
10424
- else {
10425
- // insert or replace existing entry
10426
- const index = lookupHistory.map((x) => x.length).indexOf(length);
10427
- lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
10557
+ service.data.preimages.set(preimage.hash, preimage);
10558
+ if (slot !== null) {
10559
+ const lookupHistory = service.data.lookupHistory.get(preimage.hash);
10560
+ const length = numbers_tryAsU32(preimage.blob.length);
10561
+ const lookup = new service_LookupHistoryItem(preimage.hash, length, tryAsLookupHistorySlots([slot]));
10562
+ if (lookupHistory === undefined) {
10563
+ // no lookup history for that preimage at all (edge case, should be requested)
10564
+ service.data.lookupHistory.set(preimage.hash, [lookup]);
10565
+ }
10566
+ else {
10567
+ // insert or replace existing entry
10568
+ const index = lookupHistory.map((x) => x.length).indexOf(length);
10569
+ lookupHistory.splice(index, index === -1 ? 0 : 1, lookup);
10570
+ }
10428
10571
  }
10429
10572
  }
10430
- }
10431
- else if (kind === UpdatePreimageKind.Remove) {
10432
- const { hash, length } = action;
10433
- service.data.preimages.delete(hash);
10434
- const history = service.data.lookupHistory.get(hash) ?? [];
10435
- const idx = history.map((x) => x.length).indexOf(length);
10436
- if (idx !== -1) {
10437
- history.splice(idx, 1);
10573
+ else if (kind === UpdatePreimageKind.Remove) {
10574
+ const { hash, length } = update.action;
10575
+ service.data.preimages.delete(hash);
10576
+ const history = service.data.lookupHistory.get(hash) ?? [];
10577
+ const idx = history.map((x) => x.length).indexOf(length);
10578
+ if (idx !== -1) {
10579
+ history.splice(idx, 1);
10580
+ }
10581
+ }
10582
+ else if (kind === UpdatePreimageKind.UpdateOrAdd) {
10583
+ const { item } = update.action;
10584
+ const history = service.data.lookupHistory.get(item.hash) ?? [];
10585
+ const existingIdx = history.map((x) => x.length).indexOf(item.length);
10586
+ const removeCount = existingIdx === -1 ? 0 : 1;
10587
+ history.splice(existingIdx, removeCount, item);
10588
+ service.data.lookupHistory.set(item.hash, history);
10589
+ }
10590
+ else {
10591
+ debug_assertNever(kind);
10438
10592
  }
10439
- }
10440
- else if (kind === UpdatePreimageKind.UpdateOrAdd) {
10441
- const { item } = action;
10442
- const history = service.data.lookupHistory.get(item.hash) ?? [];
10443
- const existingIdx = history.map((x) => x.length).indexOf(item.length);
10444
- const removeCount = existingIdx === -1 ? 0 : 1;
10445
- history.splice(existingIdx, removeCount, item);
10446
- service.data.lookupHistory.set(item.hash, history);
10447
- }
10448
- else {
10449
- debug_assertNever(kind);
10450
10593
  }
10451
10594
  }
10452
10595
  return result_Result.ok(result_OK);
10453
10596
  }
10454
10597
  updateServices(servicesUpdates) {
10455
- for (const { serviceId, action } of servicesUpdates ?? []) {
10456
- const { kind, account } = action;
10598
+ if (servicesUpdates === undefined) {
10599
+ return result_Result.ok(result_OK);
10600
+ }
10601
+ for (const [serviceId, update] of servicesUpdates.entries()) {
10602
+ const { kind, account } = update.action;
10457
10603
  if (kind === UpdateServiceKind.Create) {
10458
- const { lookupHistory } = action;
10604
+ const { lookupHistory } = update.action;
10459
10605
  if (this.services.has(serviceId)) {
10460
- return result_Result.error(in_memory_state_UpdateError.DuplicateService, `${serviceId} already exists!`);
10606
+ return result_Result.error(in_memory_state_UpdateError.DuplicateService, () => `${serviceId} already exists!`);
10461
10607
  }
10462
10608
  this.services.set(serviceId, new InMemoryService(serviceId, {
10463
10609
  info: account,
@@ -10469,7 +10615,7 @@ class InMemoryState extends WithDebug {
10469
10615
  else if (kind === UpdateServiceKind.Update) {
10470
10616
  const existingService = this.services.get(serviceId);
10471
10617
  if (existingService === undefined) {
10472
- return result_Result.error(in_memory_state_UpdateError.NoService, `Cannot update ${serviceId} because it does not exist.`);
10618
+ return result_Result.error(in_memory_state_UpdateError.NoService, () => `Cannot update ${serviceId} because it does not exist.`);
10473
10619
  }
10474
10620
  existingService.data.info = account;
10475
10621
  }
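As a side note illustrated below (not part of the diff), these error paths now pass a lazy details callback to Result.error, so the message is only formatted when a caller actually asks for it; serviceId is a placeholder.

// Sketch only — `serviceId` is a placeholder.
const res = result_Result.error(in_memory_state_UpdateError.NoService, () => `Cannot update ${serviceId} because it does not exist.`);
if (res.isError) {
  // `details` is now a function; helpers such as `resultToString` invoke it lazily.
  console.warn(res.details());
}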
@@ -10505,8 +10651,9 @@ class InMemoryState extends WithDebug {
10505
10651
  getService(id) {
10506
10652
  return this.services.get(id) ?? null;
10507
10653
  }
10508
- constructor(s) {
10654
+ constructor(chainSpec, s) {
10509
10655
  super();
10656
+ this.chainSpec = chainSpec;
10510
10657
  this.availabilityAssignment = s.availabilityAssignment;
10511
10658
  this.designatedValidatorData = s.designatedValidatorData;
10512
10659
  this.nextValidatorData = s.nextValidatorData;
@@ -10528,31 +10675,34 @@ class InMemoryState extends WithDebug {
10528
10675
  this.accumulationOutputLog = s.accumulationOutputLog;
10529
10676
  this.services = s.services;
10530
10677
  }
10678
+ view() {
10679
+ return new InMemoryStateView(this.chainSpec, this);
10680
+ }
10531
10681
  /**
10532
10682
  * Create an empty and possibly incoherent `InMemoryState`.
10533
10683
  */
10534
10684
  static empty(spec) {
10535
- return new InMemoryState({
10685
+ return new in_memory_state_InMemoryState(spec, {
10536
10686
  availabilityAssignment: tryAsPerCore(Array.from({ length: spec.coresCount }, () => null), spec),
10537
- designatedValidatorData: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => ValidatorData.create({
10687
+ designatedValidatorData: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => validator_data_ValidatorData.create({
10538
10688
  bandersnatch: bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque(),
10539
10689
  bls: bytes_Bytes.zero(BLS_KEY_BYTES).asOpaque(),
10540
10690
  ed25519: bytes_Bytes.zero(ED25519_KEY_BYTES).asOpaque(),
10541
10691
  metadata: bytes_Bytes.zero(VALIDATOR_META_BYTES).asOpaque(),
10542
10692
  })), spec),
10543
- nextValidatorData: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => ValidatorData.create({
10693
+ nextValidatorData: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => validator_data_ValidatorData.create({
10544
10694
  bandersnatch: bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque(),
10545
10695
  bls: bytes_Bytes.zero(BLS_KEY_BYTES).asOpaque(),
10546
10696
  ed25519: bytes_Bytes.zero(ED25519_KEY_BYTES).asOpaque(),
10547
10697
  metadata: bytes_Bytes.zero(VALIDATOR_META_BYTES).asOpaque(),
10548
10698
  })), spec),
10549
- currentValidatorData: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => ValidatorData.create({
10699
+ currentValidatorData: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => validator_data_ValidatorData.create({
10550
10700
  bandersnatch: bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque(),
10551
10701
  bls: bytes_Bytes.zero(BLS_KEY_BYTES).asOpaque(),
10552
10702
  ed25519: bytes_Bytes.zero(ED25519_KEY_BYTES).asOpaque(),
10553
10703
  metadata: bytes_Bytes.zero(VALIDATOR_META_BYTES).asOpaque(),
10554
10704
  })), spec),
10555
- previousValidatorData: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => ValidatorData.create({
10705
+ previousValidatorData: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => validator_data_ValidatorData.create({
10556
10706
  bandersnatch: bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque(),
10557
10707
  bls: bytes_Bytes.zero(BLS_KEY_BYTES).asOpaque(),
10558
10708
  ed25519: bytes_Bytes.zero(ED25519_KEY_BYTES).asOpaque(),
@@ -10568,7 +10718,7 @@ class InMemoryState extends WithDebug {
10568
10718
  entropy: FixedSizeArray.fill(() => bytes_Bytes.zero(hash_HASH_SIZE).asOpaque(), ENTROPY_ENTRIES),
10569
10719
  authPools: tryAsPerCore(Array.from({ length: spec.coresCount }, () => sized_array_asKnownSize([])), spec),
10570
10720
  authQueues: tryAsPerCore(Array.from({ length: spec.coresCount }, () => FixedSizeArray.fill(() => bytes_Bytes.zero(hash_HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE)), spec),
10571
- recentBlocks: RecentBlocksHistory.empty(),
10721
+ recentBlocks: RecentBlocks.empty(),
10572
10722
  statistics: StatisticsData.create({
10573
10723
  current: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => ValidatorStatistics.empty()), spec),
10574
10724
  previous: tryAsPerValidator(Array.from({ length: spec.validatorsCount }, () => ValidatorStatistics.empty()), spec),
@@ -10578,8 +10728,8 @@ class InMemoryState extends WithDebug {
10578
10728
  accumulationQueue: tryAsPerEpochBlock(Array.from({ length: spec.epochLength }, () => []), spec),
10579
10729
  recentlyAccumulated: tryAsPerEpochBlock(Array.from({ length: spec.epochLength }, () => HashSet.new()), spec),
10580
10730
  ticketsAccumulator: sized_array_asKnownSize([]),
10581
- sealingKeySeries: SafroleSealingKeysData.keys(tryAsPerEpochBlock(Array.from({ length: spec.epochLength }, () => bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque()), spec)),
10582
- epochRoot: bytes_Bytes.zero(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
10731
+ sealingKeySeries: safrole_data_SafroleSealingKeysData.keys(tryAsPerEpochBlock(Array.from({ length: spec.epochLength }, () => bytes_Bytes.zero(BANDERSNATCH_KEY_BYTES).asOpaque()), spec)),
10732
+ epochRoot: bytes_Bytes.zero(bandersnatch_BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
10583
10733
  privilegedServices: PrivilegedServices.create({
10584
10734
  manager: tryAsServiceId(0),
10585
10735
  assigners: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
@@ -10619,51 +10769,10 @@ const serviceDataCodec = descriptors_codec.dictionary(descriptors_codec.u32.asOp
10619
10769
 
10620
10770
 
10621
10771
 
10622
- ;// CONCATENATED MODULE: ./packages/jam/state/not-yet-accumulated.ts
10623
-
10624
-
10625
10772
 
10626
10773
 
10627
10774
 
10628
10775
 
10629
- /**
10630
- * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
10631
- *
10632
- * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
10633
- */
10634
- class NotYetAccumulatedReport extends WithDebug {
10635
- report;
10636
- dependencies;
10637
- static Codec = descriptors_codec.Class(NotYetAccumulatedReport, {
10638
- report: WorkReport.Codec,
10639
- dependencies: codecKnownSizeArray(descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(), {
10640
- typicalLength: MAX_REPORT_DEPENDENCIES / 2,
10641
- maxLength: MAX_REPORT_DEPENDENCIES,
10642
- minLength: 0,
10643
- }),
10644
- });
10645
- static create({ report, dependencies }) {
10646
- return new NotYetAccumulatedReport(report, dependencies);
10647
- }
10648
- constructor(
10649
- /**
10650
- * Each of these were made available at most one epoch ago
10651
- * but have or had unfulfilled dependencies.
10652
- */
10653
- report,
10654
- /**
10655
- * Alongside the work-report itself, we retain its un-accumulated
10656
- * dependencies, a set of work-package hashes.
10657
- *
10658
- * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
10659
- */
10660
- dependencies) {
10661
- super();
10662
- this.report = report;
10663
- this.dependencies = dependencies;
10664
- }
10665
- }
10666
-
10667
10776
  ;// CONCATENATED MODULE: ./packages/jam/state-merkleization/serialize.ts
10668
10777
 
10669
10778
 
@@ -10676,26 +10785,19 @@ class NotYetAccumulatedReport extends WithDebug {
10676
10785
 
10677
10786
 
10678
10787
 
10679
-
10680
-
10681
-
10682
10788
  /** Serialization for particular state entries. */
10683
- var serialize;
10789
+ var serialize_serialize;
10684
10790
  (function (serialize) {
10685
10791
  /** C(1): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b15013b1501?v=0.6.7 */
10686
10792
  serialize.authPools = {
10687
10793
  key: stateKeys.index(StateKeyIdx.Alpha),
10688
- Codec: codecPerCore(codecKnownSizeArray(descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(), {
10689
- minLength: 0,
10690
- maxLength: MAX_AUTH_POOL_SIZE,
10691
- typicalLength: MAX_AUTH_POOL_SIZE,
10692
- })),
10794
+ Codec: authPoolsCodec,
10693
10795
  extract: (s) => s.authPools,
10694
10796
  };
10695
10797
  /** C(2): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b31013b3101?v=0.6.7 */
10696
10798
  serialize.authQueues = {
10697
10799
  key: stateKeys.index(StateKeyIdx.Phi),
10698
- Codec: codecPerCore(codecFixedSizeArray(descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE)),
10800
+ Codec: authQueuesCodec,
10699
10801
  extract: (s) => s.authQueues,
10700
10802
  };
10701
10803
  /**
@@ -10704,7 +10806,7 @@ var serialize;
10704
10806
  */
10705
10807
  serialize.recentBlocks = {
10706
10808
  key: stateKeys.index(StateKeyIdx.Beta),
10707
- Codec: RecentBlocksHistory.Codec,
10809
+ Codec: RecentBlocks.Codec,
10708
10810
  extract: (s) => s.recentBlocks,
10709
10811
  };
10710
10812
  /** C(4): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b63013b6301?v=0.6.7 */
@@ -10733,25 +10835,25 @@ var serialize;
10733
10835
  /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
10734
10836
  serialize.designatedValidators = {
10735
10837
  key: stateKeys.index(StateKeyIdx.Iota),
10736
- Codec: codecPerValidator(ValidatorData.Codec),
10838
+ Codec: validatorsDataCodec,
10737
10839
  extract: (s) => s.designatedValidatorData,
10738
10840
  };
10739
10841
  /** C(8): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b0d023b0d02?v=0.6.7 */
10740
10842
  serialize.currentValidators = {
10741
10843
  key: stateKeys.index(StateKeyIdx.Kappa),
10742
- Codec: codecPerValidator(ValidatorData.Codec),
10844
+ Codec: validatorsDataCodec,
10743
10845
  extract: (s) => s.currentValidatorData,
10744
10846
  };
10745
10847
  /** C(9): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b1a023b1a02?v=0.6.7 */
10746
10848
  serialize.previousValidators = {
10747
10849
  key: stateKeys.index(StateKeyIdx.Lambda),
10748
- Codec: codecPerValidator(ValidatorData.Codec),
10850
+ Codec: validatorsDataCodec,
10749
10851
  extract: (s) => s.previousValidatorData,
10750
10852
  };
10751
10853
  /** C(10): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b27023b2702?v=0.6.7 */
10752
10854
  serialize.availabilityAssignment = {
10753
10855
  key: stateKeys.index(StateKeyIdx.Rho),
10754
- Codec: codecPerCore(descriptors_codec.optional(AvailabilityAssignment.Codec)),
10856
+ Codec: availabilityAssignmentsCodec,
10755
10857
  extract: (s) => s.availabilityAssignment,
10756
10858
  };
10757
10859
  /** C(11): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e023b3e02?v=0.6.7 */
@@ -10775,13 +10877,13 @@ var serialize;
10775
10877
  /** C(14): https://graypaper.fluffylabs.dev/#/1c979cb/3bf0023bf002?v=0.7.1 */
10776
10878
  serialize.accumulationQueue = {
10777
10879
  key: stateKeys.index(StateKeyIdx.Omega),
10778
- Codec: codecPerEpochBlock(readonlyArray(descriptors_codec.sequenceVarLen(NotYetAccumulatedReport.Codec))),
10880
+ Codec: accumulationQueueCodec,
10779
10881
  extract: (s) => s.accumulationQueue,
10780
10882
  };
10781
10883
  /** C(15): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b96023b9602?v=0.6.7 */
10782
10884
  serialize.recentlyAccumulated = {
10783
10885
  key: stateKeys.index(StateKeyIdx.Xi),
10784
- Codec: codecPerEpochBlock(descriptors_codec.sequenceVarLen(descriptors_codec.bytes(hash_HASH_SIZE).asOpaque()).convert((x) => Array.from(x), (x) => HashSet.from(x))),
10886
+ Codec: recentlyAccumulatedCodec,
10785
10887
  extract: (s) => s.recentlyAccumulated,
10786
10888
  };
10787
10889
  /** C(16): https://graypaper.fluffylabs.dev/#/38c4e62/3b46033b4603?v=0.7.0 */
@@ -10810,9 +10912,9 @@ var serialize;
10810
10912
  /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
10811
10913
  serialize.serviceLookupHistory = (blake2b, serviceId, hash, len) => ({
10812
10914
  key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
10813
- Codec: readonlyArray(descriptors_codec.sequenceVarLen(descriptors_codec.u32)),
10915
+ Codec: descriptors_readonlyArray(descriptors_codec.sequenceVarLen(descriptors_codec.u32)),
10814
10916
  });
10815
- })(serialize || (serialize = {}));
10917
+ })(serialize_serialize || (serialize_serialize = {}));
10816
10918
  /**
10817
10919
  * Just dump the entire terminal blob as-is.
10818
10920
  *
@@ -10822,6 +10924,87 @@ var serialize;
10822
10924
  */
10823
10925
  const dumpCodec = Descriptor.new("Dump", { bytes: 64, isExact: false }, (e, v) => e.bytes(bytes_Bytes.fromBlob(v.raw, v.raw.length)), (d) => bytes_BytesBlob.blobFrom(d.bytes(d.source.length - d.bytesRead()).raw), (s) => s.bytes(s.decoder.source.length - s.decoder.bytesRead()));
10824
10926
 
10927
+ ;// CONCATENATED MODULE: ./packages/jam/state-merkleization/serialized-state-view.ts
10928
+
10929
+
10930
+ class SerializedStateView {
10931
+ spec;
10932
+ backend;
10933
+ recentlyUsedServices;
10934
+ viewCache;
10935
+ constructor(spec, backend,
10936
+ /** Best-effort list of recently active services. */
10937
+ recentlyUsedServices, viewCache) {
10938
+ this.spec = spec;
10939
+ this.backend = backend;
10940
+ this.recentlyUsedServices = recentlyUsedServices;
10941
+ this.viewCache = viewCache;
10942
+ }
10943
+ retrieveView({ key, Codec }, description) {
10944
+ const cached = this.viewCache.get(key);
10945
+ if (cached !== undefined) {
10946
+ return cached;
10947
+ }
10948
+ const bytes = this.backend.get(key);
10949
+ if (bytes === null) {
10950
+ throw new Error(`Required state entry for ${description} is missing! Accessing view of key: ${key}`);
10951
+ }
10952
+ // NOTE [ToDr] we are not using `Decoder.decodeObject` here because
10953
+ // it needs to skip to the end of the data, which is expensive.
10954
+ // we assume that the state data is correct and coherent anyway, so
10955
+ // for performance reasons we simply create the view here.
10956
+ const d = decoder_Decoder.fromBytesBlob(bytes);
10957
+ d.attachContext(this.spec);
10958
+ const view = Codec.View.decode(d);
10959
+ this.viewCache.set(key, view);
10960
+ return view;
10961
+ }
10962
+ availabilityAssignmentView() {
10963
+ return this.retrieveView(serialize_serialize.availabilityAssignment, "availabilityAssignmentView");
10964
+ }
10965
+ designatedValidatorDataView() {
10966
+ return this.retrieveView(serialize_serialize.designatedValidators, "designatedValidatorsView");
10967
+ }
10968
+ currentValidatorDataView() {
10969
+ return this.retrieveView(serialize_serialize.currentValidators, "currentValidatorsView");
10970
+ }
10971
+ previousValidatorDataView() {
10972
+ return this.retrieveView(serialize_serialize.previousValidators, "previousValidatorsView");
10973
+ }
10974
+ authPoolsView() {
10975
+ return this.retrieveView(serialize_serialize.authPools, "authPoolsView");
10976
+ }
10977
+ authQueuesView() {
10978
+ return this.retrieveView(serialize_serialize.authQueues, "authQueuesView");
10979
+ }
10980
+ recentBlocksView() {
10981
+ return this.retrieveView(serialize_serialize.recentBlocks, "recentBlocksView");
10982
+ }
10983
+ statisticsView() {
10984
+ return this.retrieveView(serialize_serialize.statistics, "statisticsView");
10985
+ }
10986
+ accumulationQueueView() {
10987
+ return this.retrieveView(serialize_serialize.accumulationQueue, "accumulationQueueView");
10988
+ }
10989
+ recentlyAccumulatedView() {
10990
+ return this.retrieveView(serialize_serialize.recentlyAccumulated, "recentlyAccumulatedView");
10991
+ }
10992
+ safroleDataView() {
10993
+ return this.retrieveView(serialize_serialize.safrole, "safroleDataView");
10994
+ }
10995
+ getServiceInfoView(id) {
10996
+ const serviceData = serialize_serialize.serviceData(id);
10997
+ const bytes = this.backend.get(serviceData.key);
10998
+ if (bytes === null) {
10999
+ return null;
11000
+ }
11001
+ if (!this.recentlyUsedServices.includes(id)) {
11002
+ this.recentlyUsedServices.push(id);
11003
+ }
11004
+ return decoder_Decoder.decodeObject(serviceData.Codec.View, bytes, this.spec);
11005
+ }
11006
+ }
11007
+
10825
11008
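A minimal usage sketch (not part of the diff) of the view-based access path added above; state stands for a serialized_state_SerializedState instance (whose view() method is added later in this diff) and serviceId is a placeholder.

// Sketch only — `state` and `serviceId` are placeholders.
const stateView = state.view(); // SerializedStateView over the same backend, sharing its view cache
const validators = stateView.currentValidatorDataView(); // lazily decoded codec view, cached per state key
const info = stateView.getServiceInfoView(serviceId);    // null when the service entry is absent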
  ;// CONCATENATED MODULE: ./packages/jam/state-merkleization/serialized-state.ts
10826
11009
 
10827
11010
 
@@ -10830,6 +11013,8 @@ const dumpCodec = Descriptor.new("Dump", { bytes: 64, isExact: false }, (e, v) =
10830
11013
 
10831
11014
 
10832
11015
 
11016
+
11017
+
10833
11018
  /**
10834
11019
  * State object which reads it's entries from some backend.
10835
11020
  *
@@ -10842,7 +11027,7 @@ class serialized_state_SerializedState {
10842
11027
  spec;
10843
11028
  blake2b;
10844
11029
  backend;
10845
- _recentServiceIds;
11030
+ recentlyUsedServices;
10846
11031
  /** Create a state-like object from collection of serialized entries. */
10847
11032
  static fromStateEntries(spec, blake2b, state, recentServices = []) {
10848
11033
  return new serialized_state_SerializedState(spec, blake2b, state, recentServices);
@@ -10851,106 +11036,120 @@ class serialized_state_SerializedState {
10851
11036
  static new(spec, blake2b, db, recentServices = []) {
10852
11037
  return new serialized_state_SerializedState(spec, blake2b, db, recentServices);
10853
11038
  }
11039
+ dataCache = hash_dictionary_HashDictionary.new();
11040
+ viewCache = hash_dictionary_HashDictionary.new();
10854
11041
  constructor(spec, blake2b, backend,
10855
11042
  /** Best-effort list of recently active services. */
10856
- _recentServiceIds) {
11043
+ recentlyUsedServices) {
10857
11044
  this.spec = spec;
10858
11045
  this.blake2b = blake2b;
10859
11046
  this.backend = backend;
10860
- this._recentServiceIds = _recentServiceIds;
11047
+ this.recentlyUsedServices = recentlyUsedServices;
10861
11048
  }
10862
11049
  /** Comparing the serialized states, just means comparing their backends. */
10863
11050
  [TEST_COMPARE_USING]() {
10864
11051
  return this.backend;
10865
11052
  }
11053
+ /** Return a non-decoding version of the state. */
11054
+ view() {
11055
+ return new SerializedStateView(this.spec, this.backend, this.recentlyUsedServices, this.viewCache);
11056
+ }
10866
11057
  // TODO [ToDr] Temporary method to update the state,
10867
11058
  // without changing references.
10868
11059
  updateBackend(newBackend) {
10869
11060
  this.backend = newBackend;
11061
+ this.dataCache = hash_dictionary_HashDictionary.new();
11062
+ this.viewCache = hash_dictionary_HashDictionary.new();
10870
11063
  }
10871
11064
  recentServiceIds() {
10872
- return this._recentServiceIds;
11065
+ return this.recentlyUsedServices;
10873
11066
  }
10874
11067
  getService(id) {
10875
- const serviceData = this.retrieveOptional(serialize.serviceData(id));
11068
+ const serviceData = this.retrieveOptional(serialize_serialize.serviceData(id));
10876
11069
  if (serviceData === undefined) {
10877
11070
  return null;
10878
11071
  }
10879
- if (!this._recentServiceIds.includes(id)) {
10880
- this._recentServiceIds.push(id);
11072
+ if (!this.recentlyUsedServices.includes(id)) {
11073
+ this.recentlyUsedServices.push(id);
10881
11074
  }
10882
11075
  return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
10883
11076
  }
10884
- retrieve({ key, Codec }, description) {
10885
- const bytes = this.backend.get(key);
10886
- if (bytes === null) {
10887
- throw new Error(`Required state entry for ${description} is missing!. Accessing key: ${key}`);
11077
+ retrieve(k, description) {
11078
+ const data = this.retrieveOptional(k);
11079
+ if (data === undefined) {
11080
+ throw new Error(`Required state entry for ${description} is missing!. Accessing key: ${k.key}`);
10888
11081
  }
10889
- return decoder_Decoder.decodeObject(Codec, bytes, this.spec);
11082
+ return data;
10890
11083
  }
10891
11084
  retrieveOptional({ key, Codec }) {
11085
+ const cached = this.dataCache.get(key);
11086
+ if (cached !== undefined) {
11087
+ return cached;
11088
+ }
10892
11089
  const bytes = this.backend.get(key);
10893
11090
  if (bytes === null) {
10894
11091
  return undefined;
10895
11092
  }
10896
- return decoder_Decoder.decodeObject(Codec, bytes, this.spec);
11093
+ const data = decoder_Decoder.decodeObject(Codec, bytes, this.spec);
11094
+ this.dataCache.set(key, data);
11095
+ return data;
10897
11096
  }
10898
11097
  get availabilityAssignment() {
10899
- return this.retrieve(serialize.availabilityAssignment, "availabilityAssignment");
11098
+ return this.retrieve(serialize_serialize.availabilityAssignment, "availabilityAssignment");
10900
11099
  }
10901
11100
  get designatedValidatorData() {
10902
- return this.retrieve(serialize.designatedValidators, "designatedValidatorData");
11101
+ return this.retrieve(serialize_serialize.designatedValidators, "designatedValidatorData");
10903
11102
  }
10904
11103
  get nextValidatorData() {
10905
- return this.retrieve(serialize.safrole, "safroleData.nextValidatorData").nextValidatorData;
11104
+ return this.retrieve(serialize_serialize.safrole, "safroleData.nextValidatorData").nextValidatorData;
10906
11105
  }
10907
11106
  get currentValidatorData() {
10908
- return this.retrieve(serialize.currentValidators, "currentValidators");
11107
+ return this.retrieve(serialize_serialize.currentValidators, "currentValidators");
10909
11108
  }
10910
11109
  get previousValidatorData() {
10911
- return this.retrieve(serialize.previousValidators, "previousValidators");
11110
+ return this.retrieve(serialize_serialize.previousValidators, "previousValidators");
10912
11111
  }
10913
11112
  get disputesRecords() {
10914
- return this.retrieve(serialize.disputesRecords, "disputesRecords");
11113
+ return this.retrieve(serialize_serialize.disputesRecords, "disputesRecords");
10915
11114
  }
10916
11115
  get timeslot() {
10917
- return this.retrieve(serialize.timeslot, "timeslot");
11116
+ return this.retrieve(serialize_serialize.timeslot, "timeslot");
10918
11117
  }
10919
11118
  get entropy() {
10920
- return this.retrieve(serialize.entropy, "entropy");
11119
+ return this.retrieve(serialize_serialize.entropy, "entropy");
10921
11120
  }
10922
11121
  get authPools() {
10923
- return this.retrieve(serialize.authPools, "authPools");
11122
+ return this.retrieve(serialize_serialize.authPools, "authPools");
10924
11123
  }
10925
11124
  get authQueues() {
10926
- return this.retrieve(serialize.authQueues, "authQueues");
11125
+ return this.retrieve(serialize_serialize.authQueues, "authQueues");
10927
11126
  }
10928
11127
  get recentBlocks() {
10929
- return this.retrieve(serialize.recentBlocks, "recentBlocks");
11128
+ return this.retrieve(serialize_serialize.recentBlocks, "recentBlocks");
10930
11129
  }
10931
11130
  get statistics() {
10932
- return this.retrieve(serialize.statistics, "statistics");
11131
+ return this.retrieve(serialize_serialize.statistics, "statistics");
10933
11132
  }
10934
11133
  get accumulationQueue() {
10935
- return this.retrieve(serialize.accumulationQueue, "accumulationQueue");
11134
+ return this.retrieve(serialize_serialize.accumulationQueue, "accumulationQueue");
10936
11135
  }
10937
11136
  get recentlyAccumulated() {
10938
- return this.retrieve(serialize.recentlyAccumulated, "recentlyAccumulated");
11137
+ return this.retrieve(serialize_serialize.recentlyAccumulated, "recentlyAccumulated");
10939
11138
  }
10940
11139
  get ticketsAccumulator() {
10941
- return this.retrieve(serialize.safrole, "safroleData.ticketsAccumulator").ticketsAccumulator;
11140
+ return this.retrieve(serialize_serialize.safrole, "safroleData.ticketsAccumulator").ticketsAccumulator;
10942
11141
  }
10943
11142
  get sealingKeySeries() {
10944
- return this.retrieve(serialize.safrole, "safrole.sealingKeySeries").sealingKeySeries;
11143
+ return this.retrieve(serialize_serialize.safrole, "safrole.sealingKeySeries").sealingKeySeries;
10945
11144
  }
10946
11145
  get epochRoot() {
10947
- return this.retrieve(serialize.safrole, "safrole.epochRoot").epochRoot;
11146
+ return this.retrieve(serialize_serialize.safrole, "safrole.epochRoot").epochRoot;
10948
11147
  }
10949
11148
  get privilegedServices() {
10950
- return this.retrieve(serialize.privilegedServices, "privilegedServices");
11149
+ return this.retrieve(serialize_serialize.privilegedServices, "privilegedServices");
10951
11150
  }
10952
11151
  get accumulationOutputLog() {
10953
- return this.retrieve(serialize.accumulationOutputLog, "accumulationOutputLog");
11152
+ return this.retrieve(serialize_serialize.accumulationOutputLog, "accumulationOutputLog");
10954
11153
  }
10955
11154
  }
10956
11155
  /** Service data representation on a serialized state. */
@@ -10979,9 +11178,9 @@ class SerializedService {
10979
11178
  serviceIdAndKey.set(numbers_u32AsLeBytes(this.serviceId));
10980
11179
  serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
10981
11180
  const key = opaque_asOpaqueType(bytes_BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
10982
- return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
11181
+ return this.retrieveOptional(serialize_serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
10983
11182
  }
10984
- return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
11183
+ return this.retrieveOptional(serialize_serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
10985
11184
  }
10986
11185
  /**
10987
11186
  * Check if preimage is present in the DB.
@@ -10990,15 +11189,15 @@ class SerializedService {
10990
11189
  */
10991
11190
  hasPreimage(hash) {
10992
11191
  // TODO [ToDr] consider optimizing to avoid fetching the whole data.
10993
- return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
11192
+ return this.retrieveOptional(serialize_serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
10994
11193
  }
10995
11194
  /** Retrieve preimage from the DB. */
10996
11195
  getPreimage(hash) {
10997
- return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
11196
+ return this.retrieveOptional(serialize_serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
10998
11197
  }
10999
11198
  /** Retrieve preimage lookup history. */
11000
11199
  getLookupHistory(hash, len) {
11001
- const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
11200
+ const rawSlots = this.retrieveOptional(serialize_serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
11002
11201
  if (rawSlots === undefined) {
11003
11202
  return null;
11004
11203
  }
@@ -11663,7 +11862,6 @@ function getKeccakTrieHasher(hasher) {
11663
11862
 
11664
11863
 
11665
11864
 
11666
-
11667
11865
  /** What should be done with that key? */
11668
11866
  var StateEntryUpdateAction;
11669
11867
  (function (StateEntryUpdateAction) {
@@ -11679,83 +11877,95 @@ function* serializeStateUpdate(spec, blake2b, update) {
11679
11877
  yield* serializeBasicKeys(spec, update);
11680
11878
  const encode = (codec, val) => encoder_Encoder.encodeObject(codec, val, spec);
11681
11879
  // then let's proceed with service updates
11682
- yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
11880
+ yield* serializeServiceUpdates(update.updated, encode, blake2b);
11683
11881
  yield* serializePreimages(update.preimages, encode, blake2b);
11684
11882
  yield* serializeStorage(update.storage, blake2b);
11685
- yield* serializeRemovedServices(update.servicesRemoved);
11883
+ yield* serializeRemovedServices(update.removed);
11686
11884
  }
11687
11885
  function* serializeRemovedServices(servicesRemoved) {
11688
- for (const serviceId of servicesRemoved ?? []) {
11886
+ if (servicesRemoved === undefined) {
11887
+ return;
11888
+ }
11889
+ for (const serviceId of servicesRemoved) {
11689
11890
  // TODO [ToDr] what about all data associated with a service?
11690
- const codec = serialize.serviceData(serviceId);
11891
+ const codec = serialize_serialize.serviceData(serviceId);
11691
11892
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11692
11893
  }
11693
11894
  }
11694
- function* serializeStorage(storage, blake2b) {
11695
- for (const { action, serviceId } of storage ?? []) {
11696
- switch (action.kind) {
11697
- case UpdateStorageKind.Set: {
11698
- const key = action.storage.key;
11699
- const codec = serialize.serviceStorage(blake2b, serviceId, key);
11700
- yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
11701
- break;
11702
- }
11703
- case UpdateStorageKind.Remove: {
11704
- const key = action.key;
11705
- const codec = serialize.serviceStorage(blake2b, serviceId, key);
11706
- yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11707
- break;
11895
+ function* serializeStorage(storageUpdates, blake2b) {
11896
+ if (storageUpdates === undefined) {
11897
+ return;
11898
+ }
11899
+ for (const [serviceId, updates] of storageUpdates.entries()) {
11900
+ for (const { action } of updates) {
11901
+ switch (action.kind) {
11902
+ case UpdateStorageKind.Set: {
11903
+ const key = action.storage.key;
11904
+ const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
11905
+ yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
11906
+ break;
11907
+ }
11908
+ case UpdateStorageKind.Remove: {
11909
+ const key = action.key;
11910
+ const codec = serialize_serialize.serviceStorage(blake2b, serviceId, key);
11911
+ yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11912
+ break;
11913
+ }
11708
11914
  }
11709
- default:
11710
- debug_assertNever(action);
11711
11915
  }
11712
11916
  }
11713
11917
  }
11714
- function* serializePreimages(preimages, encode, blake2b) {
11715
- for (const { action, serviceId } of preimages ?? []) {
11716
- switch (action.kind) {
11717
- case UpdatePreimageKind.Provide: {
11718
- const { hash, blob } = action.preimage;
11719
- const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11720
- yield [StateEntryUpdateAction.Insert, codec.key, blob];
11721
- if (action.slot !== null) {
11722
- const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, numbers_tryAsU32(blob.length));
11723
- yield [
11724
- StateEntryUpdateAction.Insert,
11725
- codec2.key,
11726
- encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
11727
- ];
11918
+ function* serializePreimages(preimagesUpdates, encode, blake2b) {
11919
+ if (preimagesUpdates === undefined) {
11920
+ return;
11921
+ }
11922
+ for (const [serviceId, updates] of preimagesUpdates.entries()) {
11923
+ for (const { action } of updates) {
11924
+ switch (action.kind) {
11925
+ case UpdatePreimageKind.Provide: {
11926
+ const { hash, blob } = action.preimage;
11927
+ const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
11928
+ yield [StateEntryUpdateAction.Insert, codec.key, blob];
11929
+ if (action.slot !== null) {
11930
+ const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, numbers_tryAsU32(blob.length));
11931
+ yield [
11932
+ StateEntryUpdateAction.Insert,
11933
+ codec2.key,
11934
+ encode(codec2.Codec, tryAsLookupHistorySlots([action.slot])),
11935
+ ];
11936
+ }
11937
+ break;
11938
+ }
11939
+ case UpdatePreimageKind.UpdateOrAdd: {
11940
+ const { hash, length, slots } = action.item;
11941
+ const codec = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11942
+ yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
11943
+ break;
11944
+ }
11945
+ case UpdatePreimageKind.Remove: {
11946
+ const { hash, length } = action;
11947
+ const codec = serialize_serialize.servicePreimages(blake2b, serviceId, hash);
11948
+ yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11949
+ const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11950
+ yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
11951
+ break;
11728
11952
  }
11729
- break;
11730
- }
11731
- case UpdatePreimageKind.UpdateOrAdd: {
11732
- const { hash, length, slots } = action.item;
11733
- const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11734
- yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
11735
- break;
11736
- }
11737
- case UpdatePreimageKind.Remove: {
11738
- const { hash, length } = action;
11739
- const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11740
- yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11741
- const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11742
- yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
11743
- break;
11744
11953
  }
11745
- default:
11746
- debug_assertNever(action);
11747
11954
  }
11748
11955
  }
11749
11956
  }
11750
11957
  function* serializeServiceUpdates(servicesUpdates, encode, blake2b) {
11751
- for (const { action, serviceId } of servicesUpdates ?? []) {
11958
+ if (servicesUpdates === undefined) {
11959
+ return;
11960
+ }
11961
+ for (const [serviceId, { action }] of servicesUpdates.entries()) {
11752
11962
  // new service being created or updated
11753
- const codec = serialize.serviceData(serviceId);
11963
+ const codec = serialize_serialize.serviceData(serviceId);
11754
11964
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, action.account)];
11755
11965
  // additional lookup history update
11756
11966
  if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
11757
11967
  const { lookupHistory } = action;
11758
- const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
11968
+ const codec2 = serialize_serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
11759
11969
  yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
11760
11970
  }
11761
11971
  }
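The three service-level generators above no longer take flat arrays tagged with a `serviceId`; each iterates a Map keyed by service id (`update.updated`, `update.preimages`, `update.storage`) and treats an `undefined` map as nothing to do. A minimal sketch of the shapes they appear to expect — the interface is inferred from the destructuring above, not taken from the package's type declarations:

    // Inferred shapes, for illustration only.
    type ServiceId = number;

    interface ServicesUpdate {
      updated?: Map<ServiceId, { action: { kind: string; account: unknown } }>;
      preimages?: Map<ServiceId, Array<{ action: { kind: string } }>>;
      storage?: Map<ServiceId, Array<{ action: { kind: string } }>>;
      removed?: Iterable<ServiceId>;
    }

    // Grouping per-service entries is now the producer's job, e.g.:
    function pushStorageUpdate(
      storage: Map<ServiceId, Array<{ action: { kind: string } }>>,
      serviceId: ServiceId,
      entry: { action: { kind: string } },
    ): void {
      const perService = storage.get(serviceId) ?? [];
      perService.push(entry);
      storage.set(serviceId, perService);
    }

Keying by service id batches all updates for one service without repeating the id on every entry.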
@@ -11765,53 +11975,53 @@ function* serializeBasicKeys(spec, update) {
11765
11975
  return [StateEntryUpdateAction.Insert, codec.key, encoder_Encoder.encodeObject(codec.Codec, val, spec)];
11766
11976
  }
11767
11977
  if (update.authPools !== undefined) {
11768
- yield doSerialize(update.authPools, serialize.authPools); // C(1)
11978
+ yield doSerialize(update.authPools, serialize_serialize.authPools); // C(1)
11769
11979
  }
11770
11980
  if (update.authQueues !== undefined) {
11771
- yield doSerialize(update.authQueues, serialize.authQueues); // C(2)
11981
+ yield doSerialize(update.authQueues, serialize_serialize.authQueues); // C(2)
11772
11982
  }
11773
11983
  if (update.recentBlocks !== undefined) {
11774
- yield doSerialize(update.recentBlocks, serialize.recentBlocks); // C(3)
11984
+ yield doSerialize(update.recentBlocks, serialize_serialize.recentBlocks); // C(3)
11775
11985
  }
11776
11986
  const safroleData = getSafroleData(update.nextValidatorData, update.epochRoot, update.sealingKeySeries, update.ticketsAccumulator);
11777
11987
  if (safroleData !== undefined) {
11778
- yield doSerialize(safroleData, serialize.safrole); // C(4)
11988
+ yield doSerialize(safroleData, serialize_serialize.safrole); // C(4)
11779
11989
  }
11780
11990
  if (update.disputesRecords !== undefined) {
11781
- yield doSerialize(update.disputesRecords, serialize.disputesRecords); // C(5)
11991
+ yield doSerialize(update.disputesRecords, serialize_serialize.disputesRecords); // C(5)
11782
11992
  }
11783
11993
  if (update.entropy !== undefined) {
11784
- yield doSerialize(update.entropy, serialize.entropy); // C(6)
11994
+ yield doSerialize(update.entropy, serialize_serialize.entropy); // C(6)
11785
11995
  }
11786
11996
  if (update.designatedValidatorData !== undefined) {
11787
- yield doSerialize(update.designatedValidatorData, serialize.designatedValidators); // C(7)
11997
+ yield doSerialize(update.designatedValidatorData, serialize_serialize.designatedValidators); // C(7)
11788
11998
  }
11789
11999
  if (update.currentValidatorData !== undefined) {
11790
- yield doSerialize(update.currentValidatorData, serialize.currentValidators); // C(8)
12000
+ yield doSerialize(update.currentValidatorData, serialize_serialize.currentValidators); // C(8)
11791
12001
  }
11792
12002
  if (update.previousValidatorData !== undefined) {
11793
- yield doSerialize(update.previousValidatorData, serialize.previousValidators); // C(9)
12003
+ yield doSerialize(update.previousValidatorData, serialize_serialize.previousValidators); // C(9)
11794
12004
  }
11795
12005
  if (update.availabilityAssignment !== undefined) {
11796
- yield doSerialize(update.availabilityAssignment, serialize.availabilityAssignment); // C(10)
12006
+ yield doSerialize(update.availabilityAssignment, serialize_serialize.availabilityAssignment); // C(10)
11797
12007
  }
11798
12008
  if (update.timeslot !== undefined) {
11799
- yield doSerialize(update.timeslot, serialize.timeslot); // C(11)
12009
+ yield doSerialize(update.timeslot, serialize_serialize.timeslot); // C(11)
11800
12010
  }
11801
12011
  if (update.privilegedServices !== undefined) {
11802
- yield doSerialize(update.privilegedServices, serialize.privilegedServices); // C(12)
12012
+ yield doSerialize(update.privilegedServices, serialize_serialize.privilegedServices); // C(12)
11803
12013
  }
11804
12014
  if (update.statistics !== undefined) {
11805
- yield doSerialize(update.statistics, serialize.statistics); // C(13)
12015
+ yield doSerialize(update.statistics, serialize_serialize.statistics); // C(13)
11806
12016
  }
11807
12017
  if (update.accumulationQueue !== undefined) {
11808
- yield doSerialize(update.accumulationQueue, serialize.accumulationQueue); // C(14)
12018
+ yield doSerialize(update.accumulationQueue, serialize_serialize.accumulationQueue); // C(14)
11809
12019
  }
11810
12020
  if (update.recentlyAccumulated !== undefined) {
11811
- yield doSerialize(update.recentlyAccumulated, serialize.recentlyAccumulated); // C(15)
12021
+ yield doSerialize(update.recentlyAccumulated, serialize_serialize.recentlyAccumulated); // C(15)
11812
12022
  }
11813
12023
  if (update.accumulationOutputLog !== undefined) {
11814
- yield doSerialize(update.accumulationOutputLog, serialize.accumulationOutputLog); // C(16)
12024
+ yield doSerialize(update.accumulationOutputLog, serialize_serialize.accumulationOutputLog); // C(16)
11815
12025
  }
11816
12026
  }
11817
12027
  function getSafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator) {
@@ -11924,41 +12134,41 @@ function convertInMemoryStateToDictionary(spec, blake2b, state) {
11924
12134
  function doSerialize(codec) {
11925
12135
  serialized.set(codec.key, encoder_Encoder.encodeObject(codec.Codec, codec.extract(state), spec));
11926
12136
  }
11927
- doSerialize(serialize.authPools); // C(1)
11928
- doSerialize(serialize.authQueues); // C(2)
11929
- doSerialize(serialize.recentBlocks); // C(3)
11930
- doSerialize(serialize.safrole); // C(4)
11931
- doSerialize(serialize.disputesRecords); // C(5)
11932
- doSerialize(serialize.entropy); // C(6)
11933
- doSerialize(serialize.designatedValidators); // C(7)
11934
- doSerialize(serialize.currentValidators); // C(8)
11935
- doSerialize(serialize.previousValidators); // C(9)
11936
- doSerialize(serialize.availabilityAssignment); // C(10)
11937
- doSerialize(serialize.timeslot); // C(11)
11938
- doSerialize(serialize.privilegedServices); // C(12)
11939
- doSerialize(serialize.statistics); // C(13)
11940
- doSerialize(serialize.accumulationQueue); // C(14)
11941
- doSerialize(serialize.recentlyAccumulated); // C(15)
11942
- doSerialize(serialize.accumulationOutputLog); // C(16)
12137
+ doSerialize(serialize_serialize.authPools); // C(1)
12138
+ doSerialize(serialize_serialize.authQueues); // C(2)
12139
+ doSerialize(serialize_serialize.recentBlocks); // C(3)
12140
+ doSerialize(serialize_serialize.safrole); // C(4)
12141
+ doSerialize(serialize_serialize.disputesRecords); // C(5)
12142
+ doSerialize(serialize_serialize.entropy); // C(6)
12143
+ doSerialize(serialize_serialize.designatedValidators); // C(7)
12144
+ doSerialize(serialize_serialize.currentValidators); // C(8)
12145
+ doSerialize(serialize_serialize.previousValidators); // C(9)
12146
+ doSerialize(serialize_serialize.availabilityAssignment); // C(10)
12147
+ doSerialize(serialize_serialize.timeslot); // C(11)
12148
+ doSerialize(serialize_serialize.privilegedServices); // C(12)
12149
+ doSerialize(serialize_serialize.statistics); // C(13)
12150
+ doSerialize(serialize_serialize.accumulationQueue); // C(14)
12151
+ doSerialize(serialize_serialize.recentlyAccumulated); // C(15)
12152
+ doSerialize(serialize_serialize.accumulationOutputLog); // C(16)
11943
12153
  // services
11944
12154
  for (const [serviceId, service] of state.services.entries()) {
11945
12155
  // data
11946
- const { key, Codec } = serialize.serviceData(serviceId);
12156
+ const { key, Codec } = serialize_serialize.serviceData(serviceId);
11947
12157
  serialized.set(key, encoder_Encoder.encodeObject(Codec, service.getInfo()));
11948
12158
  // preimages
11949
12159
  for (const preimage of service.data.preimages.values()) {
11950
- const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
12160
+ const { key, Codec } = serialize_serialize.servicePreimages(blake2b, serviceId, preimage.hash);
11951
12161
  serialized.set(key, encoder_Encoder.encodeObject(Codec, preimage.blob));
11952
12162
  }
11953
12163
  // storage
11954
12164
  for (const storage of service.data.storage.values()) {
11955
- const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
12165
+ const { key, Codec } = serialize_serialize.serviceStorage(blake2b, serviceId, storage.key);
11956
12166
  serialized.set(key, encoder_Encoder.encodeObject(Codec, storage.value));
11957
12167
  }
11958
12168
  // lookup history
11959
12169
  for (const lookupHistoryList of service.data.lookupHistory.values()) {
11960
12170
  for (const lookupHistory of lookupHistoryList) {
11961
- const { key, Codec } = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
12171
+ const { key, Codec } = serialize_serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
11962
12172
  serialized.set(key, encoder_Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
11963
12173
  }
11964
12174
  }
@@ -12008,6 +12218,7 @@ function loadState(spec, blake2b, entries) {
12008
12218
 
12009
12219
 
12010
12220
 
12221
+
12011
12222
  ;// CONCATENATED MODULE: ./packages/jam/database/leaf-db.ts
12012
12223
 
12013
12224
 
@@ -12034,13 +12245,13 @@ class LeafDb {
12034
12245
  */
12035
12246
  static fromLeavesBlob(blob, db) {
12036
12247
  if (blob.length % TRIE_NODE_BYTES !== 0) {
12037
- return result_Result.error(LeafDbError.InvalidLeafData, `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
12248
+ return result_Result.error(LeafDbError.InvalidLeafData, () => `${blob.length} is not a multiply of ${TRIE_NODE_BYTES}: ${blob}`);
12038
12249
  }
12039
12250
  const leaves = SortedSet.fromArray(leafComparator, []);
12040
12251
  for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
12041
12252
  const node = new TrieNode(nodeData.raw);
12042
12253
  if (node.getNodeType() === NodeType.Branch) {
12043
- return result_Result.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
12254
+ return result_Result.error(LeafDbError.InvalidLeafData, () => `Branch node detected: ${nodeData}`);
12044
12255
  }
12045
12256
  leaves.insert(node.asLeafNode());
12046
12257
  }
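The two failure branches above now pass the human-readable details as a thunk (`() => ...`), so the message is only formatted when the failure is actually displayed. A self-contained sketch of the pattern; the generic result type and the constants are placeholders, not the bundle's `result_Result`:

    // Illustrative lazy-details result, not the package's own implementation.
    type LazyResult<T, E> =
      | { isOk: true; ok: T }
      | { isOk: false; error: E; details: () => string };

    function err<E>(error: E, details: () => string): LazyResult<never, E> {
      return { isOk: false, error, details };
    }

    // Hypothetical values mirroring the InvalidLeafData branch above.
    const TRIE_NODE_BYTES = 64;
    const blobLength = 100;

    const res = err("InvalidLeafData", () => `${blobLength} is not a multiple of ${TRIE_NODE_BYTES}`);
    if (!res.isOk) {
      // The interpolation above only runs here, when details() is invoked.
      console.error(`${res.details()} (error: ${res.error})`);
    }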
@@ -12166,7 +12377,7 @@ const codecMap = (value, extractKey, { typicalLength = TYPICAL_DICTIONARY_LENGTH
12166
12377
  const lookupHistoryItemCodec = descriptors_codec.object({
12167
12378
  hash: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
12168
12379
  length: descriptors_codec.u32,
12169
- slots: readonlyArray(descriptors_codec.sequenceVarLen(descriptors_codec.u32.asOpaque())).convert(seeThrough, tryAsLookupHistorySlots),
12380
+ slots: descriptors_readonlyArray(descriptors_codec.sequenceVarLen(descriptors_codec.u32.asOpaque())).convert(seeThrough, tryAsLookupHistorySlots),
12170
12381
  }, "LookupHistoryItem", ({ hash, length, slots }) => new service_LookupHistoryItem(hash, length, slots));
12171
12382
  const lookupHistoryEntryCodec = descriptors_codec.object({
12172
12383
  key: descriptors_codec.bytes(hash_HASH_SIZE).asOpaque(),
@@ -12209,7 +12420,11 @@ class ServiceWithCodec extends InMemoryService {
12209
12420
  return new ServiceWithCodec(serviceId, data);
12210
12421
  }
12211
12422
  }
12212
- const in_memory_state_codec_inMemoryStateCodec = descriptors_codec.Class(InMemoryState, {
12423
+ const in_memory_state_codec_inMemoryStateCodec = (spec) => codec.Class(class State extends InMemoryState {
12424
+ static create(data) {
12425
+ return InMemoryState.new(spec, data);
12426
+ }
12427
+ }, {
12213
12428
  // alpha
12214
12429
  authPools: serialize.authPools.Codec,
12215
12430
  // phi
@@ -12219,11 +12434,11 @@ const in_memory_state_codec_inMemoryStateCodec = descriptors_codec.Class(InMemor
12219
12434
  // gamma_k
12220
12435
  nextValidatorData: codecPerValidator(ValidatorData.Codec),
12221
12436
  // gamma_z
12222
- epochRoot: descriptors_codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
12437
+ epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
12223
12438
  // gamma_s
12224
12439
  sealingKeySeries: SafroleSealingKeysData.Codec,
12225
12440
  // gamma_a
12226
- ticketsAccumulator: readonlyArray(descriptors_codec.sequenceVarLen(Ticket.Codec)).convert((x) => x, sized_array_asKnownSize),
12441
+ ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert((x) => x, asKnownSize),
12227
12442
  // psi
12228
12443
  disputesRecords: serialize.disputesRecords.Codec,
12229
12444
  // eta
@@ -12249,7 +12464,7 @@ const in_memory_state_codec_inMemoryStateCodec = descriptors_codec.Class(InMemor
12249
12464
  // theta
12250
12465
  accumulationOutputLog: serialize.accumulationOutputLog.Codec,
12251
12466
  // delta
12252
- services: descriptors_codec.dictionary(descriptors_codec.u32.asOpaque(), ServiceWithCodec.Codec, {
12467
+ services: codec.dictionary(codec.u32.asOpaque(), ServiceWithCodec.Codec, {
12253
12468
  sortKeys: (a, b) => a - b,
12254
12469
  }),
12255
12470
  });
@@ -12296,7 +12511,7 @@ class InMemoryStates {
12296
12511
  }
12297
12512
  /** Insert a full state into the database. */
12298
12513
  async insertState(headerHash, state) {
12299
- const encoded = Encoder.encodeObject(inMemoryStateCodec, state, this.spec);
12514
+ const encoded = Encoder.encodeObject(inMemoryStateCodec(this.spec), state, this.spec);
12300
12515
  this.db.set(headerHash, encoded);
12301
12516
  return Result.ok(OK);
12302
12517
  }
@@ -12305,7 +12520,7 @@ class InMemoryStates {
12305
12520
  if (encodedState === undefined) {
12306
12521
  return null;
12307
12522
  }
12308
- return Decoder.decodeObject(inMemoryStateCodec, encodedState, this.spec);
12523
+ return Decoder.decodeObject(inMemoryStateCodec(this.spec), encodedState, this.spec);
12309
12524
  }
12310
12525
  }
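The in-memory state codec is no longer a shared constant: it is a factory over the chain spec, so `insertState` and `getState` both construct it as `inMemoryStateCodec(this.spec)`, and the class created inside the codec can forward the spec to `InMemoryState.new`. A small sketch of that factory shape with stand-in types (none of the names below are the bundle's actual exports):

    // Stand-in types, for illustration only.
    interface ChainSpec { validatorsCount: number }
    interface Descriptor<T> { encode(value: T): Uint8Array }

    // A codec that needs the spec at construction time is exposed as a factory...
    const stateDescriptor = (spec: ChainSpec): Descriptor<{ services: unknown[] }> => ({
      // Placeholder payload; the real codec serializes every state component.
      encode: (value) => new Uint8Array(value.services.length * spec.validatorsCount),
    });

    // ...and call sites materialize it per use instead of sharing one constant:
    const spec: ChainSpec = { validatorsCount: 6 }; // hypothetical spec
    const encoded = stateDescriptor(spec).encode({ services: [] });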
12311
12526
 
@@ -12755,7 +12970,7 @@ class LmdbStates {
12755
12970
  }
12756
12971
  catch (e) {
12757
12972
  logger.error `${e}`;
12758
- return result_Result.error(StateUpdateError.Commit);
12973
+ return result_Result.error(StateUpdateError.Commit, () => `Failed to commit state update: ${e}`);
12759
12974
  }
12760
12975
  return result_Result.ok(result_OK);
12761
12976
  }
@@ -13492,21 +13707,21 @@ class TransitionHasher {
13492
13707
  */
13493
13708
  extrinsic(extrinsicView) {
13494
13709
  // https://graypaper.fluffylabs.dev/#/cc517d7/0cfb000cfb00?v=0.6.5
13495
- const guarantees = extrinsicView.guarantees
13710
+ const guaranteesCount = numbers_tryAsU32(extrinsicView.guarantees.view().length);
13711
+ const countEncoded = encoder_Encoder.encodeObject(descriptors_codec.varU32, guaranteesCount);
13712
+ const guaranteesBlobs = extrinsicView.guarantees
13496
13713
  .view()
13497
13714
  .map((g) => g.view())
13498
- .map((guarantee) => {
13715
+ .reduce((aggregated, guarantee) => {
13499
13716
  const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque();
13500
- return bytes_BytesBlob.blobFromParts([
13501
- reportHash.raw,
13502
- guarantee.slot.encoded().raw,
13503
- guarantee.credentials.encoded().raw,
13504
- ]);
13505
- });
13506
- const guaranteeBlob = encoder_Encoder.encodeObject(descriptors_codec.sequenceVarLen(dumpCodec), guarantees, this.context);
13717
+ aggregated.push(reportHash.raw);
13718
+ aggregated.push(guarantee.slot.encoded().raw);
13719
+ aggregated.push(guarantee.credentials.encoded().raw);
13720
+ return aggregated;
13721
+ }, [countEncoded.raw]);
13507
13722
  const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque();
13508
13723
  const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque();
13509
- const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque();
13724
+ const eg = this.blake2b.hashBlobs(guaranteesBlobs).asOpaque();
13510
13725
  const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque();
13511
13726
  const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque();
13512
13727
  const encoded = bytes_BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);
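Rather than encoding every guarantee into its own blob and then encoding the list with `sequenceVarLen`, the hasher now assembles the byte stream by hand: the varU32 count first, then each guarantee's report hash, slot, and credentials, all handed to `hashBlobs` in a single call, so the concatenated guarantee blob is never materialized. A hedged sketch of that shape (the types and the `encodeVarU32` helper are placeholders, not the bundle's API):

    // Hypothetical sketch of the hand-rolled "count prefix + per-item parts" hashing.
    type Bytes = Uint8Array;

    interface Hasher {
      hashBlobs(parts: Bytes[]): Bytes;
    }

    declare function encodeVarU32(value: number): Bytes; // stand-in for the varU32 codec

    function hashGuarantees(
      hasher: Hasher,
      guarantees: Array<{ reportHash: Bytes; slot: Bytes; credentials: Bytes }>,
    ): Bytes {
      // Length prefix first, as a variable-length sequence encoding would emit...
      const parts: Bytes[] = [encodeVarU32(guarantees.length)];
      // ...then each guarantee's fields in order, with no intermediate concatenation.
      for (const g of guarantees) {
        parts.push(g.reportHash, g.slot, g.credentials);
      }
      return hasher.hashBlobs(parts);
    }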
@@ -13554,32 +13769,33 @@ class Preimages {
13554
13769
  }
13555
13770
  if (prevPreimage.requester > currPreimage.requester ||
13556
13771
  currPreimage.blob.compare(prevPreimage.blob).isLessOrEqual()) {
13557
- return Result.error(PreimagesErrorCode.PreimagesNotSortedUnique);
13772
+ return Result.error(PreimagesErrorCode.PreimagesNotSortedUnique, () => `Preimages not sorted/unique at index ${i}`);
13558
13773
  }
13559
13774
  }
13560
13775
  const { preimages, slot } = input;
13561
- const pendingChanges = [];
13776
+ const pendingChanges = new Map();
13562
13777
  // select preimages for integration
13563
13778
  for (const preimage of preimages) {
13564
13779
  const { requester, blob } = preimage;
13565
13780
  const hash = this.blake2b.hashBytes(blob).asOpaque();
13566
13781
  const service = this.state.getService(requester);
13567
13782
  if (service === null) {
13568
- return Result.error(PreimagesErrorCode.AccountNotFound);
13783
+ return Result.error(PreimagesErrorCode.AccountNotFound, () => `Service not found: ${requester}`);
13569
13784
  }
13570
13785
  const hasPreimage = service.hasPreimage(hash);
13571
13786
  const slots = service.getLookupHistory(hash, tryAsU32(blob.length));
13572
13787
  // https://graypaper.fluffylabs.dev/#/5f542d7/181800181900
13573
13788
  // https://graypaper.fluffylabs.dev/#/5f542d7/116f0011a500
13574
13789
  if (hasPreimage || slots === null || !LookupHistoryItem.isRequested(slots)) {
13575
- return Result.error(PreimagesErrorCode.PreimageUnneeded);
13790
+ return Result.error(PreimagesErrorCode.PreimageUnneeded, () => `Preimage unneeded: requester=${requester}, hash=${hash}, hasPreimage=${hasPreimage}, isRequested=${slots !== null && LookupHistoryItem.isRequested(slots)}`);
13576
13791
  }
13577
13792
  // https://graypaper.fluffylabs.dev/#/5f542d7/18c00018f300
13578
- pendingChanges.push(UpdatePreimage.provide({
13579
- serviceId: requester,
13793
+ const updates = pendingChanges.get(requester) ?? [];
13794
+ updates.push(UpdatePreimage.provide({
13580
13795
  preimage: PreimageItem.create({ hash, blob }),
13581
13796
  slot,
13582
13797
  }));
13798
+ pendingChanges.set(requester, updates);
13583
13799
  }
13584
13800
  return Result.ok({
13585
13801
  preimages: pendingChanges,