@typeberry/lib 0.2.0-e767e74 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.cjs +758 -911
  2. package/index.d.ts +697 -980
  3. package/index.js +758 -911
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -2427,15 +2427,11 @@ type ClassConstructor<T> = {
2427
2427
  create: (o: CodecRecord<T>) => T;
2428
2428
  };
2429
2429
 
2430
- /** A full codec type, i.e. the `Encode` and `Decode`. */
2430
+ /**
2431
+ * A full codec type, i.e. the `Encode` and `Decode`.
2432
+ */
2431
2433
  type Codec<T> = Encode<T> & Decode<T>;
2432
2434
 
2433
- /** A codec descriptor with extra view. */
2434
- type CodecWithView<T, V> = Codec<T> & {
2435
- /** encoded data view codec. */
2436
- View: Codec<V>;
2437
- };
2438
-
2439
2435
  /**
2440
2436
  * Type descriptor definition.
2441
2437
  *
@@ -2444,7 +2440,7 @@ type CodecWithView<T, V> = Codec<T> & {
2444
2440
  *
2445
2441
  * Descriptors can be composed to form more complex typings.
2446
2442
  */
2447
- declare class Descriptor<T, V = T> implements Codec<T>, Skip, CodecWithView<T, V> {
2443
+ declare class Descriptor<T, V = T> implements Codec<T>, Skip {
2448
2444
  /** A "lightweight" version of the object. */
2449
2445
  public readonly View: Descriptor<V>;
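The standalone `CodecWithView<T, V>` helper type is gone in 0.2.0, but `Descriptor<T, V>` keeps the same `View` property, so code that was typed against `CodecWithView` can type against `Descriptor` directly. A minimal migration sketch, assuming `Decoder`, `Descriptor` and `BytesBlob` are exported from the package root and that `Decoder.decodeObject` accepts the encoded bytes without a chain spec (the helper name `decodeAsView` is illustrative, not part of the package):

    import { BytesBlob, Decoder, Descriptor } from "@typeberry/lib";

    // Anything that used to take a CodecWithView<T, V> can take a Descriptor<T, V>:
    // `View` is still a Descriptor and can be decoded on its own.
    function decodeAsView<T, V>(desc: Descriptor<T, V>, encoded: BytesBlob): V {
      return Decoder.decodeObject(desc.View, encoded);
    }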
2450
2446
 
@@ -2690,10 +2686,6 @@ declare abstract class ObjectView<T> {
2690
2686
  toString() {
2691
2687
  return `View<${this.materializedConstructor.name}>(cache: ${this.cache.size})`;
2692
2688
  }
2693
-
2694
- [TEST_COMPARE_USING]() {
2695
- return this.materialize();
2696
- }
2697
2689
  }
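With the `TEST_COMPARE_USING` hook removed from `ObjectView`, tests that relied on views being compared by their materialized value can materialize explicitly. A sketch, assuming `ObjectView` is exported from the package root and `materialize()` is callable on it (the assertion helper below is illustrative):

    import assert from "node:assert";
    import { ObjectView } from "@typeberry/lib";

    // Compare a lazily-decoded view against an expected plain object.
    function assertViewEquals<T>(view: ObjectView<T>, expected: T) {
      assert.deepStrictEqual(view.materialize(), expected);
    }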
2698
2690
 
2699
2691
  /**
@@ -3245,25 +3237,15 @@ declare namespace codec$1 {
3245
3237
  sizeHint: SizeHint;
3246
3238
  },
3247
3239
  chooser: (ctx: unknown | null) => Descriptor<T, V>,
3248
- ): Descriptor<T, V> => {
3249
- const Self = chooser(null);
3250
- return Descriptor.withView(
3240
+ ): Descriptor<T, V> =>
3241
+ Descriptor.withView(
3251
3242
  name,
3252
3243
  sizeHint,
3253
3244
  (e, x) => chooser(e.getContext()).encode(e, x),
3254
3245
  (d) => chooser(d.getContext()).decode(d),
3255
3246
  (s) => chooser(s.decoder.getContext()).skip(s),
3256
- hasUniqueView(Self)
3257
- ? select(
3258
- {
3259
- name: Self.View.name,
3260
- sizeHint: Self.View.sizeHint,
3261
- },
3262
- (ctx) => chooser(ctx).View,
3263
- )
3264
- : Self.View,
3247
+ chooser(null).View,
3265
3248
  );
3266
- };
3267
3249
 
3268
3250
  /**
3269
3251
  * A descriptor for a more complex POJO.
@@ -3457,7 +3439,6 @@ declare function sequenceViewFixLen<T, V>(
3457
3439
  type index$q_ClassConstructor<T> = ClassConstructor<T>;
3458
3440
  type index$q_Codec<T> = Codec<T>;
3459
3441
  type index$q_CodecRecord<T> = CodecRecord<T>;
3460
- type index$q_CodecWithView<T, V> = CodecWithView<T, V>;
3461
3442
  declare const index$q_DEFAULT_START_LENGTH: typeof DEFAULT_START_LENGTH;
3462
3443
  type index$q_Decode<T> = Decode<T>;
3463
3444
  type index$q_Decoder = Decoder;
@@ -3498,7 +3479,7 @@ declare const index$q_tryAsExactBytes: typeof tryAsExactBytes;
3498
3479
  declare const index$q_validateLength: typeof validateLength;
3499
3480
  declare namespace index$q {
3500
3481
  export { index$q_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$q_Decoder as Decoder, index$q_Descriptor as Descriptor, index$q_Encoder as Encoder, index$q_MASKS as MASKS, index$q_MAX_LENGTH as MAX_LENGTH, index$q_ObjectView as ObjectView, index$q_SequenceView as SequenceView, index$q_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$q_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$q_ViewField as ViewField, index$q_addSizeHints as addSizeHints, codec$1 as codec, index$q_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$q_exactHint as exactHint, index$q_forEachDescriptor as forEachDescriptor, index$q_hasUniqueView as hasUniqueView, index$q_objectView as objectView, index$q_readonlyArray as readonlyArray, index$q_sequenceViewFixLen as sequenceViewFixLen, index$q_sequenceViewVarLen as sequenceViewVarLen, index$q_tryAsExactBytes as tryAsExactBytes, index$q_validateLength as validateLength };
3501
- export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_CodecWithView as CodecWithView, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
3482
+ export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
3502
3483
  }
3503
3484
 
3504
3485
  /**
@@ -6936,17 +6917,6 @@ declare function emptyBlock(slot: TimeSlot = tryAsTimeSlot(0)) {
6936
6917
  });
6937
6918
  }
6938
6919
 
6939
- /**
6940
- * Take an input data and re-encode that data as view.
6941
- *
6942
- * NOTE: this function should NEVER be used in any production code,
6943
- * it's only a test helper.
6944
- */
6945
- declare function reencodeAsView<T, V>(codec: Descriptor<T, V>, object: T, chainSpec?: ChainSpec): V {
6946
- const encoded = Encoder.encodeObject(codec, object, chainSpec);
6947
- return Decoder.decodeObject(codec.View, encoded, chainSpec);
6948
- }
6949
-
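Because `reencodeAsView` is dropped from the public surface, a test suite that depended on it can keep a local copy. The body below is taken from the removed declaration and only assumes `ChainSpec`, `Decoder`, `Descriptor` and `Encoder` are still exported:

    import { ChainSpec, Decoder, Descriptor, Encoder } from "@typeberry/lib";

    // Test-only helper: encode an object, then decode it back as its view type.
    function reencodeAsView<T, V>(codec: Descriptor<T, V>, object: T, chainSpec?: ChainSpec): V {
      const encoded = Encoder.encodeObject(codec, object, chainSpec);
      return Decoder.decodeObject(codec.View, encoded, chainSpec);
    }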
6950
6920
  type index$l_Block = Block;
6951
6921
  declare const index$l_Block: typeof Block;
6952
6922
  type index$l_BlockView = BlockView;
@@ -6996,7 +6966,6 @@ declare const index$l_guarantees: typeof guarantees;
6996
6966
  declare const index$l_headerViewWithHashCodec: typeof headerViewWithHashCodec;
6997
6967
  declare const index$l_legacyDescriptor: typeof legacyDescriptor;
6998
6968
  declare const index$l_preimage: typeof preimage;
6999
- declare const index$l_reencodeAsView: typeof reencodeAsView;
7000
6969
  declare const index$l_refineContext: typeof refineContext;
7001
6970
  declare const index$l_tickets: typeof tickets;
7002
6971
  declare const index$l_tryAsCoreIndex: typeof tryAsCoreIndex;
@@ -7013,7 +6982,7 @@ declare const index$l_workPackage: typeof workPackage;
7013
6982
  declare const index$l_workReport: typeof workReport;
7014
6983
  declare const index$l_workResult: typeof workResult;
7015
6984
  declare namespace index$l {
7016
- export { index$l_Block as Block, index$l_EpochMarker as EpochMarker, index$l_Extrinsic as Extrinsic, index$l_Header as Header, index$l_HeaderViewWithHash as HeaderViewWithHash, index$l_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$l_TicketsMarker as TicketsMarker, index$l_ValidatorKeys as ValidatorKeys, index$l_W_E as W_E, index$l_W_S as W_S, index$l_assurances as assurances, index$l_codecPerEpochBlock as codecPerEpochBlock, index$l_codecPerValidator as codecPerValidator, codec as codecUtils, index$l_disputes as disputes, index$l_emptyBlock as emptyBlock, index$l_encodeUnsealedHeader as encodeUnsealedHeader, index$l_guarantees as guarantees, index$l_headerViewWithHashCodec as headerViewWithHashCodec, index$l_legacyDescriptor as legacyDescriptor, index$l_preimage as preimage, index$l_reencodeAsView as reencodeAsView, index$l_refineContext as refineContext, index$l_tickets as tickets, index$l_tryAsCoreIndex as tryAsCoreIndex, index$l_tryAsEpoch as tryAsEpoch, index$l_tryAsPerEpochBlock as tryAsPerEpochBlock, index$l_tryAsPerValidator as tryAsPerValidator, index$l_tryAsSegmentIndex as tryAsSegmentIndex, index$l_tryAsServiceGas as tryAsServiceGas, index$l_tryAsServiceId as tryAsServiceId, index$l_tryAsTimeSlot as tryAsTimeSlot, index$l_tryAsValidatorIndex as tryAsValidatorIndex, index$l_workItem as workItem, index$l_workPackage as workPackage, index$l_workReport as workReport, index$l_workResult as workResult };
6985
+ export { index$l_Block as Block, index$l_EpochMarker as EpochMarker, index$l_Extrinsic as Extrinsic, index$l_Header as Header, index$l_HeaderViewWithHash as HeaderViewWithHash, index$l_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$l_TicketsMarker as TicketsMarker, index$l_ValidatorKeys as ValidatorKeys, index$l_W_E as W_E, index$l_W_S as W_S, index$l_assurances as assurances, index$l_codecPerEpochBlock as codecPerEpochBlock, index$l_codecPerValidator as codecPerValidator, codec as codecUtils, index$l_disputes as disputes, index$l_emptyBlock as emptyBlock, index$l_encodeUnsealedHeader as encodeUnsealedHeader, index$l_guarantees as guarantees, index$l_headerViewWithHashCodec as headerViewWithHashCodec, index$l_legacyDescriptor as legacyDescriptor, index$l_preimage as preimage, index$l_refineContext as refineContext, index$l_tickets as tickets, index$l_tryAsCoreIndex as tryAsCoreIndex, index$l_tryAsEpoch as tryAsEpoch, index$l_tryAsPerEpochBlock as tryAsPerEpochBlock, index$l_tryAsPerValidator as tryAsPerValidator, index$l_tryAsSegmentIndex as tryAsSegmentIndex, index$l_tryAsServiceGas as tryAsServiceGas, index$l_tryAsServiceId as tryAsServiceId, index$l_tryAsTimeSlot as tryAsTimeSlot, index$l_tryAsValidatorIndex as tryAsValidatorIndex, index$l_workItem as workItem, index$l_workPackage as workPackage, index$l_workReport as workReport, index$l_workResult as workResult };
7017
6986
  export type { index$l_BlockView as BlockView, index$l_CodeHash as CodeHash, index$l_CoreIndex as CoreIndex, index$l_EntropyHash as EntropyHash, index$l_Epoch as Epoch, index$l_EpochMarkerView as EpochMarkerView, index$l_ExtrinsicHash as ExtrinsicHash, index$l_ExtrinsicView as ExtrinsicView, index$l_HeaderHash as HeaderHash, index$l_HeaderView as HeaderView, index$l_PerEpochBlock as PerEpochBlock, index$l_PerValidator as PerValidator, index$l_SEGMENT_BYTES as SEGMENT_BYTES, index$l_Segment as Segment, index$l_SegmentIndex as SegmentIndex, index$l_ServiceGas as ServiceGas, index$l_ServiceId as ServiceId, index$l_StateRootHash as StateRootHash, index$l_TicketsMarkerView as TicketsMarkerView, index$l_TimeSlot as TimeSlot, index$l_ValidatorIndex as ValidatorIndex, index$l_WorkReportHash as WorkReportHash };
7018
6987
  }
7019
6988
 
@@ -9152,77 +9121,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
9152
9121
  return Ordering.Equal;
9153
9122
  }
9154
9123
 
9155
- /**
9156
- * `J`: The maximum sum of dependency items in a work-report.
9157
- *
9158
- * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
9159
- */
9160
- declare const MAX_REPORT_DEPENDENCIES = 8;
9161
- type MAX_REPORT_DEPENDENCIES = typeof MAX_REPORT_DEPENDENCIES;
9162
-
9163
- /**
9164
- * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
9165
- *
9166
- * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
9167
- */
9168
- declare class NotYetAccumulatedReport extends WithDebug {
9169
- static Codec = codec.Class(NotYetAccumulatedReport, {
9170
- report: WorkReport.Codec,
9171
- dependencies: codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(), {
9172
- typicalLength: MAX_REPORT_DEPENDENCIES / 2,
9173
- maxLength: MAX_REPORT_DEPENDENCIES,
9174
- minLength: 0,
9175
- }),
9176
- });
9177
-
9178
- static create({ report, dependencies }: CodecRecord<NotYetAccumulatedReport>) {
9179
- return new NotYetAccumulatedReport(report, dependencies);
9180
- }
9181
-
9182
- private constructor(
9183
- /**
9184
- * Each of these were made available at most one epoch ago
9185
- * but have or had unfulfilled dependencies.
9186
- */
9187
- readonly report: WorkReport,
9188
- /**
9189
- * Alongside the work-report itself, we retain its un-accumulated
9190
- * dependencies, a set of work-package hashes.
9191
- *
9192
- * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
9193
- */
9194
- readonly dependencies: KnownSizeArray<WorkPackageHash, `[0..${MAX_REPORT_DEPENDENCIES})`>,
9195
- ) {
9196
- super();
9197
- }
9198
- }
9199
-
9200
- /**
9201
- * Accumulation queue state entry.
9202
- */
9203
- type AccumulationQueue = PerEpochBlock<readonly NotYetAccumulatedReport[]>;
9204
-
9205
- declare const accumulationQueueCodec = codecPerEpochBlock(
9206
- readonlyArray(codec.sequenceVarLen(NotYetAccumulatedReport.Codec)),
9207
- );
9208
-
9209
- type AccumulationQueueView = DescribedBy<typeof accumulationQueueCodec.View>;
9210
-
9211
- /** One entry of kind `T` for each core. */
9212
- type PerCore<T> = KnownSizeArray<T, "number of cores">;
9213
- /** Check if given array has correct length before casting to the opaque type. */
9214
- declare function tryAsPerCore<T>(array: T[], spec: ChainSpec): PerCore<T> {
9215
- check`
9216
- ${array.length === spec.coresCount}
9217
- Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
9218
- `;
9219
- return asOpaqueType(array);
9220
- }
9221
- declare const codecPerCore = <T, V>(val: Descriptor<T, V>): Descriptor<PerCore<T>, SequenceView<T, V>> =>
9222
- codecWithContext((context) => {
9223
- return codecKnownSizeArray(val, { fixedLength: context.coresCount });
9224
- });
9225
-
9226
9124
  /**
9227
9125
  * Assignment of particular work report to a core.
9228
9126
  *
@@ -9251,39 +9149,20 @@ declare class AvailabilityAssignment extends WithDebug {
9251
9149
  }
9252
9150
  }
9253
9151
 
9254
- declare const availabilityAssignmentsCodec = codecPerCore(codec.optional(AvailabilityAssignment.Codec));
9255
-
9256
- type AvailabilityAssignmentsView = DescribedBy<typeof availabilityAssignmentsCodec.View>;
9257
-
9258
- /** `O`: Maximal authorization pool size. */
9259
- declare const MAX_AUTH_POOL_SIZE = O;
9260
- type MAX_AUTH_POOL_SIZE = typeof MAX_AUTH_POOL_SIZE;
9261
-
9262
- /** `Q`: Size of the authorization queue. */
9263
- declare const AUTHORIZATION_QUEUE_SIZE = Q;
9264
- type AUTHORIZATION_QUEUE_SIZE = typeof AUTHORIZATION_QUEUE_SIZE;
9265
-
9266
- /** A pool of authorization hashes that is filled from the queue. */
9267
- type AuthorizationPool = KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>;
9268
-
9269
- /**
9270
- * A fixed-size queue of authorization hashes used to fill up the pool.
9271
- *
9272
- * Can be set using `ASSIGN` host call in batches of `AUTHORIZATION_QUEUE_SIZE`.
9273
- */
9274
- type AuthorizationQueue = FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>;
9275
-
9276
- declare const authPoolsCodec = codecPerCore<AuthorizationPool, SequenceView<AuthorizerHash>>(
9277
- codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), {
9278
- minLength: 0,
9279
- maxLength: MAX_AUTH_POOL_SIZE,
9280
- typicalLength: MAX_AUTH_POOL_SIZE,
9281
- }),
9282
- );
9283
-
9284
- declare const authQueuesCodec = codecPerCore<AuthorizationQueue, SequenceView<AuthorizerHash>>(
9285
- codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), AUTHORIZATION_QUEUE_SIZE),
9286
- );
9152
+ /** One entry of kind `T` for each core. */
9153
+ type PerCore<T> = KnownSizeArray<T, "number of cores">;
9154
+ /** Check if given array has correct length before casting to the opaque type. */
9155
+ declare function tryAsPerCore<T>(array: T[], spec: ChainSpec): PerCore<T> {
9156
+ check`
9157
+ ${array.length === spec.coresCount}
9158
+ Invalid per-core array length. Expected ${spec.coresCount}, got: ${array.length}
9159
+ `;
9160
+ return asOpaqueType(array);
9161
+ }
9162
+ declare const codecPerCore = <T, V>(val: Descriptor<T, V>): Descriptor<PerCore<T>, SequenceView<T, V>> =>
9163
+ codecWithContext((context) => {
9164
+ return codecKnownSizeArray(val, { fixedLength: context.coresCount });
9165
+ });
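A short usage sketch for the per-core helpers shown above, relying only on the declarations in this hunk; the wrapper function name is illustrative. `tryAsPerCore` rejects (via `check`) any array whose length does not match `spec.coresCount`:

    // Build a PerCore<T> value of the right length for a given chain spec.
    function perCoreOf<T>(spec: ChainSpec, value: T): PerCore<T> {
      const entries = new Array<T>(spec.coresCount).fill(value);
      return tryAsPerCore(entries, spec);
    }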
9287
9166
 
9288
9167
  declare const sortedSetCodec = <T extends OpaqueHash>() =>
9289
9168
  readonlyArray(codec.sequenceVarLen(codec.bytes(HASH_SIZE))).convert<ImmutableSortedSet<T>>(
@@ -9366,93 +9245,415 @@ declare function hashComparator<V extends OpaqueHash>(a: V, b: V) {
9366
9245
  return a.compare(b);
9367
9246
  }
9368
9247
 
9369
- declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
9248
+ /**
9249
+ * `J`: The maximum sum of dependency items in a work-report.
9250
+ *
9251
+ * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
9252
+ */
9253
+ declare const MAX_REPORT_DEPENDENCIES = 8;
9254
+ type MAX_REPORT_DEPENDENCIES = typeof MAX_REPORT_DEPENDENCIES;
9370
9255
 
9371
- /** Merkle Mountain Range peaks. */
9372
- interface MmrPeaks<H extends OpaqueHash> {
9373
- /**
9374
- * Peaks at particular positions.
9375
- *
9376
- * In case there is no merkle trie at given index, `null` is placed.
9377
- */
9378
- peaks: readonly (H | null)[];
9379
- }
9256
+ /** `Q`: Size of the authorization queue. */
9257
+ declare const AUTHORIZATION_QUEUE_SIZE = Q;
9258
+ type AUTHORIZATION_QUEUE_SIZE = typeof AUTHORIZATION_QUEUE_SIZE;
9380
9259
 
9381
- /** Hasher interface for MMR. */
9382
- interface MmrHasher<H extends OpaqueHash> {
9383
- /** Hash two items together. */
9384
- hashConcat(a: H, b: H): H;
9385
- /** Hash two items together with extra bytes blob prepended. */
9386
- hashConcatPrepend(id: BytesBlob, a: H, b: H): H;
9387
- }
9260
+ /** `O`: Maximal authorization pool size. */
9261
+ declare const MAX_AUTH_POOL_SIZE = O;
9262
+ type MAX_AUTH_POOL_SIZE = typeof MAX_AUTH_POOL_SIZE;
9388
9263
 
9389
9264
  /**
9390
- * Merkle Mountain Range.
9265
+ * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
9391
9266
  *
9392
- * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
9267
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
9393
9268
  */
9394
- declare class MerkleMountainRange<H extends OpaqueHash> {
9395
- /** Construct an empty MMR. */
9396
- static empty<H extends OpaqueHash>(hasher: MmrHasher<H>) {
9397
- return new MerkleMountainRange(hasher);
9398
- }
9269
+ declare class NotYetAccumulatedReport extends WithDebug {
9270
+ static Codec = codec.Class(NotYetAccumulatedReport, {
9271
+ report: WorkReport.Codec,
9272
+ dependencies: codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(), {
9273
+ typicalLength: MAX_REPORT_DEPENDENCIES / 2,
9274
+ maxLength: MAX_REPORT_DEPENDENCIES,
9275
+ minLength: 0,
9276
+ }),
9277
+ });
9399
9278
 
9400
- /** Construct a new MMR from existing peaks. */
9401
- static fromPeaks<H extends OpaqueHash>(hasher: MmrHasher<H>, mmr: MmrPeaks<H>) {
9402
- return new MerkleMountainRange(
9403
- hasher,
9404
- mmr.peaks
9405
- .reduce((acc: Mountain<H>[], peak, index) => {
9406
- if (peak !== null) {
9407
- acc.push(Mountain.fromPeak(peak, 2 ** index));
9408
- }
9409
- return acc;
9410
- }, [])
9411
- .reverse(),
9412
- );
9279
+ static create({ report, dependencies }: CodecRecord<NotYetAccumulatedReport>) {
9280
+ return new NotYetAccumulatedReport(report, dependencies);
9413
9281
  }
9414
9282
 
9415
9283
  private constructor(
9416
- private readonly hasher: MmrHasher<H>,
9417
- /** Store non-empty merkle tries (mountains) ordered by descending size. */
9418
- private readonly mountains: Mountain<H>[] = [],
9419
- ) {}
9420
-
9421
- /**
9422
- * Append a new hash to the MMR structure.
9423
- *
9424
- * https://graypaper.fluffylabs.dev/#/5f542d7/3b11003b1100?v=0.6.2
9425
- */
9426
- append(hash: H) {
9427
- let newMountain = Mountain.fromPeak(hash, 1);
9284
+ /**
9285
+ * Each of these were made available at most one epoch ago
9286
+ * but have or had unfulfilled dependencies.
9287
+ */
9288
+ readonly report: WorkReport,
9289
+ /**
9290
+ * Alongside the work-report itself, we retain its un-accumulated
9291
+ * dependencies, a set of work-package hashes.
9292
+ *
9293
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
9294
+ */
9295
+ readonly dependencies: KnownSizeArray<WorkPackageHash, `[0..${MAX_REPORT_DEPENDENCIES})`>,
9296
+ ) {
9297
+ super();
9298
+ }
9299
+ }
9428
9300
 
9429
- for (;;) {
9430
- const last = this.mountains.pop();
9431
- if (last === undefined) {
9432
- this.mountains.push(newMountain);
9433
- return;
9434
- }
9301
+ /**
9302
+ * `B_S`: The basic minimum balance which all services require.
9303
+ *
9304
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
9305
+ */
9306
+ declare const BASE_SERVICE_BALANCE = 100n;
9307
+ /**
9308
+ * `B_I`: The additional minimum balance required per item of elective service state.
9309
+ *
9310
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
9311
+ */
9312
+ declare const ELECTIVE_ITEM_BALANCE = 10n;
9313
+ /**
9314
+ * `B_L`: The additional minimum balance required per octet of elective service state.
9315
+ *
9316
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
9317
+ */
9318
+ declare const ELECTIVE_BYTE_BALANCE = 1n;
9435
9319
 
9436
- if (last.size !== newMountain.size) {
9437
- this.mountains.push(last);
9438
- this.mountains.push(newMountain);
9439
- return;
9440
- }
9320
+ declare const zeroSizeHint: SizeHint = {
9321
+ bytes: 0,
9322
+ isExact: true,
9323
+ };
9441
9324
 
9442
- newMountain = last.mergeWith(this.hasher, newMountain);
9443
- }
9444
- }
9325
+ /** 0-byte read, return given default value */
9326
+ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
9327
+ Descriptor.new<T>(
9328
+ "ignoreValue",
9329
+ zeroSizeHint,
9330
+ (_e, _v) => {},
9331
+ (_d) => defaultValue,
9332
+ (_s) => {},
9333
+ );
9445
9334
 
9446
- /**
9447
- * Root of the entire structure.
9448
- *
9449
- * https://graypaper.fluffylabs.dev/#/5f542d7/3b20013b2001?v=0.6.2
9450
- */
9451
- getSuperPeakHash(): H {
9452
- if (this.mountains.length === 0) {
9453
- return Bytes.zero(HASH_SIZE).asOpaque();
9454
- }
9455
- const revMountains = this.mountains.slice().reverse();
9335
+ /** Encode and decode object with leading version number. */
9336
+ declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
9337
+ Descriptor.new<T>(
9338
+ "withVersion",
9339
+ {
9340
+ bytes: val.sizeHint.bytes + 8,
9341
+ isExact: false,
9342
+ },
9343
+ (e, v) => {
9344
+ e.varU64(0n);
9345
+ val.encode(e, v);
9346
+ },
9347
+ (d) => {
9348
+ const version = d.varU64();
9349
+ if (version !== 0n) {
9350
+ throw new Error("Non-zero version is not supported!");
9351
+ }
9352
+ return val.decode(d);
9353
+ },
9354
+ (s) => {
9355
+ s.varU64();
9356
+ val.skip(s);
9357
+ },
9358
+ );
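A hedged sketch of how the version wrapper above composes with an existing descriptor; `codec.u32` is used only as a stand-in for any `Descriptor<T>`:

    // Values are prefixed with a variable-length u64 version (always 0 on encode);
    // decoding anything with a non-zero version throws
    // "Non-zero version is not supported!".
    const versionedU32 = codecWithVersion(codec.u32);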
9359
+
9360
+ /**
9361
+ * Service account details.
9362
+ *
9363
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
9364
+ */
9365
+ declare class ServiceAccountInfo extends WithDebug {
9366
+ static Codec = codec.Class(ServiceAccountInfo, {
9367
+ codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
9368
+ balance: codec.u64,
9369
+ accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9370
+ onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9371
+ storageUtilisationBytes: codec.u64,
9372
+ gratisStorage: codec.u64,
9373
+ storageUtilisationCount: codec.u32,
9374
+ created: codec.u32.convert((x) => x, tryAsTimeSlot),
9375
+ lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
9376
+ parentService: codec.u32.convert((x) => x, tryAsServiceId),
9377
+ });
9378
+
9379
+ static create(a: CodecRecord<ServiceAccountInfo>) {
9380
+ return new ServiceAccountInfo(
9381
+ a.codeHash,
9382
+ a.balance,
9383
+ a.accumulateMinGas,
9384
+ a.onTransferMinGas,
9385
+ a.storageUtilisationBytes,
9386
+ a.gratisStorage,
9387
+ a.storageUtilisationCount,
9388
+ a.created,
9389
+ a.lastAccumulation,
9390
+ a.parentService,
9391
+ );
9392
+ }
9393
+
9394
+ /**
9395
+ * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
9396
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
9397
+ */
9398
+ static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
9399
+ const storageCost =
9400
+ BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
9401
+
9402
+ if (storageCost < 0n) {
9403
+ return tryAsU64(0);
9404
+ }
9405
+
9406
+ if (storageCost >= 2n ** 64n) {
9407
+ return tryAsU64(2n ** 64n - 1n);
9408
+ }
9409
+
9410
+ return tryAsU64(storageCost);
9411
+ }
9412
+
9413
+ private constructor(
9414
+ /** `a_c`: Hash of the service code. */
9415
+ public readonly codeHash: CodeHash,
9416
+ /** `a_b`: Current account balance. */
9417
+ public readonly balance: U64,
9418
+ /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
9419
+ public readonly accumulateMinGas: ServiceGas,
9420
+ /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
9421
+ public readonly onTransferMinGas: ServiceGas,
9422
+ /** `a_o`: Total number of octets in storage. */
9423
+ public readonly storageUtilisationBytes: U64,
9424
+ /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
9425
+ public readonly gratisStorage: U64,
9426
+ /** `a_i`: Number of items in storage. */
9427
+ public readonly storageUtilisationCount: U32,
9428
+ /** `a_r`: Creation account time slot. */
9429
+ public readonly created: TimeSlot,
9430
+ /** `a_a`: Most recent accumulation time slot. */
9431
+ public readonly lastAccumulation: TimeSlot,
9432
+ /** `a_p`: Parent service ID. */
9433
+ public readonly parentService: ServiceId,
9434
+ ) {
9435
+ super();
9436
+ }
9437
+ }
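A worked example of the threshold formula above, with illustrative numbers: 2 storage items and 100 octets with no gratis storage give `100 + 10*2 + 1*100 = 220`:

    // a_t = max(0, B_S + B_I * items + B_L * bytes - gratisStorage)
    const threshold = ServiceAccountInfo.calculateThresholdBalance(
      tryAsU32(2),     // a_i: items in storage
      tryAsU64(100n),  // a_o: octets in storage
      tryAsU64(0n),    // a_f: gratis storage
    );
    // threshold is 220 (as a U64); results below 0 clamp to 0, above 2**64 - 1 clamp to the maximum.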
9438
+
9439
+ declare class PreimageItem extends WithDebug {
9440
+ static Codec = codec.Class(PreimageItem, {
9441
+ hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
9442
+ blob: codec.blob,
9443
+ });
9444
+
9445
+ static create({ hash, blob }: CodecRecord<PreimageItem>) {
9446
+ return new PreimageItem(hash, blob);
9447
+ }
9448
+
9449
+ private constructor(
9450
+ readonly hash: PreimageHash,
9451
+ readonly blob: BytesBlob,
9452
+ ) {
9453
+ super();
9454
+ }
9455
+ }
9456
+
9457
+ type StorageKey = Opaque<BytesBlob, "storage key">;
9458
+
9459
+ declare class StorageItem extends WithDebug {
9460
+ static Codec = codec.Class(StorageItem, {
9461
+ key: codec.blob.convert(
9462
+ (i) => i,
9463
+ (o) => asOpaqueType(o),
9464
+ ),
9465
+ value: codec.blob,
9466
+ });
9467
+
9468
+ static create({ key, value }: CodecRecord<StorageItem>) {
9469
+ return new StorageItem(key, value);
9470
+ }
9471
+
9472
+ private constructor(
9473
+ readonly key: StorageKey,
9474
+ readonly value: BytesBlob,
9475
+ ) {
9476
+ super();
9477
+ }
9478
+ }
9479
+
9480
+ declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
9481
+ type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
9482
+ declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
9483
+ const knownSize = asKnownSize(items) as LookupHistorySlots;
9484
+ if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
9485
+ throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
9486
+ }
9487
+ return knownSize;
9488
+ }
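Illustrative use of the guard above; the slot values are arbitrary:

    // Accepts 0 to MAX_LOOKUP_HISTORY_SLOTS (3) timeslots and throws beyond that.
    const slots = tryAsLookupHistorySlots([tryAsTimeSlot(10), tryAsTimeSlot(42)]);
    // tryAsLookupHistorySlots([1, 2, 3, 4].map((s) => tryAsTimeSlot(s))) would throw.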
9489
+
9490
+ /** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
9491
+ declare class LookupHistoryItem {
9492
+ constructor(
9493
+ public readonly hash: PreimageHash,
9494
+ public readonly length: U32,
9495
+ /**
9496
+ * Preimage availability history as a sequence of time slots.
9497
+ * See PreimageStatus and the following GP fragment for more details.
9498
+ * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
9499
+ public readonly slots: LookupHistorySlots,
9500
+ ) {}
9501
+
9502
+ static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
9503
+ if ("slots" in item) {
9504
+ return item.slots.length === 0;
9505
+ }
9506
+ return item.length === 0;
9507
+ }
9508
+ }
9509
+
9510
+ /** Dictionary entry of services that auto-accumulate every block. */
9511
+ declare class AutoAccumulate {
9512
+ static Codec = codec.Class(AutoAccumulate, {
9513
+ service: codec.u32.asOpaque<ServiceId>(),
9514
+ gasLimit: codec.u64.asOpaque<ServiceGas>(),
9515
+ });
9516
+
9517
+ static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
9518
+ return new AutoAccumulate(service, gasLimit);
9519
+ }
9520
+
9521
+ private constructor(
9522
+ /** Service id that auto-accumulates. */
9523
+ readonly service: ServiceId,
9524
+ /** Gas limit for auto-accumulation. */
9525
+ readonly gasLimit: ServiceGas,
9526
+ ) {}
9527
+ }
9528
+
9529
+ /**
9530
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
9531
+ */
9532
+ declare class PrivilegedServices {
9533
+ /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
9534
+ static Codec = codec.Class(PrivilegedServices, {
9535
+ manager: codec.u32.asOpaque<ServiceId>(),
9536
+ assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
9537
+ delegator: codec.u32.asOpaque<ServiceId>(),
9538
+ registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
9539
+ ? codec.u32.asOpaque<ServiceId>()
9540
+ : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
9541
+ autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
9542
+ });
9543
+
9544
+ static create(a: CodecRecord<PrivilegedServices>) {
9545
+ return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
9546
+ }
9547
+
9548
+ private constructor(
9549
+ /**
9550
+ * `χ_M`: Manages alteration of χ from block to block,
9551
+ * as well as bestow services with storage deposit credits.
9552
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
9553
+ */
9554
+ readonly manager: ServiceId,
9555
+ /** `χ_V`: Managers validator keys. */
9556
+ readonly delegator: ServiceId,
9557
+ /**
9558
+ * `χ_R`: Manages the creation of services in protected range.
9559
+ *
9560
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
9561
+ */
9562
+ readonly registrar: ServiceId,
9563
+ /** `χ_A`: Manages authorization queue one for each core. */
9564
+ readonly assigners: PerCore<ServiceId>,
9565
+ /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
9566
+ readonly autoAccumulateServices: readonly AutoAccumulate[],
9567
+ ) {}
9568
+ }
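The `registrar` field above is the notable compatibility point: under Gray Paper versions before 0.7.1 the codec reads no bytes for it and substitutes a sentinel id via `ignoreValueWithDefault`. A hedged sketch of what a decoder sees in that case, assuming `Decoder.decodeObject` accepts a `BytesBlob` plus `ChainSpec` (the function name is illustrative):

    function decodePrivileged(encoded: BytesBlob, spec: ChainSpec): PrivilegedServices {
      const privileged = Decoder.decodeObject(PrivilegedServices.Codec, encoded, spec);
      // On chains running GP < 0.7.1, `privileged.registrar` comes back as the
      // sentinel tryAsServiceId(2 ** 32 - 1) rather than a value read from `encoded`.
      return privileged;
    }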
9569
+
9570
+ declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
9571
+
9572
+ /** Merkle Mountain Range peaks. */
9573
+ interface MmrPeaks<H extends OpaqueHash> {
9574
+ /**
9575
+ * Peaks at particular positions.
9576
+ *
9577
+ * In case there is no merkle trie at given index, `null` is placed.
9578
+ */
9579
+ peaks: readonly (H | null)[];
9580
+ }
9581
+
9582
+ /** Hasher interface for MMR. */
9583
+ interface MmrHasher<H extends OpaqueHash> {
9584
+ /** Hash two items together. */
9585
+ hashConcat(a: H, b: H): H;
9586
+ /** Hash two items together with extra bytes blob prepended. */
9587
+ hashConcatPrepend(id: BytesBlob, a: H, b: H): H;
9588
+ }
9589
+
9590
+ /**
9591
+ * Merkle Mountain Range.
9592
+ *
9593
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
9594
+ */
9595
+ declare class MerkleMountainRange<H extends OpaqueHash> {
9596
+ /** Construct an empty MMR. */
9597
+ static empty<H extends OpaqueHash>(hasher: MmrHasher<H>) {
9598
+ return new MerkleMountainRange(hasher);
9599
+ }
9600
+
9601
+ /** Construct a new MMR from existing peaks. */
9602
+ static fromPeaks<H extends OpaqueHash>(hasher: MmrHasher<H>, mmr: MmrPeaks<H>) {
9603
+ return new MerkleMountainRange(
9604
+ hasher,
9605
+ mmr.peaks
9606
+ .reduce((acc: Mountain<H>[], peak, index) => {
9607
+ if (peak !== null) {
9608
+ acc.push(Mountain.fromPeak(peak, 2 ** index));
9609
+ }
9610
+ return acc;
9611
+ }, [])
9612
+ .reverse(),
9613
+ );
9614
+ }
9615
+
9616
+ private constructor(
9617
+ private readonly hasher: MmrHasher<H>,
9618
+ /** Store non-empty merkle tries (mountains) ordered by descending size. */
9619
+ private readonly mountains: Mountain<H>[] = [],
9620
+ ) {}
9621
+
9622
+ /**
9623
+ * Append a new hash to the MMR structure.
9624
+ *
9625
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3b11003b1100?v=0.6.2
9626
+ */
9627
+ append(hash: H) {
9628
+ let newMountain = Mountain.fromPeak(hash, 1);
9629
+
9630
+ for (;;) {
9631
+ const last = this.mountains.pop();
9632
+ if (last === undefined) {
9633
+ this.mountains.push(newMountain);
9634
+ return;
9635
+ }
9636
+
9637
+ if (last.size !== newMountain.size) {
9638
+ this.mountains.push(last);
9639
+ this.mountains.push(newMountain);
9640
+ return;
9641
+ }
9642
+
9643
+ newMountain = last.mergeWith(this.hasher, newMountain);
9644
+ }
9645
+ }
9646
+
9647
+ /**
9648
+ * Root of the entire structure.
9649
+ *
9650
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3b20013b2001?v=0.6.2
9651
+ */
9652
+ getSuperPeakHash(): H {
9653
+ if (this.mountains.length === 0) {
9654
+ return Bytes.zero(HASH_SIZE).asOpaque();
9655
+ }
9656
+ const revMountains = this.mountains.slice().reverse();
9456
9657
  const length = revMountains.length;
9457
9658
  let lastHash = revMountains[0].peak;
9458
9659
  for (let i = 1; i < length; i++) {
@@ -9562,11 +9763,6 @@ declare class BlockState extends WithDebug {
9562
9763
  }
9563
9764
  }
9564
9765
 
9565
- /**
9566
- * Recent history of blocks.
9567
- *
9568
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
9569
- */
9570
9766
  declare class RecentBlocks extends WithDebug {
9571
9767
  static Codec = codec.Class(RecentBlocks, {
9572
9768
  blocks: codecKnownSizeArray(BlockState.Codec, {
@@ -9579,12 +9775,6 @@ declare class RecentBlocks extends WithDebug {
9579
9775
  }),
9580
9776
  });
9581
9777
 
9582
- static empty() {
9583
- return new RecentBlocks(asKnownSize([]), {
9584
- peaks: [],
9585
- });
9586
- }
9587
-
9588
9778
  static create(a: CodecRecord<RecentBlocks>) {
9589
9779
  return new RecentBlocks(a.blocks, a.accumulationLog);
9590
9780
  }
@@ -9595,449 +9785,229 @@ declare class RecentBlocks extends WithDebug {
9595
9785
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fea010fea01?v=0.6.7
9596
9786
  */
9597
9787
  public readonly blocks: BlocksState,
9598
- /**
9599
- * Accumulation output log.
9600
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0f02020f0202?v=0.6.7
9601
- */
9602
- public readonly accumulationLog: MmrPeaks<KeccakHash>,
9603
- ) {
9604
- super();
9605
- }
9606
- }
9607
-
9608
- type RecentBlocksView = DescribedBy<typeof RecentBlocks.Codec.View>;
9609
-
9610
- type RecentlyAccumulated = PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
9611
-
9612
- declare const recentlyAccumulatedCodec = codecPerEpochBlock<
9613
- ImmutableHashSet<WorkPackageHash>,
9614
- SequenceView<WorkPackageHash>
9615
- >(
9616
- codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>()).convert(
9617
- (x) => Array.from(x),
9618
- (x) => HashSet.from(x),
9619
- ),
9620
- );
9621
-
9622
- type RecentlyAccumulatedView = DescribedBy<typeof recentlyAccumulatedCodec.View>;
9623
-
9624
- /**
9625
- * Fixed size of validator metadata.
9626
- *
9627
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
9628
- */
9629
- declare const VALIDATOR_META_BYTES = 128;
9630
- type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
9631
-
9632
- /**
9633
- * Details about validators' identity.
9634
- *
9635
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
9636
- */
9637
- declare class ValidatorData extends WithDebug {
9638
- static Codec = codec.Class(ValidatorData, {
9639
- bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
9640
- ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
9641
- bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
9642
- metadata: codec.bytes(VALIDATOR_META_BYTES),
9643
- });
9644
-
9645
- static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
9646
- return new ValidatorData(bandersnatch, ed25519, bls, metadata);
9647
- }
9648
-
9649
- private constructor(
9650
- /** Bandersnatch public key. */
9651
- public readonly bandersnatch: BandersnatchKey,
9652
- /** ED25519 key data. */
9653
- public readonly ed25519: Ed25519Key,
9654
- /** BLS public key. */
9655
- public readonly bls: BlsKey,
9656
- /** Validator-defined additional metdata. */
9657
- public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
9658
- ) {
9659
- super();
9660
- }
9661
- }
9662
-
9663
- type ValidatorDataView = DescribedBy<typeof ValidatorData.Codec.View>;
9664
-
9665
- declare const validatorsDataCodec = codecPerValidator(ValidatorData.Codec);
9666
-
9667
- declare enum SafroleSealingKeysKind {
9668
- Tickets = 0,
9669
- Keys = 1,
9670
- }
9671
-
9672
- type SafroleSealingKeys =
9673
- | {
9674
- kind: SafroleSealingKeysKind.Keys;
9675
- keys: PerEpochBlock<BandersnatchKey>;
9676
- }
9677
- | {
9678
- kind: SafroleSealingKeysKind.Tickets;
9679
- tickets: PerEpochBlock<Ticket>;
9680
- };
9681
-
9682
- declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
9683
-
9684
- declare class SafroleSealingKeysData extends WithDebug {
9685
- static Codec = codecWithContext((context) => {
9686
- return codec.custom<SafroleSealingKeys>(
9687
- {
9688
- name: "SafroleSealingKeys",
9689
- sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
9690
- },
9691
- (e, x) => {
9692
- e.varU32(tryAsU32(x.kind));
9693
- if (x.kind === SafroleSealingKeysKind.Keys) {
9694
- e.sequenceFixLen(codecBandersnatchKey, x.keys);
9695
- } else {
9696
- e.sequenceFixLen(Ticket.Codec, x.tickets);
9697
- }
9698
- },
9699
- (d) => {
9700
- const epochLength = context.epochLength;
9701
- const kind = d.varU32();
9702
- if (kind === SafroleSealingKeysKind.Keys) {
9703
- const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
9704
- return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
9705
- }
9706
-
9707
- if (kind === SafroleSealingKeysKind.Tickets) {
9708
- const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
9709
- return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
9710
- }
9711
-
9712
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9713
- },
9714
- (s) => {
9715
- const kind = s.decoder.varU32();
9716
- if (kind === SafroleSealingKeysKind.Keys) {
9717
- s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
9718
- return;
9719
- }
9720
- if (kind === SafroleSealingKeysKind.Tickets) {
9721
- s.sequenceFixLen(Ticket.Codec, context.epochLength);
9722
- return;
9723
- }
9724
-
9725
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9726
- },
9727
- );
9728
- });
9729
-
9730
- static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
9731
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
9732
- }
9733
-
9734
- static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
9735
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
9736
- }
9737
-
9738
- private constructor(
9739
- readonly kind: SafroleSealingKeysKind,
9740
- readonly keys?: PerEpochBlock<BandersnatchKey>,
9741
- readonly tickets?: PerEpochBlock<Ticket>,
9742
- ) {
9743
- super();
9744
- }
9745
- }
9746
-
9747
- declare class SafroleData {
9748
- static Codec = codec.Class(SafroleData, {
9749
- nextValidatorData: codecPerValidator(ValidatorData.Codec),
9750
- epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
9751
- sealingKeySeries: SafroleSealingKeysData.Codec,
9752
- ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
9753
- });
9754
-
9755
- static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
9756
- return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
9757
- }
9758
-
9759
- private constructor(
9760
- /** gamma_k */
9761
- public readonly nextValidatorData: PerValidator<ValidatorData>,
9762
- /** gamma_z */
9763
- public readonly epochRoot: BandersnatchRingRoot,
9764
- /** gamma_s */
9765
- public readonly sealingKeySeries: SafroleSealingKeys,
9766
- /** gamma_a */
9767
- public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
9768
- ) {}
9769
- }
9770
-
9771
- type SafroleDataView = DescribedBy<typeof SafroleData.Codec.View>;
9772
-
9773
- /**
9774
- * `B_S`: The basic minimum balance which all services require.
9775
- *
9776
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
9777
- */
9778
- declare const BASE_SERVICE_BALANCE = 100n;
9779
- /**
9780
- * `B_I`: The additional minimum balance required per item of elective service state.
9781
- *
9782
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
9783
- */
9784
- declare const ELECTIVE_ITEM_BALANCE = 10n;
9788
+ /**
9789
+ * Accumulation output log.
9790
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0f02020f0202?v=0.6.7
9791
+ */
9792
+ public readonly accumulationLog: MmrPeaks<KeccakHash>,
9793
+ ) {
9794
+ super();
9795
+ }
9796
+ }
9797
+
9785
9798
  /**
9786
- * `B_L`: The additional minimum balance required per octet of elective service state.
9799
+ * Recent history of blocks.
9787
9800
  *
9788
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
9801
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
9789
9802
  */
9790
- declare const ELECTIVE_BYTE_BALANCE = 1n;
9791
-
9792
- declare const zeroSizeHint: SizeHint = {
9793
- bytes: 0,
9794
- isExact: true,
9795
- };
9796
-
9797
- /** 0-byte read, return given default value */
9798
- declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
9799
- Descriptor.new<T>(
9800
- "ignoreValue",
9801
- zeroSizeHint,
9802
- (_e, _v) => {},
9803
- (_d) => defaultValue,
9804
- (_s) => {},
9805
- );
9806
-
9807
- /** Encode and decode object with leading version number. */
9808
- declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
9809
- Descriptor.new<T>(
9810
- "withVersion",
9811
- {
9812
- bytes: val.sizeHint.bytes + 8,
9813
- isExact: false,
9814
- },
9815
- (e, v) => {
9816
- e.varU64(0n);
9817
- val.encode(e, v);
9818
- },
9819
- (d) => {
9820
- const version = d.varU64();
9821
- if (version !== 0n) {
9822
- throw new Error("Non-zero version is not supported!");
9823
- }
9824
- return val.decode(d);
9803
+ declare class RecentBlocksHistory extends WithDebug {
9804
+ static Codec = Descriptor.new<RecentBlocksHistory>(
9805
+ "RecentBlocksHistory",
9806
+ RecentBlocks.Codec.sizeHint,
9807
+ (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()),
9808
+ (decoder) => {
9809
+ const recentBlocks = RecentBlocks.Codec.decode(decoder);
9810
+ return RecentBlocksHistory.create(recentBlocks);
9825
9811
  },
9826
- (s) => {
9827
- s.varU64();
9828
- val.skip(s);
9812
+ (skip) => {
9813
+ return RecentBlocks.Codec.skip(skip);
9829
9814
  },
9830
9815
  );
9831
9816
 
9832
- /**
9833
- * Service account details.
9834
- *
9835
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
9836
- */
9837
- declare class ServiceAccountInfo extends WithDebug {
9838
- static Codec = codec.Class(ServiceAccountInfo, {
9839
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
9840
- balance: codec.u64,
9841
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9842
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9843
- storageUtilisationBytes: codec.u64,
9844
- gratisStorage: codec.u64,
9845
- storageUtilisationCount: codec.u32,
9846
- created: codec.u32.convert((x) => x, tryAsTimeSlot),
9847
- lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
9848
- parentService: codec.u32.convert((x) => x, tryAsServiceId),
9849
- });
9817
+ static create(recentBlocks: RecentBlocks) {
9818
+ return new RecentBlocksHistory(recentBlocks);
9819
+ }
9850
9820
 
9851
- static create(a: CodecRecord<ServiceAccountInfo>) {
9852
- return new ServiceAccountInfo(
9853
- a.codeHash,
9854
- a.balance,
9855
- a.accumulateMinGas,
9856
- a.onTransferMinGas,
9857
- a.storageUtilisationBytes,
9858
- a.gratisStorage,
9859
- a.storageUtilisationCount,
9860
- a.created,
9861
- a.lastAccumulation,
9862
- a.parentService,
9821
+ static empty() {
9822
+ return RecentBlocksHistory.create(
9823
+ RecentBlocks.create({
9824
+ blocks: asKnownSize([]),
9825
+ accumulationLog: { peaks: [] },
9826
+ }),
9863
9827
  );
9864
9828
  }
9865
9829
 
9866
9830
  /**
9867
- * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
9868
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
9831
+ * Returns the block's BEEFY super peak.
9869
9832
  */
9870
- static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
9871
- const storageCost =
9872
- BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
9873
-
9874
- if (storageCost < 0n) {
9875
- return tryAsU64(0);
9876
- }
9877
-
9878
- if (storageCost >= 2n ** 64n) {
9879
- return tryAsU64(2n ** 64n - 1n);
9880
- }
9881
-
9882
- return tryAsU64(storageCost);
9833
+ static accumulationResult(block: BlockState): KeccakHash {
9834
+ return (block as BlockState).accumulationResult;
9883
9835
  }
9884
9836
 
9885
- private constructor(
9886
- /** `a_c`: Hash of the service code. */
9887
- public readonly codeHash: CodeHash,
9888
- /** `a_b`: Current account balance. */
9889
- public readonly balance: U64,
9890
- /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
9891
- public readonly accumulateMinGas: ServiceGas,
9892
- /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
9893
- public readonly onTransferMinGas: ServiceGas,
9894
- /** `a_o`: Total number of octets in storage. */
9895
- public readonly storageUtilisationBytes: U64,
9896
- /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
9897
- public readonly gratisStorage: U64,
9898
- /** `a_i`: Number of items in storage. */
9899
- public readonly storageUtilisationCount: U32,
9900
- /** `a_r`: Creation account time slot. */
9901
- public readonly created: TimeSlot,
9902
- /** `a_a`: Most recent accumulation time slot. */
9903
- public readonly lastAccumulation: TimeSlot,
9904
- /** `a_p`: Parent service ID. */
9905
- public readonly parentService: ServiceId,
9906
- ) {
9837
+ private constructor(private readonly current: RecentBlocks | null) {
9907
9838
  super();
9908
9839
  }
9909
- }
9910
9840
 
9911
- type ServiceAccountInfoView = DescribedBy<typeof ServiceAccountInfo.Codec.View>;
9841
+ /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
9842
+ get blocks(): readonly BlockState[] {
9843
+ if (this.current !== null) {
9844
+ return this.current.blocks;
9845
+ }
9912
9846
 
9913
- declare class PreimageItem extends WithDebug {
9914
- static Codec = codec.Class(PreimageItem, {
9915
- hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
9916
- blob: codec.blob,
9917
- });
9847
+ throw new Error("RecentBlocksHistory is in invalid state");
9848
+ }
9918
9849
 
9919
- static create({ hash, blob }: CodecRecord<PreimageItem>) {
9920
- return new PreimageItem(hash, blob);
9850
+ asCurrent() {
9851
+ if (this.current === null) {
9852
+ throw new Error("Cannot access current RecentBlocks format");
9853
+ }
9854
+ return this.current;
9921
9855
  }
9922
9856
 
9923
- private constructor(
9924
- readonly hash: PreimageHash,
9925
- readonly blob: BytesBlob,
9926
- ) {
9927
- super();
9857
+ updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
9858
+ if (this.current !== null) {
9859
+ return RecentBlocksHistory.create(
9860
+ RecentBlocks.create({
9861
+ ...this.current,
9862
+ blocks: asOpaqueType(blocks as BlockState[]),
9863
+ }),
9864
+ );
9865
+ }
9866
+
9867
+ throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
9928
9868
  }
9929
9869
  }
9930
9870
 
9931
- type StorageKey = Opaque<BytesBlob, "storage key">;
9871
+ /**
9872
+ * Fixed size of validator metadata.
9873
+ *
9874
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
9875
+ */
9876
+ declare const VALIDATOR_META_BYTES = 128;
9877
+ type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
9932
9878
 
9933
- declare class StorageItem extends WithDebug {
9934
- static Codec = codec.Class(StorageItem, {
9935
- key: codec.blob.convert(
9936
- (i) => i,
9937
- (o) => asOpaqueType(o),
9938
- ),
9939
- value: codec.blob,
9879
+ /**
9880
+ * Details about validators' identity.
9881
+ *
9882
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
9883
+ */
9884
+ declare class ValidatorData extends WithDebug {
9885
+ static Codec = codec.Class(ValidatorData, {
9886
+ bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
9887
+ ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
9888
+ bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
9889
+ metadata: codec.bytes(VALIDATOR_META_BYTES),
9940
9890
  });
9941
9891
 
9942
- static create({ key, value }: CodecRecord<StorageItem>) {
9943
- return new StorageItem(key, value);
9892
+ static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
9893
+ return new ValidatorData(bandersnatch, ed25519, bls, metadata);
9944
9894
  }
9945
9895
 
9946
9896
  private constructor(
9947
- readonly key: StorageKey,
9948
- readonly value: BytesBlob,
9897
+ /** Bandersnatch public key. */
9898
+ public readonly bandersnatch: BandersnatchKey,
9899
+ /** ED25519 key data. */
9900
+ public readonly ed25519: Ed25519Key,
9901
+ /** BLS public key. */
9902
+ public readonly bls: BlsKey,
9903
+ /** Validator-defined additional metdata. */
9904
+ public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
9949
9905
  ) {
9950
9906
  super();
9951
9907
  }
9952
9908
  }
9953
9909
 
9954
- declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
9955
- type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
9956
- declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
9957
- const knownSize = asKnownSize(items) as LookupHistorySlots;
9958
- if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
9959
- throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
9960
- }
9961
- return knownSize;
9910
+ declare enum SafroleSealingKeysKind {
9911
+ Tickets = 0,
9912
+ Keys = 1,
9962
9913
  }
9963
9914
 
9964
- /** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
9965
- declare class LookupHistoryItem {
9966
- constructor(
9967
- public readonly hash: PreimageHash,
9968
- public readonly length: U32,
9969
- /**
9970
- * Preimage availability history as a sequence of time slots.
9971
- * See PreimageStatus and the following GP fragment for more details.
9972
- * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
9973
- public readonly slots: LookupHistorySlots,
9974
- ) {}
9975
-
9976
- static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
9977
- if ("slots" in item) {
9978
- return item.slots.length === 0;
9915
+ type SafroleSealingKeys =
9916
+ | {
9917
+ kind: SafroleSealingKeysKind.Keys;
9918
+ keys: PerEpochBlock<BandersnatchKey>;
9979
9919
  }
9980
- return item.length === 0;
9981
- }
9982
- }
9920
+ | {
9921
+ kind: SafroleSealingKeysKind.Tickets;
9922
+ tickets: PerEpochBlock<Ticket>;
9923
+ };
9924
+
9925
+ declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
9926
+
9927
+ declare class SafroleSealingKeysData extends WithDebug {
9928
+ static Codec = codecWithContext((context) => {
9929
+ return codec.custom<SafroleSealingKeys>(
9930
+ {
9931
+ name: "SafroleSealingKeys",
9932
+ sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
9933
+ },
9934
+ (e, x) => {
9935
+ e.varU32(tryAsU32(x.kind));
9936
+ if (x.kind === SafroleSealingKeysKind.Keys) {
9937
+ e.sequenceFixLen(codecBandersnatchKey, x.keys);
9938
+ } else {
9939
+ e.sequenceFixLen(Ticket.Codec, x.tickets);
9940
+ }
9941
+ },
9942
+ (d) => {
9943
+ const epochLength = context.epochLength;
9944
+ const kind = d.varU32();
9945
+ if (kind === SafroleSealingKeysKind.Keys) {
9946
+ const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
9947
+ return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
9948
+ }
9949
+
9950
+ if (kind === SafroleSealingKeysKind.Tickets) {
9951
+ const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
9952
+ return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
9953
+ }
9954
+
9955
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9956
+ },
9957
+ (s) => {
9958
+ const kind = s.decoder.varU32();
9959
+ if (kind === SafroleSealingKeysKind.Keys) {
9960
+ s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
9961
+ return;
9962
+ }
9963
+ if (kind === SafroleSealingKeysKind.Tickets) {
9964
+ s.sequenceFixLen(Ticket.Codec, context.epochLength);
9965
+ return;
9966
+ }
9983
9967
 
9984
- /** Dictionary entry of services that auto-accumulate every block. */
9985
- declare class AutoAccumulate {
9986
- static Codec = codec.Class(AutoAccumulate, {
9987
- service: codec.u32.asOpaque<ServiceId>(),
9988
- gasLimit: codec.u64.asOpaque<ServiceGas>(),
9968
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9969
+ },
9970
+ );
9989
9971
  });
9990
9972
 
9991
- static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
9992
- return new AutoAccumulate(service, gasLimit);
9973
+ static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
9974
+ return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
9975
+ }
9976
+
9977
+ static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
9978
+ return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
9993
9979
  }
9994
9980
 
9995
9981
  private constructor(
9996
- /** Service id that auto-accumulates. */
9997
- readonly service: ServiceId,
9998
- /** Gas limit for auto-accumulation. */
9999
- readonly gasLimit: ServiceGas,
10000
- ) {}
9982
+ readonly kind: SafroleSealingKeysKind,
9983
+ readonly keys?: PerEpochBlock<BandersnatchKey>,
9984
+ readonly tickets?: PerEpochBlock<Ticket>,
9985
+ ) {
9986
+ super();
9987
+ }
10001
9988
  }
10002
9989
 
10003
- /**
10004
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
10005
- */
10006
- declare class PrivilegedServices {
10007
- /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
10008
- static Codec = codec.Class(PrivilegedServices, {
10009
- manager: codec.u32.asOpaque<ServiceId>(),
10010
- assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
10011
- delegator: codec.u32.asOpaque<ServiceId>(),
10012
- registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
10013
- ? codec.u32.asOpaque<ServiceId>()
10014
- : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
10015
- autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
9990
+ declare class SafroleData {
9991
+ static Codec = codec.Class(SafroleData, {
9992
+ nextValidatorData: codecPerValidator(ValidatorData.Codec),
9993
+ epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
9994
+ sealingKeySeries: SafroleSealingKeysData.Codec,
9995
+ ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
10016
9996
  });
10017
9997
 
10018
- static create(a: CodecRecord<PrivilegedServices>) {
10019
- return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
9998
+ static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
9999
+ return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
10020
10000
  }
10021
10001
 
10022
10002
  private constructor(
10023
- /**
10024
- * `χ_M`: Manages alteration of χ from block to block,
10025
- * as well as bestow services with storage deposit credits.
10026
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
10027
- */
10028
- readonly manager: ServiceId,
10029
- /** `χ_V`: Managers validator keys. */
10030
- readonly delegator: ServiceId,
10031
- /**
10032
- * `χ_R`: Manages the creation of services in protected range.
10033
- *
10034
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
10035
- */
10036
- readonly registrar: ServiceId,
10037
- /** `χ_A`: Manages authorization queue one for each core. */
10038
- readonly assigners: PerCore<ServiceId>,
10039
- /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
10040
- readonly autoAccumulateServices: readonly AutoAccumulate[],
10003
+ /** gamma_k */
10004
+ public readonly nextValidatorData: PerValidator<ValidatorData>,
10005
+ /** gamma_z */
10006
+ public readonly epochRoot: BandersnatchRingRoot,
10007
+ /** gamma_s */
10008
+ public readonly sealingKeySeries: SafroleSealingKeys,
10009
+ /** gamma_a */
10010
+ public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
10041
10011
  ) {}
10042
10012
  }
10043
10013
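The sealing-key series is now built through the two static factories above rather than a public constructor. A minimal, type-level sketch of the new call shape; the `@typeberry/lib` entry point and the way `PerEpochBlock` values are obtained are assumptions, not shown in this diff:

import { SafroleSealingKeysData } from "@typeberry/lib";                      // assumed entry point
import type { BandersnatchKey, PerEpochBlock, Ticket } from "@typeberry/lib"; // assumed type exports

declare const epochKeys: PerEpochBlock<BandersnatchKey>; // assembled elsewhere
declare const epochTickets: PerEpochBlock<Ticket>;       // assembled elsewhere

// Keys variant: kind = SafroleSealingKeysKind.Keys, `tickets` stays undefined.
const fallbackSeries = SafroleSealingKeysData.keys(epochKeys);
// Tickets variant: kind = SafroleSealingKeysKind.Tickets, `keys` stays undefined.
const ticketSeries = SafroleSealingKeysData.tickets(epochTickets);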
 
@@ -10319,8 +10289,6 @@ declare class StatisticsData {
10319
10289
  ) {}
10320
10290
  }
10321
10291
 
10322
- type StatisticsDataView = DescribedBy<typeof StatisticsData.Codec.View>;
10323
-
10324
10292
  /**
10325
10293
  * In addition to the entropy accumulator η_0, we retain
10326
10294
  * three additional historical values of the accumulator at
@@ -10372,7 +10340,7 @@ type State = {
10372
10340
  /**
10373
10341
  * `γₖ gamma_k`: The keys for the validators of the next epoch, equivalent to those keys which constitute γ_z .
10374
10342
  */
10375
- readonly nextValidatorData: PerValidator<ValidatorData>;
10343
+ readonly nextValidatorData: SafroleData["nextValidatorData"];
10376
10344
 
10377
10345
  /**
10378
10346
  * `κ kappa`: Validators, who are the set of economic actors uniquely
@@ -10418,7 +10386,7 @@ type State = {
10418
10386
  *
10419
10387
  * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10420
10388
  */
10421
- readonly authPools: PerCore<AuthorizationPool>;
10389
+ readonly authPools: PerCore<KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>>;
10422
10390
 
10423
10391
  /**
10424
10392
  * `φ phi`: A queue of authorizers for each core used to fill up the pool.
@@ -10427,14 +10395,14 @@ type State = {
10427
10395
  *
10428
10396
  * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10429
10397
  */
10430
- readonly authQueues: PerCore<AuthorizationQueue>;
10398
+ readonly authQueues: PerCore<FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>>;
10431
10399
 
10432
10400
  /**
10433
10401
  * `β beta`: State of the blocks from recent history.
10434
10402
  *
10435
10403
  * https://graypaper.fluffylabs.dev/#/579bd12/0fb7010fb701
10436
10404
  */
10437
- readonly recentBlocks: RecentBlocks;
10405
+ readonly recentBlocks: RecentBlocksHistory;
10438
10406
 
10439
10407
  /**
10440
10408
  * `π pi`: Previous and current statistics of each validator,
@@ -10451,7 +10419,7 @@ type State = {
10451
10419
  *
10452
10420
  * https://graypaper.fluffylabs.dev/#/5f542d7/165300165500
10453
10421
  */
10454
- readonly accumulationQueue: AccumulationQueue;
10422
+ readonly accumulationQueue: PerEpochBlock<readonly NotYetAccumulatedReport[]>;
10455
10423
 
10456
10424
  /**
10457
10425
  * `ξ xi`: In order to know which work-packages have been
@@ -10461,7 +10429,7 @@ type State = {
10461
10429
  *
10462
10430
  * https://graypaper.fluffylabs.dev/#/5f542d7/161a00161d00
10463
10431
  */
10464
- readonly recentlyAccumulated: RecentlyAccumulated;
10432
+ readonly recentlyAccumulated: PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
10465
10433
 
10466
10434
  /*
10467
10435
  * `γₐ gamma_a`: The ticket accumulator - a series of highest-scoring ticket identifiers to be
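The dedicated `AuthorizationPool`, `AuthorizationQueue`, `AccumulationQueue` and `RecentlyAccumulated` aliases are gone and their shapes are now written inline on `State`, as the hunks above show. Downstream code that still wants short names can derive them from `State` itself; a sketch, assuming `State` stays exported from the package entry point:

import type { State } from "@typeberry/lib"; // assumed entry point

// Local stand-ins for the removed aliases, derived from the retyped fields.
type AuthPools = State["authPools"];
type AuthQueues = State["authQueues"];
type AccumulationQueue = State["accumulationQueue"];
type RecentlyAccumulated = State["recentlyAccumulated"];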
@@ -10534,113 +10502,6 @@ interface Service {
10534
10502
  getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null;
10535
10503
  }
10536
10504
 
10537
- /** Additional marker interface, when state view is supported/required. */
10538
- type WithStateView<V = StateView> = {
10539
- /** Get view of the state. */
10540
- view(): V;
10541
- };
10542
-
10543
- /**
10544
- * A non-decoding version of the `State`.
10545
- *
10546
- * Note we don't require all fields to have view accessors, since
10547
- * it's only beneficial for large collections to be read via views.
10548
- *
10549
- * https://graypaper.fluffylabs.dev/#/579bd12/08f10008f100
10550
- */
10551
- type StateView = {
10552
- /**
10553
-
10554
- * `ρ rho`: work-reports which have been reported but are not yet known to be
10555
- * available to a super-majority of validators, together with the time
10556
- * at which each was reported.
10557
- *
10558
- * https://graypaper.fluffylabs.dev/#/579bd12/135800135800
10559
- */
10560
- availabilityAssignmentView(): AvailabilityAssignmentsView;
10561
-
10562
- /**
10563
- * `ι iota`: The validator keys and metadata to be drawn from next.
10564
- */
10565
- designatedValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
10566
-
10567
- /**
10568
- * `κ kappa`: Validators, who are the set of economic actors uniquely
10569
- * privileged to help build and maintain the Jam chain, are
10570
- * identified within κ, archived in λ and enqueued from ι.
10571
- *
10572
- * https://graypaper.fluffylabs.dev/#/579bd12/080201080601
10573
- */
10574
- currentValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
10575
-
10576
- /**
10577
- * `λ lambda`: Validators, who are the set of economic actors uniquely
10578
- * privileged to help build and maintain the Jam chain, are
10579
- * identified within κ, archived in λ and enqueued from ι.
10580
- *
10581
- * https://graypaper.fluffylabs.dev/#/579bd12/080201080601
10582
- */
10583
- previousValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
10584
-
10585
- /**
10586
- * `α alpha`: Authorizers available for each core (authorizer pool).
10587
- *
10588
- * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10589
- */
10590
- authPoolsView(): SequenceView<AuthorizationPool, SequenceView<AuthorizerHash>>;
10591
-
10592
- /**
10593
- * `φ phi`: A queue of authorizers for each core used to fill up the pool.
10594
- *
10595
- * Only updated by `accumulate` calls using `assign` host call.
10596
- *
10597
- * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10598
- */
10599
- authQueuesView(): SequenceView<AuthorizationQueue, SequenceView<AuthorizerHash>>;
10600
-
10601
- /**
10602
- * `β beta`: State of the blocks from recent history.
10603
- *
10604
- * https://graypaper.fluffylabs.dev/#/579bd12/0fb7010fb701
10605
- */
10606
- recentBlocksView(): RecentBlocksView;
10607
-
10608
- /**
10609
- * `π pi`: Previous and current statistics of each validator,
10610
- * cores statistics and services statistics.
10611
- *
10612
- * https://graypaper.fluffylabs.dev/#/68eaa1f/18f60118f601?v=0.6.4
10613
- */
10614
- statisticsView(): StatisticsDataView;
10615
-
10616
- /**
10617
- * `ϑ theta`: We also maintain knowledge of ready (i.e. available
10618
- * and/or audited) but not-yet-accumulated work-reports in
10619
- * the state item ϑ.
10620
- *
10621
- * https://graypaper.fluffylabs.dev/#/5f542d7/165300165500
10622
- */
10623
- accumulationQueueView(): AccumulationQueueView;
10624
-
10625
- /**
10626
- * `ξ xi`: In order to know which work-packages have been
10627
- * accumulated already, we maintain a history of what has
10628
- * been accumulated. This history, ξ, is sufficiently large
10629
- * for an epoch worth of work-reports.
10630
- *
10631
- * https://graypaper.fluffylabs.dev/#/5f542d7/161a00161d00
10632
- */
10633
- recentlyAccumulatedView(): RecentlyAccumulatedView;
10634
-
10635
- /*
10636
- * `γ gamma`: Safrole data.
10637
- */
10638
- safroleDataView(): SafroleDataView;
10639
-
10640
- /** Retrieve details about single service. */
10641
- getServiceInfoView(id: ServiceId): ServiceAccountInfoView | null;
10642
- };
10643
-
10644
10505
  declare enum UpdatePreimageKind {
10645
10506
  /** Insert new preimage and optionally update its lookup history. */
10646
10507
  Provide = 0,
@@ -10963,10 +10824,10 @@ declare class InMemoryService extends WithDebug implements Service {
10963
10824
  /**
10964
10825
  * A special version of state, stored fully in-memory.
10965
10826
  */
10966
- declare class InMemoryState extends WithDebug implements State, WithStateView, EnumerableState {
10827
+ declare class InMemoryState extends WithDebug implements State, EnumerableState {
10967
10828
  /** Create a new `InMemoryState` by providing all required fields. */
10968
- static new(chainSpec: ChainSpec, state: InMemoryStateFields) {
10969
- return new InMemoryState(chainSpec, state);
10829
+ static create(state: InMemoryStateFields) {
10830
+ return new InMemoryState(state);
10970
10831
  }
10971
10832
 
10972
10833
  /**
@@ -10984,7 +10845,7 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
10984
10845
  /**
10985
10846
  * Create a new `InMemoryState` from some other state object.
10986
10847
  */
10987
- static copyFrom(chainSpec: ChainSpec, other: State, servicesData: Map<ServiceId, ServiceEntries>) {
10848
+ static copyFrom(other: State, servicesData: Map<ServiceId, ServiceEntries>) {
10988
10849
  const services = new Map<ServiceId, InMemoryService>();
10989
10850
  for (const [id, entries] of servicesData.entries()) {
10990
10851
  const service = other.getService(id);
@@ -10995,7 +10856,7 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
10995
10856
  services.set(id, inMemService);
10996
10857
  }
10997
10858
 
10998
- return InMemoryState.new(chainSpec, {
10859
+ return InMemoryState.create({
10999
10860
  availabilityAssignment: other.availabilityAssignment,
11000
10861
  accumulationQueue: other.accumulationQueue,
11001
10862
  designatedValidatorData: other.designatedValidatorData,
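`InMemoryState.new(chainSpec, fields)` and the `chainSpec` argument of `copyFrom` are gone; construction now takes only the field record. A sketch of the updated call shapes (entry point assumed, field values elided):

import { InMemoryState } from "@typeberry/lib";                                               // assumed entry point
import type { InMemoryStateFields, ServiceEntries, ServiceId, State } from "@typeberry/lib";  // assumed type exports

declare const fields: InMemoryStateFields;
declare const other: State;
declare const servicesData: Map<ServiceId, ServiceEntries>;

const fresh = InMemoryState.create(fields);               // was: InMemoryState.new(chainSpec, fields)
const copy = InMemoryState.copyFrom(other, servicesData); // was: InMemoryState.copyFrom(chainSpec, other, servicesData)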
@@ -11190,12 +11051,12 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
11190
11051
  disputesRecords: DisputesRecords;
11191
11052
  timeslot: TimeSlot;
11192
11053
  entropy: FixedSizeArray<EntropyHash, ENTROPY_ENTRIES>;
11193
- authPools: PerCore<AuthorizationPool>;
11194
- authQueues: PerCore<AuthorizationQueue>;
11195
- recentBlocks: RecentBlocks;
11054
+ authPools: PerCore<KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>>;
11055
+ authQueues: PerCore<FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>>;
11056
+ recentBlocks: RecentBlocksHistory;
11196
11057
  statistics: StatisticsData;
11197
- accumulationQueue: AccumulationQueue;
11198
- recentlyAccumulated: RecentlyAccumulated;
11058
+ accumulationQueue: PerEpochBlock<readonly NotYetAccumulatedReport[]>;
11059
+ recentlyAccumulated: PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
11199
11060
  ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">;
11200
11061
  sealingKeySeries: SafroleSealingKeys;
11201
11062
  epochRoot: BandersnatchRingRoot;
@@ -11211,10 +11072,7 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
11211
11072
  return this.services.get(id) ?? null;
11212
11073
  }
11213
11074
 
11214
- protected constructor(
11215
- private readonly chainSpec: ChainSpec,
11216
- s: InMemoryStateFields,
11217
- ) {
11075
+ private constructor(s: InMemoryStateFields) {
11218
11076
  super();
11219
11077
  this.availabilityAssignment = s.availabilityAssignment;
11220
11078
  this.designatedValidatorData = s.designatedValidatorData;
@@ -11238,15 +11096,11 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
11238
11096
  this.services = s.services;
11239
11097
  }
11240
11098
 
11241
- view(): StateView {
11242
- return new InMemoryStateView(this.chainSpec, this);
11243
- }
11244
-
11245
11099
  /**
11246
11100
  * Create an empty and possibly incoherent `InMemoryState`.
11247
11101
  */
11248
11102
  static empty(spec: ChainSpec) {
11249
- return new InMemoryState(spec, {
11103
+ return new InMemoryState({
11250
11104
  availabilityAssignment: tryAsPerCore(
11251
11105
  Array.from({ length: spec.coresCount }, () => null),
11252
11106
  spec,
@@ -11313,7 +11167,7 @@ declare class InMemoryState extends WithDebug implements State, WithStateView, E
11313
11167
  ),
11314
11168
  spec,
11315
11169
  ),
11316
- recentBlocks: RecentBlocks.empty(),
11170
+ recentBlocks: RecentBlocksHistory.empty(),
11317
11171
  statistics: StatisticsData.create({
11318
11172
  current: tryAsPerValidator(
11319
11173
  Array.from({ length: spec.validatorsCount }, () => ValidatorStatistics.empty()),
@@ -11399,18 +11253,12 @@ type FieldNames<T> = {
11399
11253
  [K in keyof T]: T[K] extends Function ? never : K;
11400
11254
  }[keyof T];
11401
11255
 
11402
- type index$e_AUTHORIZATION_QUEUE_SIZE = AUTHORIZATION_QUEUE_SIZE;
11403
11256
  type index$e_AccumulationOutput = AccumulationOutput;
11404
11257
  declare const index$e_AccumulationOutput: typeof AccumulationOutput;
11405
- type index$e_AccumulationQueue = AccumulationQueue;
11406
- type index$e_AccumulationQueueView = AccumulationQueueView;
11407
- type index$e_AuthorizationPool = AuthorizationPool;
11408
- type index$e_AuthorizationQueue = AuthorizationQueue;
11409
11258
  type index$e_AutoAccumulate = AutoAccumulate;
11410
11259
  declare const index$e_AutoAccumulate: typeof AutoAccumulate;
11411
11260
  type index$e_AvailabilityAssignment = AvailabilityAssignment;
11412
11261
  declare const index$e_AvailabilityAssignment: typeof AvailabilityAssignment;
11413
- type index$e_AvailabilityAssignmentsView = AvailabilityAssignmentsView;
11414
11262
  declare const index$e_BASE_SERVICE_BALANCE: typeof BASE_SERVICE_BALANCE;
11415
11263
  type index$e_BlockState = BlockState;
11416
11264
  declare const index$e_BlockState: typeof BlockState;
@@ -11432,11 +11280,8 @@ type index$e_InMemoryStateFields = InMemoryStateFields;
11432
11280
  type index$e_LookupHistoryItem = LookupHistoryItem;
11433
11281
  declare const index$e_LookupHistoryItem: typeof LookupHistoryItem;
11434
11282
  type index$e_LookupHistorySlots = LookupHistorySlots;
11435
- type index$e_MAX_AUTH_POOL_SIZE = MAX_AUTH_POOL_SIZE;
11436
11283
  declare const index$e_MAX_LOOKUP_HISTORY_SLOTS: typeof MAX_LOOKUP_HISTORY_SLOTS;
11437
11284
  type index$e_MAX_RECENT_HISTORY = MAX_RECENT_HISTORY;
11438
- type index$e_NotYetAccumulatedReport = NotYetAccumulatedReport;
11439
- declare const index$e_NotYetAccumulatedReport: typeof NotYetAccumulatedReport;
11440
11285
  type index$e_PerCore<T> = PerCore<T>;
11441
11286
  type index$e_PreimageItem = PreimageItem;
11442
11287
  declare const index$e_PreimageItem: typeof PreimageItem;
@@ -11444,12 +11289,10 @@ type index$e_PrivilegedServices = PrivilegedServices;
11444
11289
  declare const index$e_PrivilegedServices: typeof PrivilegedServices;
11445
11290
  type index$e_RecentBlocks = RecentBlocks;
11446
11291
  declare const index$e_RecentBlocks: typeof RecentBlocks;
11447
- type index$e_RecentBlocksView = RecentBlocksView;
11448
- type index$e_RecentlyAccumulated = RecentlyAccumulated;
11449
- type index$e_RecentlyAccumulatedView = RecentlyAccumulatedView;
11292
+ type index$e_RecentBlocksHistory = RecentBlocksHistory;
11293
+ declare const index$e_RecentBlocksHistory: typeof RecentBlocksHistory;
11450
11294
  type index$e_SafroleData = SafroleData;
11451
11295
  declare const index$e_SafroleData: typeof SafroleData;
11452
- type index$e_SafroleDataView = SafroleDataView;
11453
11296
  type index$e_SafroleSealingKeys = SafroleSealingKeys;
11454
11297
  type index$e_SafroleSealingKeysData = SafroleSealingKeysData;
11455
11298
  declare const index$e_SafroleSealingKeysData: typeof SafroleSealingKeysData;
@@ -11458,17 +11301,14 @@ declare const index$e_SafroleSealingKeysKind: typeof SafroleSealingKeysKind;
11458
11301
  type index$e_Service = Service;
11459
11302
  type index$e_ServiceAccountInfo = ServiceAccountInfo;
11460
11303
  declare const index$e_ServiceAccountInfo: typeof ServiceAccountInfo;
11461
- type index$e_ServiceAccountInfoView = ServiceAccountInfoView;
11462
11304
  type index$e_ServiceData = ServiceData;
11463
11305
  type index$e_ServiceEntries = ServiceEntries;
11464
11306
  type index$e_ServiceStatistics = ServiceStatistics;
11465
11307
  declare const index$e_ServiceStatistics: typeof ServiceStatistics;
11466
11308
  type index$e_ServicesUpdate = ServicesUpdate;
11467
11309
  type index$e_State = State;
11468
- type index$e_StateView = StateView;
11469
11310
  type index$e_StatisticsData = StatisticsData;
11470
11311
  declare const index$e_StatisticsData: typeof StatisticsData;
11471
- type index$e_StatisticsDataView = StatisticsDataView;
11472
11312
  type index$e_StorageItem = StorageItem;
11473
11313
  declare const index$e_StorageItem: typeof StorageItem;
11474
11314
  type index$e_StorageKey = StorageKey;
@@ -11489,15 +11329,9 @@ declare const index$e_UpdateStorageKind: typeof UpdateStorageKind;
11489
11329
  type index$e_VALIDATOR_META_BYTES = VALIDATOR_META_BYTES;
11490
11330
  type index$e_ValidatorData = ValidatorData;
11491
11331
  declare const index$e_ValidatorData: typeof ValidatorData;
11492
- type index$e_ValidatorDataView = ValidatorDataView;
11493
11332
  type index$e_ValidatorStatistics = ValidatorStatistics;
11494
11333
  declare const index$e_ValidatorStatistics: typeof ValidatorStatistics;
11495
- type index$e_WithStateView<V = StateView> = WithStateView<V>;
11496
11334
  declare const index$e_accumulationOutputComparator: typeof accumulationOutputComparator;
11497
- declare const index$e_accumulationQueueCodec: typeof accumulationQueueCodec;
11498
- declare const index$e_authPoolsCodec: typeof authPoolsCodec;
11499
- declare const index$e_authQueuesCodec: typeof authQueuesCodec;
11500
- declare const index$e_availabilityAssignmentsCodec: typeof availabilityAssignmentsCodec;
11501
11335
  declare const index$e_codecBandersnatchKey: typeof codecBandersnatchKey;
11502
11336
  declare const index$e_codecPerCore: typeof codecPerCore;
11503
11337
  declare const index$e_codecServiceId: typeof codecServiceId;
@@ -11506,18 +11340,16 @@ declare const index$e_codecVarU16: typeof codecVarU16;
11506
11340
  declare const index$e_codecWithVersion: typeof codecWithVersion;
11507
11341
  declare const index$e_hashComparator: typeof hashComparator;
11508
11342
  declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
11509
- declare const index$e_recentlyAccumulatedCodec: typeof recentlyAccumulatedCodec;
11510
11343
  declare const index$e_serviceDataCodec: typeof serviceDataCodec;
11511
11344
  declare const index$e_serviceEntriesCodec: typeof serviceEntriesCodec;
11512
11345
  declare const index$e_sortedSetCodec: typeof sortedSetCodec;
11513
11346
  declare const index$e_tryAsLookupHistorySlots: typeof tryAsLookupHistorySlots;
11514
11347
  declare const index$e_tryAsPerCore: typeof tryAsPerCore;
11515
- declare const index$e_validatorsDataCodec: typeof validatorsDataCodec;
11516
11348
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
11517
11349
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
11518
11350
  declare namespace index$e {
11519
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_NotYetAccumulatedReport as NotYetAccumulatedReport, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_accumulationQueueCodec as accumulationQueueCodec, index$e_authPoolsCodec as authPoolsCodec, index$e_authQueuesCodec as authQueuesCodec, index$e_availabilityAssignmentsCodec as availabilityAssignmentsCodec, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_recentlyAccumulatedCodec as recentlyAccumulatedCodec, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_validatorsDataCodec as validatorsDataCodec, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11520
- export type { index$e_AUTHORIZATION_QUEUE_SIZE as AUTHORIZATION_QUEUE_SIZE, index$e_AccumulationQueue as AccumulationQueue, index$e_AccumulationQueueView as AccumulationQueueView, index$e_AuthorizationPool as AuthorizationPool, index$e_AuthorizationQueue as AuthorizationQueue, index$e_AvailabilityAssignmentsView as AvailabilityAssignmentsView, index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_AUTH_POOL_SIZE as MAX_AUTH_POOL_SIZE, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_RecentBlocksView as RecentBlocksView, index$e_RecentlyAccumulated as RecentlyAccumulated, index$e_RecentlyAccumulatedView as RecentlyAccumulatedView, index$e_SafroleDataView as SafroleDataView, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceAccountInfoView as ServiceAccountInfoView, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StateView as StateView, index$e_StatisticsDataView as StatisticsDataView, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES, index$e_ValidatorDataView as ValidatorDataView, index$e_WithStateView as WithStateView };
11351
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11352
+ export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
11521
11353
  }
11522
11354
 
11523
11355
  type StateKey = Opaque<OpaqueHash, "stateKey">;
@@ -11658,25 +11490,33 @@ declare function legacyServiceNested(serviceId: ServiceId, hash: OpaqueHash): St
11658
11490
  return key.asOpaque();
11659
11491
  }
11660
11492
 
11661
- type StateCodec<T, V = T> = {
11493
+ type StateCodec<T> = {
11662
11494
  key: StateKey;
11663
- Codec: Descriptor<T, V>;
11495
+ Codec: Descriptor<T>;
11664
11496
  extract: (s: State) => T;
11665
11497
  };
11666
11498
 
11667
11499
  /** Serialization for particular state entries. */
11668
11500
  declare namespace serialize {
11669
11501
  /** C(1): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b15013b1501?v=0.6.7 */
11670
- export const authPools: StateCodec<State["authPools"], ReturnType<StateView["authPoolsView"]>> = {
11502
+ export const authPools: StateCodec<State["authPools"]> = {
11671
11503
  key: stateKeys.index(StateKeyIdx.Alpha),
11672
- Codec: authPoolsCodec,
11504
+ Codec: codecPerCore(
11505
+ codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), {
11506
+ minLength: 0,
11507
+ maxLength: MAX_AUTH_POOL_SIZE,
11508
+ typicalLength: MAX_AUTH_POOL_SIZE,
11509
+ }),
11510
+ ),
11673
11511
  extract: (s) => s.authPools,
11674
11512
  };
11675
11513
 
11676
11514
  /** C(2): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b31013b3101?v=0.6.7 */
11677
- export const authQueues: StateCodec<State["authQueues"], ReturnType<StateView["authQueuesView"]>> = {
11515
+ export const authQueues: StateCodec<State["authQueues"]> = {
11678
11516
  key: stateKeys.index(StateKeyIdx.Phi),
11679
- Codec: authQueuesCodec,
11517
+ Codec: codecPerCore(
11518
+ codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), AUTHORIZATION_QUEUE_SIZE),
11519
+ ),
11680
11520
  extract: (s) => s.authQueues,
11681
11521
  };
11682
11522
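With the view-typed codecs dropped, each `serialize` entry is just a state key, a plain `Descriptor`, and an extractor. A sketch of encoding one entry with the simplified shape; the entry point and passing the `ChainSpec` as codec context follow the usage visible elsewhere in this diff, but remain assumptions:

import { Encoder, serialize } from "@typeberry/lib";     // assumed entry point
import type { ChainSpec, State } from "@typeberry/lib";  // assumed type exports

declare const state: State;
declare const spec: ChainSpec;

const { key, Codec, extract } = serialize.authQueues;               // StateCodec<State["authQueues"]>
const encoded = Encoder.encodeObject(Codec, extract(state), spec);  // spec passed as codec context
// `key` is the C(2) state key the encoded blob should be stored under.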
 
@@ -11684,14 +11524,14 @@ declare namespace serialize {
11684
11524
  * C(3): Recent blocks with compatibility
11685
11525
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e013b3e01?v=0.6.7
11686
11526
  */
11687
- export const recentBlocks: StateCodec<RecentBlocks, RecentBlocksView> = {
11527
+ export const recentBlocks: StateCodec<State["recentBlocks"]> = {
11688
11528
  key: stateKeys.index(StateKeyIdx.Beta),
11689
- Codec: RecentBlocks.Codec,
11529
+ Codec: RecentBlocksHistory.Codec,
11690
11530
  extract: (s) => s.recentBlocks,
11691
11531
  };
11692
11532
 
11693
11533
  /** C(4): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b63013b6301?v=0.6.7 */
11694
- export const safrole: StateCodec<SafroleData, SafroleDataView> = {
11534
+ export const safrole: StateCodec<SafroleData> = {
11695
11535
  key: stateKeys.index(StateKeyIdx.Gamma),
11696
11536
  Codec: SafroleData.Codec,
11697
11537
  extract: (s) =>
@@ -11704,7 +11544,7 @@ declare namespace serialize {
11704
11544
  };
11705
11545
 
11706
11546
  /** C(5): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bba013bba01?v=0.6.7 */
11707
- export const disputesRecords: StateCodec<DisputesRecords> = {
11547
+ export const disputesRecords: StateCodec<State["disputesRecords"]> = {
11708
11548
  key: stateKeys.index(StateKeyIdx.Psi),
11709
11549
  Codec: DisputesRecords.Codec,
11710
11550
  extract: (s) => s.disputesRecords,
@@ -11718,42 +11558,30 @@ declare namespace serialize {
11718
11558
  };
11719
11559
 
11720
11560
  /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
11721
- export const designatedValidators: StateCodec<
11722
- State["designatedValidatorData"],
11723
- ReturnType<StateView["designatedValidatorDataView"]>
11724
- > = {
11561
+ export const designatedValidators: StateCodec<State["designatedValidatorData"]> = {
11725
11562
  key: stateKeys.index(StateKeyIdx.Iota),
11726
- Codec: validatorsDataCodec,
11563
+ Codec: codecPerValidator(ValidatorData.Codec),
11727
11564
  extract: (s) => s.designatedValidatorData,
11728
11565
  };
11729
11566
 
11730
11567
  /** C(8): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b0d023b0d02?v=0.6.7 */
11731
- export const currentValidators: StateCodec<
11732
- State["currentValidatorData"],
11733
- ReturnType<StateView["currentValidatorDataView"]>
11734
- > = {
11568
+ export const currentValidators: StateCodec<State["currentValidatorData"]> = {
11735
11569
  key: stateKeys.index(StateKeyIdx.Kappa),
11736
- Codec: validatorsDataCodec,
11570
+ Codec: codecPerValidator(ValidatorData.Codec),
11737
11571
  extract: (s) => s.currentValidatorData,
11738
11572
  };
11739
11573
 
11740
11574
  /** C(9): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b1a023b1a02?v=0.6.7 */
11741
- export const previousValidators: StateCodec<
11742
- State["previousValidatorData"],
11743
- ReturnType<StateView["previousValidatorDataView"]>
11744
- > = {
11575
+ export const previousValidators: StateCodec<State["previousValidatorData"]> = {
11745
11576
  key: stateKeys.index(StateKeyIdx.Lambda),
11746
- Codec: validatorsDataCodec,
11577
+ Codec: codecPerValidator(ValidatorData.Codec),
11747
11578
  extract: (s) => s.previousValidatorData,
11748
11579
  };
11749
11580
 
11750
11581
  /** C(10): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b27023b2702?v=0.6.7 */
11751
- export const availabilityAssignment: StateCodec<
11752
- State["availabilityAssignment"],
11753
- ReturnType<StateView["availabilityAssignmentView"]>
11754
- > = {
11582
+ export const availabilityAssignment: StateCodec<State["availabilityAssignment"]> = {
11755
11583
  key: stateKeys.index(StateKeyIdx.Rho),
11756
- Codec: availabilityAssignmentsCodec,
11584
+ Codec: codecPerCore(codec.optional(AvailabilityAssignment.Codec)),
11757
11585
  extract: (s) => s.availabilityAssignment,
11758
11586
  };
11759
11587
 
@@ -11772,29 +11600,28 @@ declare namespace serialize {
11772
11600
  };
11773
11601
 
11774
11602
  /** C(13): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b5e023b5e02?v=0.6.7 */
11775
- export const statistics: StateCodec<StatisticsData, StatisticsDataView> = {
11603
+ export const statistics: StateCodec<State["statistics"]> = {
11776
11604
  key: stateKeys.index(StateKeyIdx.Pi),
11777
11605
  Codec: StatisticsData.Codec,
11778
11606
  extract: (s) => s.statistics,
11779
11607
  };
11780
11608
 
11781
11609
  /** C(14): https://graypaper.fluffylabs.dev/#/1c979cb/3bf0023bf002?v=0.7.1 */
11782
- export const accumulationQueue: StateCodec<
11783
- State["accumulationQueue"],
11784
- ReturnType<StateView["accumulationQueueView"]>
11785
- > = {
11610
+ export const accumulationQueue: StateCodec<State["accumulationQueue"]> = {
11786
11611
  key: stateKeys.index(StateKeyIdx.Omega),
11787
- Codec: accumulationQueueCodec,
11612
+ Codec: codecPerEpochBlock(readonlyArray(codec.sequenceVarLen(NotYetAccumulatedReport.Codec))),
11788
11613
  extract: (s) => s.accumulationQueue,
11789
11614
  };
11790
11615
 
11791
11616
  /** C(15): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b96023b9602?v=0.6.7 */
11792
- export const recentlyAccumulated: StateCodec<
11793
- State["recentlyAccumulated"],
11794
- ReturnType<StateView["recentlyAccumulatedView"]>
11795
- > = {
11617
+ export const recentlyAccumulated: StateCodec<State["recentlyAccumulated"]> = {
11796
11618
  key: stateKeys.index(StateKeyIdx.Xi),
11797
- Codec: recentlyAccumulatedCodec,
11619
+ Codec: codecPerEpochBlock(
11620
+ codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>()).convert(
11621
+ (x) => Array.from(x),
11622
+ (x) => HashSet.from(x),
11623
+ ),
11624
+ ),
11798
11625
  extract: (s) => s.recentlyAccumulated,
11799
11626
  };
11800
11627
 
@@ -11850,107 +11677,6 @@ declare const dumpCodec = Descriptor.new<BytesBlob>(
11850
11677
  (s) => s.bytes(s.decoder.source.length - s.decoder.bytesRead()),
11851
11678
  );
11852
11679
 
11853
- /**
11854
- * Abstraction over some backend containing serialized state entries.
11855
- *
11856
- * This may or may not be backed by some on-disk database or can be just stored in memory.
11857
- */
11858
- interface SerializedStateBackend {
11859
- /** Retrieve given state key. */
11860
- get(key: StateKey): BytesBlob | null;
11861
- }
11862
-
11863
- declare class SerializedStateView<T extends SerializedStateBackend> implements StateView {
11864
- constructor(
11865
- private readonly spec: ChainSpec,
11866
- public backend: T,
11867
- /** Best-effort list of recently active services. */
11868
- private readonly recentlyUsedServices: ServiceId[],
11869
- private readonly viewCache: HashDictionary<StateKey, unknown>,
11870
- ) {}
11871
-
11872
- private retrieveView<A, B>({ key, Codec }: KeyAndCodecWithView<A, B>, description: string): B {
11873
- const cached = this.viewCache.get(key);
11874
- if (cached !== undefined) {
11875
- return cached as B;
11876
- }
11877
- const bytes = this.backend.get(key);
11878
- if (bytes === null) {
11879
- throw new Error(`Required state entry for ${description} is missing!. Accessing view of key: ${key}`);
11880
- }
11881
- // NOTE [ToDr] we are not using `Decoder.decodeObject` here because
11882
- // it needs to get to the end of the data (skip), yet that's expensive.
11883
- // we assume that the state data is correct and coherent anyway, so
11884
- // for performance reasons we simply create the view here.
11885
- const d = Decoder.fromBytesBlob(bytes);
11886
- d.attachContext(this.spec);
11887
- const view = Codec.View.decode(d);
11888
- this.viewCache.set(key, view);
11889
- return view;
11890
- }
11891
-
11892
- availabilityAssignmentView(): AvailabilityAssignmentsView {
11893
- return this.retrieveView(serialize.availabilityAssignment, "availabilityAssignmentView");
11894
- }
11895
-
11896
- designatedValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
11897
- return this.retrieveView(serialize.designatedValidators, "designatedValidatorsView");
11898
- }
11899
-
11900
- currentValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
11901
- return this.retrieveView(serialize.currentValidators, "currentValidatorsView");
11902
- }
11903
-
11904
- previousValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
11905
- return this.retrieveView(serialize.previousValidators, "previousValidatorsView");
11906
- }
11907
-
11908
- authPoolsView(): SequenceView<AuthorizationPool, SequenceView<AuthorizerHash>> {
11909
- return this.retrieveView(serialize.authPools, "authPoolsView");
11910
- }
11911
-
11912
- authQueuesView(): SequenceView<AuthorizationQueue, SequenceView<AuthorizerHash>> {
11913
- return this.retrieveView(serialize.authQueues, "authQueuesView");
11914
- }
11915
-
11916
- recentBlocksView(): RecentBlocksView {
11917
- return this.retrieveView(serialize.recentBlocks, "recentBlocksView");
11918
- }
11919
-
11920
- statisticsView(): StatisticsDataView {
11921
- return this.retrieveView(serialize.statistics, "statisticsView");
11922
- }
11923
-
11924
- accumulationQueueView(): AccumulationQueueView {
11925
- return this.retrieveView(serialize.accumulationQueue, "accumulationQueueView");
11926
- }
11927
-
11928
- recentlyAccumulatedView(): RecentlyAccumulatedView {
11929
- return this.retrieveView(serialize.recentlyAccumulated, "recentlyAccumulatedView");
11930
- }
11931
-
11932
- safroleDataView(): SafroleDataView {
11933
- return this.retrieveView(serialize.safrole, "safroleDataView");
11934
- }
11935
-
11936
- getServiceInfoView(id: ServiceId): ServiceAccountInfoView | null {
11937
- const serviceData = serialize.serviceData(id);
11938
- const bytes = this.backend.get(serviceData.key);
11939
- if (bytes === null) {
11940
- return null;
11941
- }
11942
- if (!this.recentlyUsedServices.includes(id)) {
11943
- this.recentlyUsedServices.push(id);
11944
- }
11945
- return Decoder.decodeObject(serviceData.Codec.View, bytes, this.spec);
11946
- }
11947
- }
11948
-
11949
- type KeyAndCodecWithView<T, V> = {
11950
- key: StateKey;
11951
- Codec: CodecWithView<T, V>;
11952
- };
11953
-
11954
11680
  /** What should be done with that key? */
11955
11681
  declare enum StateEntryUpdateAction {
11956
11682
  /** Insert an entry. */
@@ -12329,6 +12055,16 @@ declare function convertInMemoryStateToDictionary(
12329
12055
  return serialized;
12330
12056
  }
12331
12057
 
12058
+ /**
12059
+ * Abstraction over some backend containing serialized state entries.
12060
+ *
12061
+ * This may or may not be backed by some on-disk database or can be just stored in memory.
12062
+ */
12063
+ interface SerializedStateBackend {
12064
+ /** Retrieve given state key. */
12065
+ get(key: StateKey): BytesBlob | null;
12066
+ }
12067
+
12332
12068
  /**
12333
12069
  * State object which reads its entries from some backend.
12334
12070
  *
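The backend abstraction now lives next to `SerializedState` instead of the removed view layer. A minimal in-memory implementation of the one-method interface, for illustration only (the package's own `StateEntries` already plays this role; the `HashDictionary` usage mirrors what appears elsewhere in this diff, and the entry point is an assumption):

import { HashDictionary } from "@typeberry/lib";                                   // assumed entry point
import type { BytesBlob, SerializedStateBackend, StateKey } from "@typeberry/lib"; // assumed type exports

class InMemoryBackend implements SerializedStateBackend {
  private readonly entries: HashDictionary<StateKey, BytesBlob> = HashDictionary.new();

  set(key: StateKey, value: BytesBlob): void {
    this.entries.set(key, value);
  }

  get(key: StateKey): BytesBlob | null {
    return this.entries.get(key) ?? null;
  }
}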
@@ -12338,7 +12074,7 @@ declare function convertInMemoryStateToDictionary(
12338
12074
  * in the backend layer, so it MAY fail during runtime.
12339
12075
  */
12340
12076
  declare class SerializedState<T extends SerializedStateBackend = SerializedStateBackend>
12341
- implements State, WithStateView, EnumerableState
12077
+ implements State, EnumerableState
12342
12078
  {
12343
12079
  /** Create a state-like object from collection of serialized entries. */
12344
12080
  static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
@@ -12355,15 +12091,12 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12355
12091
  return new SerializedState(spec, blake2b, db, recentServices);
12356
12092
  }
12357
12093
 
12358
- private dataCache: HashDictionary<StateKey, unknown> = HashDictionary.new();
12359
- private viewCache: HashDictionary<StateKey, unknown> = HashDictionary.new();
12360
-
12361
12094
  private constructor(
12362
12095
  private readonly spec: ChainSpec,
12363
12096
  private readonly blake2b: Blake2b,
12364
12097
  public backend: T,
12365
12098
  /** Best-effort list of recently active services. */
12366
- private readonly recentlyUsedServices: ServiceId[],
12099
+ private readonly _recentServiceIds: ServiceId[],
12367
12100
  ) {}
12368
12101
 
12369
12102
  /** Comparing the serialized states, just means comparing their backends. */
@@ -12371,21 +12104,14 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12371
12104
  return this.backend;
12372
12105
  }
12373
12106
 
12374
- /** Return a non-decoding version of the state. */
12375
- view(): StateView {
12376
- return new SerializedStateView(this.spec, this.backend, this.recentlyUsedServices, this.viewCache);
12377
- }
12378
-
12379
12107
  // TODO [ToDr] Temporary method to update the state,
12380
12108
  // without changing references.
12381
12109
  public updateBackend(newBackend: T) {
12382
12110
  this.backend = newBackend;
12383
- this.dataCache = HashDictionary.new();
12384
- this.viewCache = HashDictionary.new();
12385
12111
  }
12386
12112
 
12387
12113
  recentServiceIds(): readonly ServiceId[] {
12388
- return this.recentlyUsedServices;
12114
+ return this._recentServiceIds;
12389
12115
  }
12390
12116
 
12391
12117
  getService(id: ServiceId): SerializedService | null {
@@ -12394,33 +12120,27 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12394
12120
  return null;
12395
12121
  }
12396
12122
 
12397
- if (!this.recentlyUsedServices.includes(id)) {
12398
- this.recentlyUsedServices.push(id);
12123
+ if (!this._recentServiceIds.includes(id)) {
12124
+ this._recentServiceIds.push(id);
12399
12125
  }
12400
12126
 
12401
12127
  return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
12402
12128
  }
12403
12129
 
12404
- private retrieve<T>(k: KeyAndCodec<T>, description: string): T {
12405
- const data = this.retrieveOptional(k);
12406
- if (data === undefined) {
12407
- throw new Error(`Required state entry for ${description} is missing!. Accessing key: ${k.key}`);
12130
+ private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
12131
+ const bytes = this.backend.get(key);
12132
+ if (bytes === null) {
12133
+ throw new Error(`Required state entry for ${description} is missing! Accessing key: ${key}`);
12408
12134
  }
12409
- return data;
12135
+ return Decoder.decodeObject(Codec, bytes, this.spec);
12410
12136
  }
12411
12137
 
12412
12138
  private retrieveOptional<T>({ key, Codec }: KeyAndCodec<T>): T | undefined {
12413
- const cached = this.dataCache.get(key);
12414
- if (cached !== undefined) {
12415
- return cached as T;
12416
- }
12417
12139
  const bytes = this.backend.get(key);
12418
12140
  if (bytes === null) {
12419
12141
  return undefined;
12420
12142
  }
12421
- const data = Decoder.decodeObject(Codec, bytes, this.spec);
12422
- this.dataCache.set(key, data);
12423
- return data;
12143
+ return Decoder.decodeObject(Codec, bytes, this.spec);
12424
12144
  }
12425
12145
 
12426
12146
  get availabilityAssignment(): State["availabilityAssignment"] {
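Both the decoded-value cache and the view cache are gone, so every getter on `SerializedState` now round-trips through `backend.get` plus `Decoder.decodeObject`. Callers on hot paths may want to hold on to the decoded value themselves; a sketch (entry point assumed):

import type { SerializedState } from "@typeberry/lib"; // assumed entry point

declare const state: SerializedState;

// Decoded fresh from the backend on every property access now; read once and reuse.
const statistics = state.statistics;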
@@ -12593,15 +12313,12 @@ declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<
12593
12313
  declare const index$d_EMPTY_BLOB: typeof EMPTY_BLOB;
12594
12314
  type index$d_EncodeFun = EncodeFun;
12595
12315
  type index$d_KeyAndCodec<T> = KeyAndCodec<T>;
12596
- type index$d_KeyAndCodecWithView<T, V> = KeyAndCodecWithView<T, V>;
12597
12316
  type index$d_SerializedService = SerializedService;
12598
12317
  declare const index$d_SerializedService: typeof SerializedService;
12599
12318
  type index$d_SerializedState<T extends SerializedStateBackend = SerializedStateBackend> = SerializedState<T>;
12600
12319
  declare const index$d_SerializedState: typeof SerializedState;
12601
12320
  type index$d_SerializedStateBackend = SerializedStateBackend;
12602
- type index$d_SerializedStateView<T extends SerializedStateBackend> = SerializedStateView<T>;
12603
- declare const index$d_SerializedStateView: typeof SerializedStateView;
12604
- type index$d_StateCodec<T, V = T> = StateCodec<T, V>;
12321
+ type index$d_StateCodec<T> = StateCodec<T>;
12605
12322
  type index$d_StateEntries = StateEntries;
12606
12323
  declare const index$d_StateEntries: typeof StateEntries;
12607
12324
  type index$d_StateEntryUpdate = StateEntryUpdate;
@@ -12629,8 +12346,8 @@ declare const index$d_serializeStorage: typeof serializeStorage;
12629
12346
  declare const index$d_stateEntriesSequenceCodec: typeof stateEntriesSequenceCodec;
12630
12347
  import index$d_stateKeys = stateKeys;
12631
12348
  declare namespace index$d {
12632
- export { index$d_EMPTY_BLOB as EMPTY_BLOB, index$d_SerializedService as SerializedService, index$d_SerializedState as SerializedState, index$d_SerializedStateView as SerializedStateView, index$d_StateEntries as StateEntries, index$d_StateEntryUpdateAction as StateEntryUpdateAction, index$d_StateKeyIdx as StateKeyIdx, index$d_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$d_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$d_U32_BYTES as U32_BYTES, index$d_binaryMerkleization as binaryMerkleization, index$d_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$d_dumpCodec as dumpCodec, index$d_getSafroleData as getSafroleData, index$d_legacyServiceNested as legacyServiceNested, index$d_loadState as loadState, index$d_serialize as serialize, index$d_serializeBasicKeys as serializeBasicKeys, index$d_serializePreimages as serializePreimages, index$d_serializeRemovedServices as serializeRemovedServices, index$d_serializeServiceUpdates as serializeServiceUpdates, index$d_serializeStateUpdate as serializeStateUpdate, index$d_serializeStorage as serializeStorage, index$d_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$d_stateKeys as stateKeys };
12633
- export type { index$d_EncodeFun as EncodeFun, index$d_KeyAndCodec as KeyAndCodec, index$d_KeyAndCodecWithView as KeyAndCodecWithView, index$d_SerializedStateBackend as SerializedStateBackend, index$d_StateCodec as StateCodec, index$d_StateEntryUpdate as StateEntryUpdate, index$d_StateKey as StateKey };
12349
+ export { index$d_EMPTY_BLOB as EMPTY_BLOB, index$d_SerializedService as SerializedService, index$d_SerializedState as SerializedState, index$d_StateEntries as StateEntries, index$d_StateEntryUpdateAction as StateEntryUpdateAction, index$d_StateKeyIdx as StateKeyIdx, index$d_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$d_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$d_U32_BYTES as U32_BYTES, index$d_binaryMerkleization as binaryMerkleization, index$d_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$d_dumpCodec as dumpCodec, index$d_getSafroleData as getSafroleData, index$d_legacyServiceNested as legacyServiceNested, index$d_loadState as loadState, index$d_serialize as serialize, index$d_serializeBasicKeys as serializeBasicKeys, index$d_serializePreimages as serializePreimages, index$d_serializeRemovedServices as serializeRemovedServices, index$d_serializeServiceUpdates as serializeServiceUpdates, index$d_serializeStateUpdate as serializeStateUpdate, index$d_serializeStorage as serializeStorage, index$d_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$d_stateKeys as stateKeys };
12350
+ export type { index$d_EncodeFun as EncodeFun, index$d_KeyAndCodec as KeyAndCodec, index$d_SerializedStateBackend as SerializedStateBackend, index$d_StateCodec as StateCodec, index$d_StateEntryUpdate as StateEntryUpdate, index$d_StateKey as StateKey };
12634
12351
  }
12635
12352
 
12636
12353
  /** Error during `LeafDb` creation. */
@@ -12825,7 +12542,7 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
12825
12542
 
12826
12543
  /** Insert a full state into the database. */
12827
12544
  async insertState(headerHash: HeaderHash, state: InMemoryState): Promise<Result$2<OK, StateUpdateError>> {
12828
- const encoded = Encoder.encodeObject(inMemoryStateCodec(this.spec), state, this.spec);
12545
+ const encoded = Encoder.encodeObject(inMemoryStateCodec, state, this.spec);
12829
12546
  this.db.set(headerHash, encoded);
12830
12547
  return Result.ok(OK);
12831
12548
  }
@@ -12836,7 +12553,7 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
12836
12553
  return null;
12837
12554
  }
12838
12555
 
12839
- return Decoder.decodeObject(inMemoryStateCodec(this.spec), encodedState, this.spec);
12556
+ return Decoder.decodeObject(inMemoryStateCodec, encodedState, this.spec);
12840
12557
  }
12841
12558
  }
12842
12559
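`inMemoryStateCodec` is no longer a factory parameterized by `ChainSpec`; the spec travels as the codec context instead, exactly as the two call sites above show. A round-trip sketch (whether `inMemoryStateCodec` is re-exported from the entry point is an assumption):

import { Decoder, Encoder, inMemoryStateCodec } from "@typeberry/lib"; // assumed entry point
import type { ChainSpec, InMemoryState } from "@typeberry/lib";        // assumed type exports

declare const spec: ChainSpec;
declare const state: InMemoryState;

const encoded = Encoder.encodeObject(inMemoryStateCodec, state, spec);
const decoded = Decoder.decodeObject(inMemoryStateCodec, encoded, spec);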
 
@@ -14098,8 +13815,8 @@ interface PartialState {
14098
13815
  /** Update authorization queue for given core and authorize a service for this core. */
14099
13816
  updateAuthorizationQueue(
14100
13817
  coreIndex: CoreIndex,
14101
- authQueue: AuthorizationQueue,
14102
- assigner: ServiceId | null,
13818
+ authQueue: FixedSizeArray<Blake2bHash, AUTHORIZATION_QUEUE_SIZE>,
13819
+ assigners: ServiceId | null,
14103
13820
  ): Result$2<OK, UpdatePrivilegesError>;
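The queue parameter is now the concrete `FixedSizeArray<Blake2bHash, AUTHORIZATION_QUEUE_SIZE>` rather than the removed `AuthorizationQueue` alias. A call sketch; `asAuthQueue` below is a hypothetical helper standing in for however the caller builds that fixed-size array, it is not part of the package, and the type re-exports are assumptions:

import type { AUTHORIZATION_QUEUE_SIZE, Blake2bHash, CoreIndex, FixedSizeArray, PartialState } from "@typeberry/lib";

declare const partialState: PartialState;
declare const coreIndex: CoreIndex;
declare const hashes: Blake2bHash[];

// Hypothetical conversion helper (not a package export).
declare function asAuthQueue(h: Blake2bHash[]): FixedSizeArray<Blake2bHash, AUTHORIZATION_QUEUE_SIZE>;

const result = partialState.updateAuthorizationQueue(coreIndex, asAuthQueue(hashes), null);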
14104
13821
 
14105
13822
  /**
@@ -19564,7 +19281,7 @@ type JsonRecentBlockState = {
19564
19281
  reported: WorkPackageInfo[];
19565
19282
  };
19566
19283
 
19567
- declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocks>(
19284
+ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
19568
19285
  {
19569
19286
  history: json.array(recentBlockStateFromJson),
19570
19287
  mmr: {
@@ -19572,10 +19289,12 @@ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, Recent
19572
19289
  },
19573
19290
  },
19574
19291
  ({ history, mmr }) => {
19575
- return RecentBlocks.create({
19576
- blocks: history,
19577
- accumulationLog: mmr,
19578
- });
19292
+ return RecentBlocksHistory.create(
19293
+ RecentBlocks.create({
19294
+ blocks: history,
19295
+ accumulationLog: mmr,
19296
+ }),
19297
+ );
19579
19298
  },
19580
19299
  );
19581
19300
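As the JSON converter above shows, a `RecentBlocks` value is now wrapped in `RecentBlocksHistory` before it is stored in state. The same two-step construction applies when building the value by hand; the record shape is taken from the `create` signature since it is not spelled out in this diff, and the entry point is assumed:

import { RecentBlocks, RecentBlocksHistory } from "@typeberry/lib"; // assumed entry point

declare const fields: Parameters<typeof RecentBlocks.create>[0]; // { blocks, accumulationLog }

const recentBlocks = RecentBlocksHistory.create(RecentBlocks.create(fields));
// An empty value is also available via RecentBlocksHistory.empty(), as used elsewhere in this diff.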
 
@@ -19887,7 +19606,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19887
19606
  if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) && chi.chi_r === undefined) {
19888
19607
  throw new Error("Registrar is required in Privileges GP ^0.7.1");
19889
19608
  }
19890
- return InMemoryState.new(spec, {
19609
+ return InMemoryState.create({
19891
19610
  authPools: tryAsPerCore(
19892
19611
  alpha.map((perCore) => {
19893
19612
  if (perCore.length > MAX_AUTH_POOL_SIZE) {
@@ -19906,7 +19625,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19906
19625
  }),
19907
19626
  spec,
19908
19627
  ),
19909
- recentBlocks: beta ?? RecentBlocks.empty(),
19628
+ recentBlocks: beta ?? RecentBlocksHistory.empty(),
19910
19629
  nextValidatorData: gamma.gamma_k,
19911
19630
  epochRoot: gamma.gamma_z,
19912
19631
  sealingKeySeries: TicketsOrKeys.toSafroleSealingKeys(gamma.gamma_s, spec),
@@ -20011,25 +19730,23 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
20011
19730
  */
20012
19731
  extrinsic(extrinsicView: ExtrinsicView): WithHashAndBytes<ExtrinsicHash, ExtrinsicView> {
20013
19732
  // https://graypaper.fluffylabs.dev/#/cc517d7/0cfb000cfb00?v=0.6.5
20014
- const guaranteesCount = tryAsU32(extrinsicView.guarantees.view().length);
20015
- const countEncoded = Encoder.encodeObject(codec.varU32, guaranteesCount);
20016
- const guaranteesBlobs = extrinsicView.guarantees
19733
+ const guarantees = extrinsicView.guarantees
20017
19734
  .view()
20018
19735
  .map((g) => g.view())
20019
- .reduce(
20020
- (aggregated, guarantee) => {
20021
- const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
20022
- aggregated.push(reportHash.raw);
20023
- aggregated.push(guarantee.slot.encoded().raw);
20024
- aggregated.push(guarantee.credentials.encoded().raw);
20025
- return aggregated;
20026
- },
20027
- [countEncoded.raw],
20028
- );
19736
+ .map((guarantee) => {
19737
+ const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
19738
+ return BytesBlob.blobFromParts([
19739
+ reportHash.raw,
19740
+ guarantee.slot.encoded().raw,
19741
+ guarantee.credentials.encoded().raw,
19742
+ ]);
19743
+ });
19744
+
19745
+ const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);
20029
19746
 
20030
19747
  const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
20031
19748
  const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
20032
- const eg = this.blake2b.hashBlobs(guaranteesBlobs).asOpaque<ExtrinsicHash>();
19749
+ const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
20033
19750
  const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
20034
19751
  const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
20035
19752
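The rewritten `E_G` computation should produce the same digest as before, provided two encoding assumptions hold; they are not verified here:

// Assumptions: dumpCodec writes a blob's raw bytes with no per-element length prefix,
// and codec.sequenceVarLen prefixes only the element count (varU32).
//
//   old input to hashBlobs : varU32(n), H(report_0), E(slot_0), E(cred_0), ... concatenated by the hasher
//   new input to hashBytes : varU32(n) ++ blob_0 ++ ... where blob_i = H(report_i) ++ E(slot_i) ++ E(cred_i)
//
// Under those assumptions the two byte streams are identical, so blake2b yields the same ExtrinsicHash.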