@typeberry/lib 0.2.0-b6e3410 → 0.2.0-c3df163

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4) hide show
  1. package/index.cjs +919 -767
  2. package/index.d.ts +1897 -1619
  3. package/index.js +919 -767
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -2427,11 +2427,15 @@ type ClassConstructor<T> = {
2427
2427
  create: (o: CodecRecord<T>) => T;
2428
2428
  };
2429
2429
 
2430
- /**
2431
- * A full codec type, i.e. the `Encode` and `Decode`.
2432
- */
2430
+ /** A full codec type, i.e. the `Encode` and `Decode`. */
2433
2431
  type Codec<T> = Encode<T> & Decode<T>;
2434
2432
 
2433
+ /** A codec descriptor with extra view. */
2434
+ type CodecWithView<T, V> = Codec<T> & {
2435
+ /** encoded data view codec. */
2436
+ View: Codec<V>;
2437
+ };
2438
+
2435
2439
  /**
2436
2440
  * Type descriptor definition.
2437
2441
  *
@@ -2440,7 +2444,7 @@ type Codec<T> = Encode<T> & Decode<T>;
2440
2444
  *
2441
2445
  * Descriptors can be composed to form more complex typings.
2442
2446
  */
2443
- declare class Descriptor<T, V = T> implements Codec<T>, Skip {
2447
+ declare class Descriptor<T, V = T> implements Codec<T>, Skip, CodecWithView<T, V> {
2444
2448
  /** A "lightweight" version of the object. */
2445
2449
  public readonly View: Descriptor<V>;
2446
2450
 
@@ -2686,6 +2690,10 @@ declare abstract class ObjectView<T> {
2686
2690
  toString() {
2687
2691
  return `View<${this.materializedConstructor.name}>(cache: ${this.cache.size})`;
2688
2692
  }
2693
+
2694
+ [TEST_COMPARE_USING]() {
2695
+ return this.materialize();
2696
+ }
2689
2697
  }
2690
2698
 
2691
2699
  /**
@@ -3237,15 +3245,25 @@ declare namespace codec$1 {
3237
3245
  sizeHint: SizeHint;
3238
3246
  },
3239
3247
  chooser: (ctx: unknown | null) => Descriptor<T, V>,
3240
- ): Descriptor<T, V> =>
3241
- Descriptor.withView(
3248
+ ): Descriptor<T, V> => {
3249
+ const Self = chooser(null);
3250
+ return Descriptor.withView(
3242
3251
  name,
3243
3252
  sizeHint,
3244
3253
  (e, x) => chooser(e.getContext()).encode(e, x),
3245
3254
  (d) => chooser(d.getContext()).decode(d),
3246
3255
  (s) => chooser(s.decoder.getContext()).skip(s),
3247
- chooser(null).View,
3256
+ hasUniqueView(Self)
3257
+ ? select(
3258
+ {
3259
+ name: Self.View.name,
3260
+ sizeHint: Self.View.sizeHint,
3261
+ },
3262
+ (ctx) => chooser(ctx).View,
3263
+ )
3264
+ : Self.View,
3248
3265
  );
3266
+ };
3249
3267
 
3250
3268
  /**
3251
3269
  * A descriptor for a more complex POJO.
@@ -3439,6 +3457,7 @@ declare function sequenceViewFixLen<T, V>(
3439
3457
  type index$q_ClassConstructor<T> = ClassConstructor<T>;
3440
3458
  type index$q_Codec<T> = Codec<T>;
3441
3459
  type index$q_CodecRecord<T> = CodecRecord<T>;
3460
+ type index$q_CodecWithView<T, V> = CodecWithView<T, V>;
3442
3461
  declare const index$q_DEFAULT_START_LENGTH: typeof DEFAULT_START_LENGTH;
3443
3462
  type index$q_Decode<T> = Decode<T>;
3444
3463
  type index$q_Decoder = Decoder;
@@ -3479,7 +3498,7 @@ declare const index$q_tryAsExactBytes: typeof tryAsExactBytes;
3479
3498
  declare const index$q_validateLength: typeof validateLength;
3480
3499
  declare namespace index$q {
3481
3500
  export { index$q_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$q_Decoder as Decoder, index$q_Descriptor as Descriptor, index$q_Encoder as Encoder, index$q_MASKS as MASKS, index$q_MAX_LENGTH as MAX_LENGTH, index$q_ObjectView as ObjectView, index$q_SequenceView as SequenceView, index$q_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$q_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$q_ViewField as ViewField, index$q_addSizeHints as addSizeHints, codec$1 as codec, index$q_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$q_exactHint as exactHint, index$q_forEachDescriptor as forEachDescriptor, index$q_hasUniqueView as hasUniqueView, index$q_objectView as objectView, index$q_readonlyArray as readonlyArray, index$q_sequenceViewFixLen as sequenceViewFixLen, index$q_sequenceViewVarLen as sequenceViewVarLen, index$q_tryAsExactBytes as tryAsExactBytes, index$q_validateLength as validateLength };
3482
- export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
3501
+ export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_CodecWithView as CodecWithView, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
3483
3502
  }
3484
3503
 
3485
3504
  /**
@@ -6917,6 +6936,17 @@ declare function emptyBlock(slot: TimeSlot = tryAsTimeSlot(0)) {
6917
6936
  });
6918
6937
  }
6919
6938
 
6939
+ /**
6940
+ * Take an input data and re-encode that data as view.
6941
+ *
6942
+ * NOTE: this function should NEVER be used in any production code,
6943
+ * it's only a test helper.
6944
+ */
6945
+ declare function reencodeAsView<T, V>(codec: Descriptor<T, V>, object: T, chainSpec?: ChainSpec): V {
6946
+ const encoded = Encoder.encodeObject(codec, object, chainSpec);
6947
+ return Decoder.decodeObject(codec.View, encoded, chainSpec);
6948
+ }
6949
+
6920
6950
  type index$l_Block = Block;
6921
6951
  declare const index$l_Block: typeof Block;
6922
6952
  type index$l_BlockView = BlockView;
@@ -6966,6 +6996,7 @@ declare const index$l_guarantees: typeof guarantees;
6966
6996
  declare const index$l_headerViewWithHashCodec: typeof headerViewWithHashCodec;
6967
6997
  declare const index$l_legacyDescriptor: typeof legacyDescriptor;
6968
6998
  declare const index$l_preimage: typeof preimage;
6999
+ declare const index$l_reencodeAsView: typeof reencodeAsView;
6969
7000
  declare const index$l_refineContext: typeof refineContext;
6970
7001
  declare const index$l_tickets: typeof tickets;
6971
7002
  declare const index$l_tryAsCoreIndex: typeof tryAsCoreIndex;
@@ -6982,7 +7013,7 @@ declare const index$l_workPackage: typeof workPackage;
6982
7013
  declare const index$l_workReport: typeof workReport;
6983
7014
  declare const index$l_workResult: typeof workResult;
6984
7015
  declare namespace index$l {
6985
- export { index$l_Block as Block, index$l_EpochMarker as EpochMarker, index$l_Extrinsic as Extrinsic, index$l_Header as Header, index$l_HeaderViewWithHash as HeaderViewWithHash, index$l_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$l_TicketsMarker as TicketsMarker, index$l_ValidatorKeys as ValidatorKeys, index$l_W_E as W_E, index$l_W_S as W_S, index$l_assurances as assurances, index$l_codecPerEpochBlock as codecPerEpochBlock, index$l_codecPerValidator as codecPerValidator, codec as codecUtils, index$l_disputes as disputes, index$l_emptyBlock as emptyBlock, index$l_encodeUnsealedHeader as encodeUnsealedHeader, index$l_guarantees as guarantees, index$l_headerViewWithHashCodec as headerViewWithHashCodec, index$l_legacyDescriptor as legacyDescriptor, index$l_preimage as preimage, index$l_refineContext as refineContext, index$l_tickets as tickets, index$l_tryAsCoreIndex as tryAsCoreIndex, index$l_tryAsEpoch as tryAsEpoch, index$l_tryAsPerEpochBlock as tryAsPerEpochBlock, index$l_tryAsPerValidator as tryAsPerValidator, index$l_tryAsSegmentIndex as tryAsSegmentIndex, index$l_tryAsServiceGas as tryAsServiceGas, index$l_tryAsServiceId as tryAsServiceId, index$l_tryAsTimeSlot as tryAsTimeSlot, index$l_tryAsValidatorIndex as tryAsValidatorIndex, index$l_workItem as workItem, index$l_workPackage as workPackage, index$l_workReport as workReport, index$l_workResult as workResult };
7016
+ export { index$l_Block as Block, index$l_EpochMarker as EpochMarker, index$l_Extrinsic as Extrinsic, index$l_Header as Header, index$l_HeaderViewWithHash as HeaderViewWithHash, index$l_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$l_TicketsMarker as TicketsMarker, index$l_ValidatorKeys as ValidatorKeys, index$l_W_E as W_E, index$l_W_S as W_S, index$l_assurances as assurances, index$l_codecPerEpochBlock as codecPerEpochBlock, index$l_codecPerValidator as codecPerValidator, codec as codecUtils, index$l_disputes as disputes, index$l_emptyBlock as emptyBlock, index$l_encodeUnsealedHeader as encodeUnsealedHeader, index$l_guarantees as guarantees, index$l_headerViewWithHashCodec as headerViewWithHashCodec, index$l_legacyDescriptor as legacyDescriptor, index$l_preimage as preimage, index$l_reencodeAsView as reencodeAsView, index$l_refineContext as refineContext, index$l_tickets as tickets, index$l_tryAsCoreIndex as tryAsCoreIndex, index$l_tryAsEpoch as tryAsEpoch, index$l_tryAsPerEpochBlock as tryAsPerEpochBlock, index$l_tryAsPerValidator as tryAsPerValidator, index$l_tryAsSegmentIndex as tryAsSegmentIndex, index$l_tryAsServiceGas as tryAsServiceGas, index$l_tryAsServiceId as tryAsServiceId, index$l_tryAsTimeSlot as tryAsTimeSlot, index$l_tryAsValidatorIndex as tryAsValidatorIndex, index$l_workItem as workItem, index$l_workPackage as workPackage, index$l_workReport as workReport, index$l_workResult as workResult };
6986
7017
  export type { index$l_BlockView as BlockView, index$l_CodeHash as CodeHash, index$l_CoreIndex as CoreIndex, index$l_EntropyHash as EntropyHash, index$l_Epoch as Epoch, index$l_EpochMarkerView as EpochMarkerView, index$l_ExtrinsicHash as ExtrinsicHash, index$l_ExtrinsicView as ExtrinsicView, index$l_HeaderHash as HeaderHash, index$l_HeaderView as HeaderView, index$l_PerEpochBlock as PerEpochBlock, index$l_PerValidator as PerValidator, index$l_SEGMENT_BYTES as SEGMENT_BYTES, index$l_Segment as Segment, index$l_SegmentIndex as SegmentIndex, index$l_ServiceGas as ServiceGas, index$l_ServiceId as ServiceId, index$l_StateRootHash as StateRootHash, index$l_TicketsMarkerView as TicketsMarkerView, index$l_TimeSlot as TimeSlot, index$l_ValidatorIndex as ValidatorIndex, index$l_WorkReportHash as WorkReportHash };
6987
7018
  }
6988
7019
 
@@ -9122,33 +9153,61 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
9122
9153
  }
9123
9154
 
9124
9155
  /**
9125
- * Assignment of particular work report to a core.
9156
+ * `J`: The maximum sum of dependency items in a work-report.
9126
9157
  *
9127
- * Used by "Assurances" and "Disputes" subsystem, denoted by `rho`
9128
- * in state.
9158
+ * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
9159
+ */
9160
+ declare const MAX_REPORT_DEPENDENCIES = 8;
9161
+ type MAX_REPORT_DEPENDENCIES = typeof MAX_REPORT_DEPENDENCIES;
9162
+
9163
+ /**
9164
+ * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
9129
9165
  *
9130
- * https://graypaper.fluffylabs.dev/#/579bd12/135800135800
9166
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
9131
9167
  */
9132
- declare class AvailabilityAssignment extends WithDebug {
9133
- static Codec = codec.Class(AvailabilityAssignment, {
9134
- workReport: WorkReport.Codec,
9135
- timeout: codec.u32.asOpaque<TimeSlot>(),
9168
+ declare class NotYetAccumulatedReport extends WithDebug {
9169
+ static Codec = codec.Class(NotYetAccumulatedReport, {
9170
+ report: WorkReport.Codec,
9171
+ dependencies: codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(), {
9172
+ typicalLength: MAX_REPORT_DEPENDENCIES / 2,
9173
+ maxLength: MAX_REPORT_DEPENDENCIES,
9174
+ minLength: 0,
9175
+ }),
9136
9176
  });
9137
9177
 
9138
- static create({ workReport, timeout }: CodecRecord<AvailabilityAssignment>) {
9139
- return new AvailabilityAssignment(workReport, timeout);
9178
+ static create({ report, dependencies }: CodecRecord<NotYetAccumulatedReport>) {
9179
+ return new NotYetAccumulatedReport(report, dependencies);
9140
9180
  }
9141
9181
 
9142
9182
  private constructor(
9143
- /** Work report assigned to a core. */
9144
- public readonly workReport: WorkReport,
9145
- /** Time slot at which the report becomes obsolete. */
9146
- public readonly timeout: TimeSlot,
9183
+ /**
9184
+ * Each of these were made available at most one epoch ago
9185
+ * but have or had unfulfilled dependencies.
9186
+ */
9187
+ readonly report: WorkReport,
9188
+ /**
9189
+ * Alongside the work-report itself, we retain its un-accumulated
9190
+ * dependencies, a set of work-package hashes.
9191
+ *
9192
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
9193
+ */
9194
+ readonly dependencies: KnownSizeArray<WorkPackageHash, `[0..${MAX_REPORT_DEPENDENCIES})`>,
9147
9195
  ) {
9148
9196
  super();
9149
9197
  }
9150
9198
  }
9151
9199
 
9200
+ /**
9201
+ * Accumulation queue state entry.
9202
+ */
9203
+ type AccumulationQueue = PerEpochBlock<readonly NotYetAccumulatedReport[]>;
9204
+
9205
+ declare const accumulationQueueCodec = codecPerEpochBlock(
9206
+ readonlyArray(codec.sequenceVarLen(NotYetAccumulatedReport.Codec)),
9207
+ );
9208
+
9209
+ type AccumulationQueueView = DescribedBy<typeof accumulationQueueCodec.View>;
9210
+
9152
9211
  /** One entry of kind `T` for each core. */
9153
9212
  type PerCore<T> = KnownSizeArray<T, "number of cores">;
9154
9213
  /** Check if given array has correct length before casting to the opaque type. */
@@ -9164,6 +9223,68 @@ declare const codecPerCore = <T, V>(val: Descriptor<T, V>): Descriptor<PerCore<T
9164
9223
  return codecKnownSizeArray(val, { fixedLength: context.coresCount });
9165
9224
  });
9166
9225
 
9226
+ /**
9227
+ * Assignment of particular work report to a core.
9228
+ *
9229
+ * Used by "Assurances" and "Disputes" subsystem, denoted by `rho`
9230
+ * in state.
9231
+ *
9232
+ * https://graypaper.fluffylabs.dev/#/579bd12/135800135800
9233
+ */
9234
+ declare class AvailabilityAssignment extends WithDebug {
9235
+ static Codec = codec.Class(AvailabilityAssignment, {
9236
+ workReport: WorkReport.Codec,
9237
+ timeout: codec.u32.asOpaque<TimeSlot>(),
9238
+ });
9239
+
9240
+ static create({ workReport, timeout }: CodecRecord<AvailabilityAssignment>) {
9241
+ return new AvailabilityAssignment(workReport, timeout);
9242
+ }
9243
+
9244
+ private constructor(
9245
+ /** Work report assigned to a core. */
9246
+ public readonly workReport: WorkReport,
9247
+ /** Time slot at which the report becomes obsolete. */
9248
+ public readonly timeout: TimeSlot,
9249
+ ) {
9250
+ super();
9251
+ }
9252
+ }
9253
+
9254
+ declare const availabilityAssignmentsCodec = codecPerCore(codec.optional(AvailabilityAssignment.Codec));
9255
+
9256
+ type AvailabilityAssignmentsView = DescribedBy<typeof availabilityAssignmentsCodec.View>;
9257
+
9258
+ /** `O`: Maximal authorization pool size. */
9259
+ declare const MAX_AUTH_POOL_SIZE = O;
9260
+ type MAX_AUTH_POOL_SIZE = typeof MAX_AUTH_POOL_SIZE;
9261
+
9262
+ /** `Q`: Size of the authorization queue. */
9263
+ declare const AUTHORIZATION_QUEUE_SIZE = Q;
9264
+ type AUTHORIZATION_QUEUE_SIZE = typeof AUTHORIZATION_QUEUE_SIZE;
9265
+
9266
+ /** A pool of authorization hashes that is filled from the queue. */
9267
+ type AuthorizationPool = KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>;
9268
+
9269
+ /**
9270
+ * A fixed-size queue of authorization hashes used to fill up the pool.
9271
+ *
9272
+ * Can be set using `ASSIGN` host call in batches of `AUTHORIZATION_QUEUE_SIZE`.
9273
+ */
9274
+ type AuthorizationQueue = FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>;
9275
+
9276
+ declare const authPoolsCodec = codecPerCore<AuthorizationPool, SequenceView<AuthorizerHash>>(
9277
+ codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), {
9278
+ minLength: 0,
9279
+ maxLength: MAX_AUTH_POOL_SIZE,
9280
+ typicalLength: MAX_AUTH_POOL_SIZE,
9281
+ }),
9282
+ );
9283
+
9284
+ declare const authQueuesCodec = codecPerCore<AuthorizationQueue, SequenceView<AuthorizerHash>>(
9285
+ codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), AUTHORIZATION_QUEUE_SIZE),
9286
+ );
9287
+
9167
9288
  declare const sortedSetCodec = <T extends OpaqueHash>() =>
9168
9289
  readonlyArray(codec.sequenceVarLen(codec.bytes(HASH_SIZE))).convert<ImmutableSortedSet<T>>(
9169
9290
  (input) => input.array,
@@ -9245,769 +9366,678 @@ declare function hashComparator<V extends OpaqueHash>(a: V, b: V) {
9245
9366
  return a.compare(b);
9246
9367
  }
9247
9368
 
9248
- /**
9249
- * `J`: The maximum sum of dependency items in a work-report.
9250
- *
9251
- * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
9252
- */
9253
- declare const MAX_REPORT_DEPENDENCIES = 8;
9254
- type MAX_REPORT_DEPENDENCIES = typeof MAX_REPORT_DEPENDENCIES;
9369
+ declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
9255
9370
 
9256
- /** `Q`: Size of the authorization queue. */
9257
- declare const AUTHORIZATION_QUEUE_SIZE = Q;
9258
- type AUTHORIZATION_QUEUE_SIZE = typeof AUTHORIZATION_QUEUE_SIZE;
9371
+ /** Merkle Mountain Range peaks. */
9372
+ interface MmrPeaks<H extends OpaqueHash> {
9373
+ /**
9374
+ * Peaks at particular positions.
9375
+ *
9376
+ * In case there is no merkle trie at given index, `null` is placed.
9377
+ */
9378
+ peaks: readonly (H | null)[];
9379
+ }
9259
9380
 
9260
- /** `O`: Maximal authorization pool size. */
9261
- declare const MAX_AUTH_POOL_SIZE = O;
9262
- type MAX_AUTH_POOL_SIZE = typeof MAX_AUTH_POOL_SIZE;
9381
+ /** Hasher interface for MMR. */
9382
+ interface MmrHasher<H extends OpaqueHash> {
9383
+ /** Hash two items together. */
9384
+ hashConcat(a: H, b: H): H;
9385
+ /** Hash two items together with extra bytes blob prepended. */
9386
+ hashConcatPrepend(id: BytesBlob, a: H, b: H): H;
9387
+ }
9263
9388
 
9264
9389
  /**
9265
- * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
9390
+ * Merkle Mountain Range.
9266
9391
  *
9267
- * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
9392
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
9268
9393
  */
9269
- declare class NotYetAccumulatedReport extends WithDebug {
9270
- static Codec = codec.Class(NotYetAccumulatedReport, {
9271
- report: WorkReport.Codec,
9272
- dependencies: codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(), {
9273
- typicalLength: MAX_REPORT_DEPENDENCIES / 2,
9274
- maxLength: MAX_REPORT_DEPENDENCIES,
9275
- minLength: 0,
9276
- }),
9277
- });
9394
+ declare class MerkleMountainRange<H extends OpaqueHash> {
9395
+ /** Construct an empty MMR. */
9396
+ static empty<H extends OpaqueHash>(hasher: MmrHasher<H>) {
9397
+ return new MerkleMountainRange(hasher);
9398
+ }
9278
9399
 
9279
- static create({ report, dependencies }: CodecRecord<NotYetAccumulatedReport>) {
9280
- return new NotYetAccumulatedReport(report, dependencies);
9400
+ /** Construct a new MMR from existing peaks. */
9401
+ static fromPeaks<H extends OpaqueHash>(hasher: MmrHasher<H>, mmr: MmrPeaks<H>) {
9402
+ return new MerkleMountainRange(
9403
+ hasher,
9404
+ mmr.peaks
9405
+ .reduce((acc: Mountain<H>[], peak, index) => {
9406
+ if (peak !== null) {
9407
+ acc.push(Mountain.fromPeak(peak, 2 ** index));
9408
+ }
9409
+ return acc;
9410
+ }, [])
9411
+ .reverse(),
9412
+ );
9281
9413
  }
9282
9414
 
9283
9415
  private constructor(
9284
- /**
9285
- * Each of these were made available at most one epoch ago
9286
- * but have or had unfulfilled dependencies.
9287
- */
9288
- readonly report: WorkReport,
9289
- /**
9290
- * Alongside the work-report itself, we retain its un-accumulated
9291
- * dependencies, a set of work-package hashes.
9292
- *
9293
- * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
9294
- */
9295
- readonly dependencies: KnownSizeArray<WorkPackageHash, `[0..${MAX_REPORT_DEPENDENCIES})`>,
9296
- ) {
9297
- super();
9298
- }
9299
- }
9416
+ private readonly hasher: MmrHasher<H>,
9417
+ /** Store non-empty merkle tries (mountains) ordered by descending size. */
9418
+ private readonly mountains: Mountain<H>[] = [],
9419
+ ) {}
9300
9420
 
9301
- /**
9302
- * `B_S`: The basic minimum balance which all services require.
9303
- *
9304
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
9305
- */
9306
- declare const BASE_SERVICE_BALANCE = 100n;
9307
- /**
9308
- * `B_I`: The additional minimum balance required per item of elective service state.
9309
- *
9310
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
9311
- */
9312
- declare const ELECTIVE_ITEM_BALANCE = 10n;
9313
- /**
9314
- * `B_L`: The additional minimum balance required per octet of elective service state.
9315
- *
9316
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
9317
- */
9318
- declare const ELECTIVE_BYTE_BALANCE = 1n;
9421
+ /**
9422
+ * Append a new hash to the MMR structure.
9423
+ *
9424
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3b11003b1100?v=0.6.2
9425
+ */
9426
+ append(hash: H) {
9427
+ let newMountain = Mountain.fromPeak(hash, 1);
9319
9428
 
9320
- declare const zeroSizeHint: SizeHint = {
9321
- bytes: 0,
9322
- isExact: true,
9323
- };
9429
+ for (;;) {
9430
+ const last = this.mountains.pop();
9431
+ if (last === undefined) {
9432
+ this.mountains.push(newMountain);
9433
+ return;
9434
+ }
9324
9435
 
9325
- /** 0-byte read, return given default value */
9326
- declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
9327
- Descriptor.new<T>(
9328
- "ignoreValue",
9329
- zeroSizeHint,
9330
- (_e, _v) => {},
9331
- (_d) => defaultValue,
9332
- (_s) => {},
9333
- );
9436
+ if (last.size !== newMountain.size) {
9437
+ this.mountains.push(last);
9438
+ this.mountains.push(newMountain);
9439
+ return;
9440
+ }
9334
9441
 
9335
- /** Encode and decode object with leading version number. */
9336
- declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
9337
- Descriptor.new<T>(
9338
- "withVersion",
9339
- {
9340
- bytes: val.sizeHint.bytes + 8,
9341
- isExact: false,
9342
- },
9343
- (e, v) => {
9344
- e.varU64(0n);
9345
- val.encode(e, v);
9346
- },
9347
- (d) => {
9348
- const version = d.varU64();
9349
- if (version !== 0n) {
9350
- throw new Error("Non-zero version is not supported!");
9351
- }
9352
- return val.decode(d);
9353
- },
9354
- (s) => {
9355
- s.varU64();
9356
- val.skip(s);
9357
- },
9358
- );
9359
-
9360
- /**
9361
- * Service account details.
9362
- *
9363
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
9364
- */
9365
- declare class ServiceAccountInfo extends WithDebug {
9366
- static Codec = codec.Class(ServiceAccountInfo, {
9367
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
9368
- balance: codec.u64,
9369
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9370
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9371
- storageUtilisationBytes: codec.u64,
9372
- gratisStorage: codec.u64,
9373
- storageUtilisationCount: codec.u32,
9374
- created: codec.u32.convert((x) => x, tryAsTimeSlot),
9375
- lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
9376
- parentService: codec.u32.convert((x) => x, tryAsServiceId),
9377
- });
9378
-
9379
- static create(a: CodecRecord<ServiceAccountInfo>) {
9380
- return new ServiceAccountInfo(
9381
- a.codeHash,
9382
- a.balance,
9383
- a.accumulateMinGas,
9384
- a.onTransferMinGas,
9385
- a.storageUtilisationBytes,
9386
- a.gratisStorage,
9387
- a.storageUtilisationCount,
9388
- a.created,
9389
- a.lastAccumulation,
9390
- a.parentService,
9391
- );
9442
+ newMountain = last.mergeWith(this.hasher, newMountain);
9443
+ }
9392
9444
  }
9393
9445
 
9394
9446
  /**
9395
- * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
9396
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
9447
+ * Root of the entire structure.
9448
+ *
9449
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3b20013b2001?v=0.6.2
9397
9450
  */
9398
- static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
9399
- const storageCost =
9400
- BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
9401
-
9402
- if (storageCost < 0n) {
9403
- return tryAsU64(0);
9451
+ getSuperPeakHash(): H {
9452
+ if (this.mountains.length === 0) {
9453
+ return Bytes.zero(HASH_SIZE).asOpaque();
9404
9454
  }
9405
-
9406
- if (storageCost >= 2n ** 64n) {
9407
- return tryAsU64(2n ** 64n - 1n);
9455
+ const revMountains = this.mountains.slice().reverse();
9456
+ const length = revMountains.length;
9457
+ let lastHash = revMountains[0].peak;
9458
+ for (let i = 1; i < length; i++) {
9459
+ const mountain = revMountains[i];
9460
+ lastHash = this.hasher.hashConcatPrepend(SUPER_PEAK_STRING, lastHash, mountain.peak);
9408
9461
  }
9462
+ return lastHash;
9463
+ }
9409
9464
 
9410
- return tryAsU64(storageCost);
9465
+ /** Get current peaks. */
9466
+ getPeaks(): MmrPeaks<H> {
9467
+ const peaks: (H | null)[] = [];
9468
+ const mountains = this.mountains;
9469
+
9470
+ // always 2**index
9471
+ let currentSize = 1;
9472
+ let currentIdx = mountains.length - 1;
9473
+ while (currentIdx >= 0) {
9474
+ const currentItem = mountains[currentIdx];
9475
+ if (currentItem.size >= currentSize && currentItem.size < 2 * currentSize) {
9476
+ peaks.push(currentItem.peak);
9477
+ currentIdx -= 1;
9478
+ } else {
9479
+ peaks.push(null);
9480
+ }
9481
+ // move to the next index.
9482
+ currentSize = currentSize << 1;
9483
+ }
9484
+ return { peaks };
9411
9485
  }
9486
+ }
9412
9487
 
9488
+ /** An internal helper structure to represent a merkle trie for MMR. */
9489
+ declare class Mountain<H extends OpaqueHash> {
9413
9490
  private constructor(
9414
- /** `a_c`: Hash of the service code. */
9415
- public readonly codeHash: CodeHash,
9416
- /** `a_b`: Current account balance. */
9417
- public readonly balance: U64,
9418
- /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
9419
- public readonly accumulateMinGas: ServiceGas,
9420
- /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
9421
- public readonly onTransferMinGas: ServiceGas,
9422
- /** `a_o`: Total number of octets in storage. */
9423
- public readonly storageUtilisationBytes: U64,
9424
- /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
9425
- public readonly gratisStorage: U64,
9426
- /** `a_i`: Number of items in storage. */
9427
- public readonly storageUtilisationCount: U32,
9428
- /** `a_r`: Creation account time slot. */
9429
- public readonly created: TimeSlot,
9430
- /** `a_a`: Most recent accumulation time slot. */
9431
- public readonly lastAccumulation: TimeSlot,
9432
- /** `a_p`: Parent service ID. */
9433
- public readonly parentService: ServiceId,
9434
- ) {
9435
- super();
9491
+ public readonly peak: H,
9492
+ public readonly size: number,
9493
+ ) {}
9494
+
9495
+ static fromPeak<H extends OpaqueHash>(peak: H, size: number) {
9496
+ return new Mountain(peak, size);
9497
+ }
9498
+
9499
+ static fromChildren<H extends OpaqueHash>(hasher: MmrHasher<H>, children: [Mountain<H>, Mountain<H>]) {
9500
+ const [left, right] = children;
9501
+ const peak = hasher.hashConcat(left.peak, right.peak);
9502
+ const size = left.size + right.size;
9503
+ return new Mountain(peak, size);
9504
+ }
9505
+ /** Merge with another mountain of the same size. */
9506
+ mergeWith(hasher: MmrHasher<H>, other: Mountain<H>): Mountain<H> {
9507
+ return Mountain.fromChildren(hasher, [this, other]);
9508
+ }
9509
+
9510
+ toString() {
9511
+ return `${this.size} @ ${this.peak}`;
9436
9512
  }
9437
9513
  }
9438
9514
 
9439
- declare class PreimageItem extends WithDebug {
9440
- static Codec = codec.Class(PreimageItem, {
9441
- hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
9442
- blob: codec.blob,
9515
+ type index$f_MerkleMountainRange<H extends OpaqueHash> = MerkleMountainRange<H>;
9516
+ declare const index$f_MerkleMountainRange: typeof MerkleMountainRange;
9517
+ type index$f_MmrHasher<H extends OpaqueHash> = MmrHasher<H>;
9518
+ type index$f_MmrPeaks<H extends OpaqueHash> = MmrPeaks<H>;
9519
+ type index$f_Mountain<H extends OpaqueHash> = Mountain<H>;
9520
+ declare const index$f_Mountain: typeof Mountain;
9521
+ declare const index$f_SUPER_PEAK_STRING: typeof SUPER_PEAK_STRING;
9522
+ declare namespace index$f {
9523
+ export { index$f_MerkleMountainRange as MerkleMountainRange, index$f_Mountain as Mountain, index$f_SUPER_PEAK_STRING as SUPER_PEAK_STRING };
9524
+ export type { index$f_MmrHasher as MmrHasher, index$f_MmrPeaks as MmrPeaks };
9525
+ }
9526
+
9527
+ /**
9528
+ * `H = 8`: The size of recent history, in blocks.
9529
+ *
9530
+ * https://graypaper.fluffylabs.dev/#/579bd12/416300416500
9531
+ */
9532
+ declare const MAX_RECENT_HISTORY = 8;
9533
+ type MAX_RECENT_HISTORY = typeof MAX_RECENT_HISTORY;
9534
+
9535
+ /** Array of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
9536
+ type BlocksState = KnownSizeArray<BlockState, `0..${typeof MAX_RECENT_HISTORY}`>;
9537
+
9538
+ /** Recent history of a single block. */
9539
+ declare class BlockState extends WithDebug {
9540
+ static Codec = codec.Class(BlockState, {
9541
+ headerHash: codec.bytes(HASH_SIZE).asOpaque<HeaderHash>(),
9542
+ accumulationResult: codec.bytes(HASH_SIZE),
9543
+ postStateRoot: codec.bytes(HASH_SIZE).asOpaque<StateRootHash>(),
9544
+ reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
9443
9545
  });
9444
9546
 
9445
- static create({ hash, blob }: CodecRecord<PreimageItem>) {
9446
- return new PreimageItem(hash, blob);
9547
+ static create({ headerHash, accumulationResult, postStateRoot, reported }: CodecRecord<BlockState>) {
9548
+ return new BlockState(headerHash, accumulationResult, postStateRoot, reported);
9447
9549
  }
9448
9550
 
9449
9551
  private constructor(
9450
- readonly hash: PreimageHash,
9451
- readonly blob: BytesBlob,
9552
+ /** Header hash. */
9553
+ public readonly headerHash: HeaderHash,
9554
+ /** Merkle mountain belt of accumulation result. */
9555
+ public readonly accumulationResult: KeccakHash,
9556
+ /** Posterior state root filled in with a 1-block delay. */
9557
+ public postStateRoot: StateRootHash,
9558
+ /** Reported work packages (no more than number of cores). */
9559
+ public readonly reported: HashDictionary<WorkPackageHash, WorkPackageInfo>,
9452
9560
  ) {
9453
9561
  super();
9454
9562
  }
9455
9563
  }
9456
9564
 
9457
- type StorageKey = Opaque<BytesBlob, "storage key">;
9458
-
9459
- declare class StorageItem extends WithDebug {
9460
- static Codec = codec.Class(StorageItem, {
9461
- key: codec.blob.convert(
9462
- (i) => i,
9463
- (o) => asOpaqueType(o),
9464
- ),
9465
- value: codec.blob,
9565
+ /**
9566
+ * Recent history of blocks.
9567
+ *
9568
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
9569
+ */
9570
+ declare class RecentBlocks extends WithDebug {
9571
+ static Codec = codec.Class(RecentBlocks, {
9572
+ blocks: codecKnownSizeArray(BlockState.Codec, {
9573
+ minLength: 0,
9574
+ maxLength: MAX_RECENT_HISTORY,
9575
+ typicalLength: MAX_RECENT_HISTORY,
9576
+ }),
9577
+ accumulationLog: codec.object({
9578
+ peaks: readonlyArray(codec.sequenceVarLen(codec.optional(codec.bytes(HASH_SIZE)))),
9579
+ }),
9466
9580
  });
9467
9581
 
9468
- static create({ key, value }: CodecRecord<StorageItem>) {
9469
- return new StorageItem(key, value);
9582
+ static empty() {
9583
+ return new RecentBlocks(asKnownSize([]), {
9584
+ peaks: [],
9585
+ });
9586
+ }
9587
+
9588
+ static create(a: CodecRecord<RecentBlocks>) {
9589
+ return new RecentBlocks(a.blocks, a.accumulationLog);
9470
9590
  }
9471
9591
 
9472
9592
  private constructor(
9473
- readonly key: StorageKey,
9474
- readonly value: BytesBlob,
9593
+ /**
9594
+ * Most recent blocks.
9595
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fea010fea01?v=0.6.7
9596
+ */
9597
+ public readonly blocks: BlocksState,
9598
+ /**
9599
+ * Accumulation output log.
9600
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0f02020f0202?v=0.6.7
9601
+ */
9602
+ public readonly accumulationLog: MmrPeaks<KeccakHash>,
9475
9603
  ) {
9476
9604
  super();
9477
9605
  }
9478
9606
  }
9479
9607
 
9480
- declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
9481
- type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
9482
- declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
9483
- const knownSize = asKnownSize(items) as LookupHistorySlots;
9484
- if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
9485
- throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
9486
- }
9487
- return knownSize;
9488
- }
9608
+ type RecentBlocksView = DescribedBy<typeof RecentBlocks.Codec.View>;
9489
9609
 
9490
- /** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
9491
- declare class LookupHistoryItem {
9492
- constructor(
9493
- public readonly hash: PreimageHash,
9494
- public readonly length: U32,
9495
- /**
9496
- * Preimage availability history as a sequence of time slots.
9497
- * See PreimageStatus and the following GP fragment for more details.
9498
- * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
9499
- public readonly slots: LookupHistorySlots,
9500
- ) {}
9610
+ type RecentlyAccumulated = PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
9501
9611
 
9502
- static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
9503
- if ("slots" in item) {
9504
- return item.slots.length === 0;
9505
- }
9506
- return item.length === 0;
9507
- }
9508
- }
9509
-
9510
- /** Dictionary entry of services that auto-accumulate every block. */
9511
- declare class AutoAccumulate {
9512
- static Codec = codec.Class(AutoAccumulate, {
9513
- service: codec.u32.asOpaque<ServiceId>(),
9514
- gasLimit: codec.u64.asOpaque<ServiceGas>(),
9515
- });
9612
+ declare const recentlyAccumulatedCodec = codecPerEpochBlock<
9613
+ ImmutableHashSet<WorkPackageHash>,
9614
+ SequenceView<WorkPackageHash>
9615
+ >(
9616
+ codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>()).convert(
9617
+ (x) => Array.from(x),
9618
+ (x) => HashSet.from(x),
9619
+ ),
9620
+ );
9516
9621
 
9517
- static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
9518
- return new AutoAccumulate(service, gasLimit);
9519
- }
9622
+ type RecentlyAccumulatedView = DescribedBy<typeof recentlyAccumulatedCodec.View>;
9520
9623
 
9521
- private constructor(
9522
- /** Service id that auto-accumulates. */
9523
- readonly service: ServiceId,
9524
- /** Gas limit for auto-accumulation. */
9525
- readonly gasLimit: ServiceGas,
9526
- ) {}
9527
- }
9624
+ /**
9625
+ * Fixed size of validator metadata.
9626
+ *
9627
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
9628
+ */
9629
+ declare const VALIDATOR_META_BYTES = 128;
9630
+ type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
9528
9631
 
9529
9632
  /**
9530
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
9633
+ * Details about validators' identity.
9634
+ *
9635
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
9531
9636
  */
9532
- declare class PrivilegedServices {
9533
- /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
9534
- static Codec = codec.Class(PrivilegedServices, {
9535
- manager: codec.u32.asOpaque<ServiceId>(),
9536
- assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
9537
- delegator: codec.u32.asOpaque<ServiceId>(),
9538
- registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
9539
- ? codec.u32.asOpaque<ServiceId>()
9540
- : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
9541
- autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
9637
+ declare class ValidatorData extends WithDebug {
9638
+ static Codec = codec.Class(ValidatorData, {
9639
+ bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
9640
+ ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
9641
+ bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
9642
+ metadata: codec.bytes(VALIDATOR_META_BYTES),
9542
9643
  });
9543
9644
 
9544
- static create(a: CodecRecord<PrivilegedServices>) {
9545
- return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
9645
+ static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
9646
+ return new ValidatorData(bandersnatch, ed25519, bls, metadata);
9546
9647
  }
9547
9648
 
9548
9649
  private constructor(
9549
- /**
9550
- * `χ_M`: Manages alteration of χ from block to block,
9551
- * as well as bestow services with storage deposit credits.
9552
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
9553
- */
9554
- readonly manager: ServiceId,
9555
- /** `χ_V`: Managers validator keys. */
9556
- readonly delegator: ServiceId,
9557
- /**
9558
- * `χ_R`: Manages the creation of services in protected range.
9559
- *
9560
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
9561
- */
9562
- readonly registrar: ServiceId,
9563
- /** `χ_A`: Manages authorization queue one for each core. */
9564
- readonly assigners: PerCore<ServiceId>,
9565
- /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
9566
- readonly autoAccumulateServices: readonly AutoAccumulate[],
9567
- ) {}
9650
+ /** Bandersnatch public key. */
9651
+ public readonly bandersnatch: BandersnatchKey,
9652
+ /** ED25519 key data. */
9653
+ public readonly ed25519: Ed25519Key,
9654
+ /** BLS public key. */
9655
+ public readonly bls: BlsKey,
9656
+ /** Validator-defined additional metdata. */
9657
+ public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
9658
+ ) {
9659
+ super();
9660
+ }
9568
9661
  }
9569
9662
 
9570
- declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
9663
+ type ValidatorDataView = DescribedBy<typeof ValidatorData.Codec.View>;
9571
9664
 
9572
- /** Merkle Mountain Range peaks. */
9573
- interface MmrPeaks<H extends OpaqueHash> {
9574
- /**
9575
- * Peaks at particular positions.
9576
- *
9577
- * In case there is no merkle trie at given index, `null` is placed.
9578
- */
9579
- peaks: readonly (H | null)[];
9580
- }
9665
+ declare const validatorsDataCodec = codecPerValidator(ValidatorData.Codec);
9581
9666
 
9582
- /** Hasher interface for MMR. */
9583
- interface MmrHasher<H extends OpaqueHash> {
9584
- /** Hash two items together. */
9585
- hashConcat(a: H, b: H): H;
9586
- /** Hash two items together with extra bytes blob prepended. */
9587
- hashConcatPrepend(id: BytesBlob, a: H, b: H): H;
9667
+ declare enum SafroleSealingKeysKind {
9668
+ Tickets = 0,
9669
+ Keys = 1,
9588
9670
  }
9589
9671
 
9590
- /**
9591
- * Merkle Mountain Range.
9592
- *
9593
- * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
9594
- */
9595
- declare class MerkleMountainRange<H extends OpaqueHash> {
9596
- /** Construct an empty MMR. */
9597
- static empty<H extends OpaqueHash>(hasher: MmrHasher<H>) {
9598
- return new MerkleMountainRange(hasher);
9599
- }
9672
+ type SafroleSealingKeys =
9673
+ | {
9674
+ kind: SafroleSealingKeysKind.Keys;
9675
+ keys: PerEpochBlock<BandersnatchKey>;
9676
+ }
9677
+ | {
9678
+ kind: SafroleSealingKeysKind.Tickets;
9679
+ tickets: PerEpochBlock<Ticket>;
9680
+ };
9600
9681
 
9601
- /** Construct a new MMR from existing peaks. */
9602
- static fromPeaks<H extends OpaqueHash>(hasher: MmrHasher<H>, mmr: MmrPeaks<H>) {
9603
- return new MerkleMountainRange(
9604
- hasher,
9605
- mmr.peaks
9606
- .reduce((acc: Mountain<H>[], peak, index) => {
9607
- if (peak !== null) {
9608
- acc.push(Mountain.fromPeak(peak, 2 ** index));
9609
- }
9610
- return acc;
9611
- }, [])
9612
- .reverse(),
9613
- );
9614
- }
9682
+ declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
9615
9683
 
9616
- private constructor(
9617
- private readonly hasher: MmrHasher<H>,
9618
- /** Store non-empty merkle tries (mountains) ordered by descending size. */
9619
- private readonly mountains: Mountain<H>[] = [],
9620
- ) {}
9684
+ declare class SafroleSealingKeysData extends WithDebug {
9685
+ static Codec = codecWithContext((context) => {
9686
+ return codec.custom<SafroleSealingKeys>(
9687
+ {
9688
+ name: "SafroleSealingKeys",
9689
+ sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
9690
+ },
9691
+ (e, x) => {
9692
+ e.varU32(tryAsU32(x.kind));
9693
+ if (x.kind === SafroleSealingKeysKind.Keys) {
9694
+ e.sequenceFixLen(codecBandersnatchKey, x.keys);
9695
+ } else {
9696
+ e.sequenceFixLen(Ticket.Codec, x.tickets);
9697
+ }
9698
+ },
9699
+ (d) => {
9700
+ const epochLength = context.epochLength;
9701
+ const kind = d.varU32();
9702
+ if (kind === SafroleSealingKeysKind.Keys) {
9703
+ const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
9704
+ return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
9705
+ }
9621
9706
 
9622
- /**
9623
- * Append a new hash to the MMR structure.
9624
- *
9625
- * https://graypaper.fluffylabs.dev/#/5f542d7/3b11003b1100?v=0.6.2
9626
- */
9627
- append(hash: H) {
9628
- let newMountain = Mountain.fromPeak(hash, 1);
9707
+ if (kind === SafroleSealingKeysKind.Tickets) {
9708
+ const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
9709
+ return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
9710
+ }
9629
9711
 
9630
- for (;;) {
9631
- const last = this.mountains.pop();
9632
- if (last === undefined) {
9633
- this.mountains.push(newMountain);
9634
- return;
9635
- }
9712
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9713
+ },
9714
+ (s) => {
9715
+ const kind = s.decoder.varU32();
9716
+ if (kind === SafroleSealingKeysKind.Keys) {
9717
+ s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
9718
+ return;
9719
+ }
9720
+ if (kind === SafroleSealingKeysKind.Tickets) {
9721
+ s.sequenceFixLen(Ticket.Codec, context.epochLength);
9722
+ return;
9723
+ }
9636
9724
 
9637
- if (last.size !== newMountain.size) {
9638
- this.mountains.push(last);
9639
- this.mountains.push(newMountain);
9640
- return;
9641
- }
9725
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9726
+ },
9727
+ );
9728
+ });
9642
9729
 
9643
- newMountain = last.mergeWith(this.hasher, newMountain);
9644
- }
9730
+ static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
9731
+ return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
9645
9732
  }
9646
9733
 
9647
- /**
9648
- * Root of the entire structure.
9649
- *
9650
- * https://graypaper.fluffylabs.dev/#/5f542d7/3b20013b2001?v=0.6.2
9651
- */
9652
- getSuperPeakHash(): H {
9653
- if (this.mountains.length === 0) {
9654
- return Bytes.zero(HASH_SIZE).asOpaque();
9655
- }
9656
- const revMountains = this.mountains.slice().reverse();
9657
- const length = revMountains.length;
9658
- let lastHash = revMountains[0].peak;
9659
- for (let i = 1; i < length; i++) {
9660
- const mountain = revMountains[i];
9661
- lastHash = this.hasher.hashConcatPrepend(SUPER_PEAK_STRING, lastHash, mountain.peak);
9662
- }
9663
- return lastHash;
9734
+ static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
9735
+ return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
9664
9736
  }
9665
9737
 
9666
- /** Get current peaks. */
9667
- getPeaks(): MmrPeaks<H> {
9668
- const peaks: (H | null)[] = [];
9669
- const mountains = this.mountains;
9670
-
9671
- // always 2**index
9672
- let currentSize = 1;
9673
- let currentIdx = mountains.length - 1;
9674
- while (currentIdx >= 0) {
9675
- const currentItem = mountains[currentIdx];
9676
- if (currentItem.size >= currentSize && currentItem.size < 2 * currentSize) {
9677
- peaks.push(currentItem.peak);
9678
- currentIdx -= 1;
9679
- } else {
9680
- peaks.push(null);
9681
- }
9682
- // move to the next index.
9683
- currentSize = currentSize << 1;
9684
- }
9685
- return { peaks };
9738
+ private constructor(
9739
+ readonly kind: SafroleSealingKeysKind,
9740
+ readonly keys?: PerEpochBlock<BandersnatchKey>,
9741
+ readonly tickets?: PerEpochBlock<Ticket>,
9742
+ ) {
9743
+ super();
9686
9744
  }
9687
9745
  }
9688
9746
 
9689
- /** An internal helper structure to represent a merkle trie for MMR. */
9690
- declare class Mountain<H extends OpaqueHash> {
9691
- private constructor(
9692
- public readonly peak: H,
9693
- public readonly size: number,
9694
- ) {}
9695
-
9696
- static fromPeak<H extends OpaqueHash>(peak: H, size: number) {
9697
- return new Mountain(peak, size);
9698
- }
9747
+ declare class SafroleData {
9748
+ static Codec = codec.Class(SafroleData, {
9749
+ nextValidatorData: codecPerValidator(ValidatorData.Codec),
9750
+ epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
9751
+ sealingKeySeries: SafroleSealingKeysData.Codec,
9752
+ ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
9753
+ });
9699
9754
 
9700
- static fromChildren<H extends OpaqueHash>(hasher: MmrHasher<H>, children: [Mountain<H>, Mountain<H>]) {
9701
- const [left, right] = children;
9702
- const peak = hasher.hashConcat(left.peak, right.peak);
9703
- const size = left.size + right.size;
9704
- return new Mountain(peak, size);
9705
- }
9706
- /** Merge with another montain of the same size. */
9707
- mergeWith(hasher: MmrHasher<H>, other: Mountain<H>): Mountain<H> {
9708
- return Mountain.fromChildren(hasher, [this, other]);
9755
+ static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
9756
+ return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
9709
9757
  }
9710
9758
 
9711
- toString() {
9712
- return `${this.size} @ ${this.peak}`;
9713
- }
9759
+ private constructor(
9760
+ /** gamma_k */
9761
+ public readonly nextValidatorData: PerValidator<ValidatorData>,
9762
+ /** gamma_z */
9763
+ public readonly epochRoot: BandersnatchRingRoot,
9764
+ /** gamma_s */
9765
+ public readonly sealingKeySeries: SafroleSealingKeys,
9766
+ /** gamma_a */
9767
+ public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
9768
+ ) {}
9714
9769
  }
9715
9770
 
9716
- type index$f_MerkleMountainRange<H extends OpaqueHash> = MerkleMountainRange<H>;
9717
- declare const index$f_MerkleMountainRange: typeof MerkleMountainRange;
9718
- type index$f_MmrHasher<H extends OpaqueHash> = MmrHasher<H>;
9719
- type index$f_MmrPeaks<H extends OpaqueHash> = MmrPeaks<H>;
9720
- type index$f_Mountain<H extends OpaqueHash> = Mountain<H>;
9721
- declare const index$f_Mountain: typeof Mountain;
9722
- declare const index$f_SUPER_PEAK_STRING: typeof SUPER_PEAK_STRING;
9723
- declare namespace index$f {
9724
- export { index$f_MerkleMountainRange as MerkleMountainRange, index$f_Mountain as Mountain, index$f_SUPER_PEAK_STRING as SUPER_PEAK_STRING };
9725
- export type { index$f_MmrHasher as MmrHasher, index$f_MmrPeaks as MmrPeaks };
9726
- }
9771
+ type SafroleDataView = DescribedBy<typeof SafroleData.Codec.View>;
9727
9772
 
9728
9773
  /**
9729
- * `H = 8`: The size of recent history, in blocks.
9774
+ * `B_S`: The basic minimum balance which all services require.
9730
9775
  *
9731
- * https://graypaper.fluffylabs.dev/#/579bd12/416300416500
9776
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
9732
9777
  */
9733
- declare const MAX_RECENT_HISTORY = 8;
9734
- type MAX_RECENT_HISTORY = typeof MAX_RECENT_HISTORY;
9778
+ declare const BASE_SERVICE_BALANCE = 100n;
9779
+ /**
9780
+ * `B_I`: The additional minimum balance required per item of elective service state.
9781
+ *
9782
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
9783
+ */
9784
+ declare const ELECTIVE_ITEM_BALANCE = 10n;
9785
+ /**
9786
+ * `B_L`: The additional minimum balance required per octet of elective service state.
9787
+ *
9788
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
9789
+ */
9790
+ declare const ELECTIVE_BYTE_BALANCE = 1n;
9735
9791
 
9736
- /** Array of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
9737
- type BlocksState = KnownSizeArray<BlockState, `0..${typeof MAX_RECENT_HISTORY}`>;
9792
+ declare const zeroSizeHint: SizeHint = {
9793
+ bytes: 0,
9794
+ isExact: true,
9795
+ };
9738
9796
 
9739
- /** Recent history of a single block. */
9740
- declare class BlockState extends WithDebug {
9741
- static Codec = codec.Class(BlockState, {
9742
- headerHash: codec.bytes(HASH_SIZE).asOpaque<HeaderHash>(),
9743
- accumulationResult: codec.bytes(HASH_SIZE),
9744
- postStateRoot: codec.bytes(HASH_SIZE).asOpaque<StateRootHash>(),
9745
- reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
9797
+ /** 0-byte read, return given default value */
9798
+ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
9799
+ Descriptor.new<T>(
9800
+ "ignoreValue",
9801
+ zeroSizeHint,
9802
+ (_e, _v) => {},
9803
+ (_d) => defaultValue,
9804
+ (_s) => {},
9805
+ );
9806
+
9807
+ /** Encode and decode object with leading version number. */
9808
+ declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
9809
+ Descriptor.new<T>(
9810
+ "withVersion",
9811
+ {
9812
+ bytes: val.sizeHint.bytes + 8,
9813
+ isExact: false,
9814
+ },
9815
+ (e, v) => {
9816
+ e.varU64(0n);
9817
+ val.encode(e, v);
9818
+ },
9819
+ (d) => {
9820
+ const version = d.varU64();
9821
+ if (version !== 0n) {
9822
+ throw new Error("Non-zero version is not supported!");
9823
+ }
9824
+ return val.decode(d);
9825
+ },
9826
+ (s) => {
9827
+ s.varU64();
9828
+ val.skip(s);
9829
+ },
9830
+ );
9831
+
9832
+ /**
9833
+ * Service account details.
9834
+ *
9835
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
9836
+ */
9837
+ declare class ServiceAccountInfo extends WithDebug {
9838
+ static Codec = codec.Class(ServiceAccountInfo, {
9839
+ codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
9840
+ balance: codec.u64,
9841
+ accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9842
+ onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9843
+ storageUtilisationBytes: codec.u64,
9844
+ gratisStorage: codec.u64,
9845
+ storageUtilisationCount: codec.u32,
9846
+ created: codec.u32.convert((x) => x, tryAsTimeSlot),
9847
+ lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
9848
+ parentService: codec.u32.convert((x) => x, tryAsServiceId),
9746
9849
  });
9747
9850
 
9748
- static create({ headerHash, accumulationResult, postStateRoot, reported }: CodecRecord<BlockState>) {
9749
- return new BlockState(headerHash, accumulationResult, postStateRoot, reported);
9851
+ static create(a: CodecRecord<ServiceAccountInfo>) {
9852
+ return new ServiceAccountInfo(
9853
+ a.codeHash,
9854
+ a.balance,
9855
+ a.accumulateMinGas,
9856
+ a.onTransferMinGas,
9857
+ a.storageUtilisationBytes,
9858
+ a.gratisStorage,
9859
+ a.storageUtilisationCount,
9860
+ a.created,
9861
+ a.lastAccumulation,
9862
+ a.parentService,
9863
+ );
9864
+ }
9865
+
9866
+ /**
9867
+ * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
9868
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
9869
+ */
9870
+ static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
9871
+ const storageCost =
9872
+ BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
9873
+
9874
+ if (storageCost < 0n) {
9875
+ return tryAsU64(0);
9876
+ }
9877
+
9878
+ if (storageCost >= 2n ** 64n) {
9879
+ return tryAsU64(2n ** 64n - 1n);
9880
+ }
9881
+
9882
+ return tryAsU64(storageCost);
9750
9883
  }
9751
9884
 
9752
9885
  private constructor(
9753
- /** Header hash. */
9754
- public readonly headerHash: HeaderHash,
9755
- /** Merkle mountain belt of accumulation result. */
9756
- public readonly accumulationResult: KeccakHash,
9757
- /** Posterior state root filled in with a 1-block delay. */
9758
- public postStateRoot: StateRootHash,
9759
- /** Reported work packages (no more than number of cores). */
9760
- public readonly reported: HashDictionary<WorkPackageHash, WorkPackageInfo>,
9886
+ /** `a_c`: Hash of the service code. */
9887
+ public readonly codeHash: CodeHash,
9888
+ /** `a_b`: Current account balance. */
9889
+ public readonly balance: U64,
9890
+ /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
9891
+ public readonly accumulateMinGas: ServiceGas,
9892
+ /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
9893
+ public readonly onTransferMinGas: ServiceGas,
9894
+ /** `a_o`: Total number of octets in storage. */
9895
+ public readonly storageUtilisationBytes: U64,
9896
+ /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
9897
+ public readonly gratisStorage: U64,
9898
+ /** `a_i`: Number of items in storage. */
9899
+ public readonly storageUtilisationCount: U32,
9900
+ /** `a_r`: Creation account time slot. */
9901
+ public readonly created: TimeSlot,
9902
+ /** `a_a`: Most recent accumulation time slot. */
9903
+ public readonly lastAccumulation: TimeSlot,
9904
+ /** `a_p`: Parent service ID. */
9905
+ public readonly parentService: ServiceId,
9761
9906
  ) {
9762
9907
  super();
9763
9908
  }
9764
9909
  }
9765
9910
 
9766
- declare class RecentBlocks extends WithDebug {
9767
- static Codec = codec.Class(RecentBlocks, {
9768
- blocks: codecKnownSizeArray(BlockState.Codec, {
9769
- minLength: 0,
9770
- maxLength: MAX_RECENT_HISTORY,
9771
- typicalLength: MAX_RECENT_HISTORY,
9772
- }),
9773
- accumulationLog: codec.object({
9774
- peaks: readonlyArray(codec.sequenceVarLen(codec.optional(codec.bytes(HASH_SIZE)))),
9775
- }),
9911
+ type ServiceAccountInfoView = DescribedBy<typeof ServiceAccountInfo.Codec.View>;
9912
+
9913
+ declare class PreimageItem extends WithDebug {
9914
+ static Codec = codec.Class(PreimageItem, {
9915
+ hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
9916
+ blob: codec.blob,
9776
9917
  });
9777
9918
 
9778
- static create(a: CodecRecord<RecentBlocks>) {
9779
- return new RecentBlocks(a.blocks, a.accumulationLog);
9919
+ static create({ hash, blob }: CodecRecord<PreimageItem>) {
9920
+ return new PreimageItem(hash, blob);
9780
9921
  }
9781
9922
 
9782
9923
  private constructor(
9783
- /**
9784
- * Most recent blocks.
9785
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fea010fea01?v=0.6.7
9786
- */
9787
- public readonly blocks: BlocksState,
9788
- /**
9789
- * Accumulation output log.
9790
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0f02020f0202?v=0.6.7
9791
- */
9792
- public readonly accumulationLog: MmrPeaks<KeccakHash>,
9924
+ readonly hash: PreimageHash,
9925
+ readonly blob: BytesBlob,
9793
9926
  ) {
9794
9927
  super();
9795
9928
  }
9796
9929
  }
9797
9930
 
9798
- /**
9799
- * Recent history of blocks.
9800
- *
9801
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
9802
- */
9803
- declare class RecentBlocksHistory extends WithDebug {
9804
- static Codec = Descriptor.new<RecentBlocksHistory>(
9805
- "RecentBlocksHistory",
9806
- RecentBlocks.Codec.sizeHint,
9807
- (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()),
9808
- (decoder) => {
9809
- const recentBlocks = RecentBlocks.Codec.decode(decoder);
9810
- return RecentBlocksHistory.create(recentBlocks);
9811
- },
9812
- (skip) => {
9813
- return RecentBlocks.Codec.skip(skip);
9814
- },
9815
- );
9816
-
9817
- static create(recentBlocks: RecentBlocks) {
9818
- return new RecentBlocksHistory(recentBlocks);
9819
- }
9931
+ type StorageKey = Opaque<BytesBlob, "storage key">;
9820
9932
 
9821
- static empty() {
9822
- return RecentBlocksHistory.create(
9823
- RecentBlocks.create({
9824
- blocks: asKnownSize([]),
9825
- accumulationLog: { peaks: [] },
9826
- }),
9827
- );
9828
- }
9933
+ declare class StorageItem extends WithDebug {
9934
+ static Codec = codec.Class(StorageItem, {
9935
+ key: codec.blob.convert(
9936
+ (i) => i,
9937
+ (o) => asOpaqueType(o),
9938
+ ),
9939
+ value: codec.blob,
9940
+ });
9829
9941
 
9830
- /**
9831
- * Returns the block's BEEFY super peak.
9832
- */
9833
- static accumulationResult(block: BlockState): KeccakHash {
9834
- return (block as BlockState).accumulationResult;
9942
+ static create({ key, value }: CodecRecord<StorageItem>) {
9943
+ return new StorageItem(key, value);
9835
9944
  }
9836
9945
 
9837
- private constructor(private readonly current: RecentBlocks | null) {
9946
+ private constructor(
9947
+ readonly key: StorageKey,
9948
+ readonly value: BytesBlob,
9949
+ ) {
9838
9950
  super();
9839
9951
  }
9952
+ }
9840
9953
 
9841
- /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
9842
- get blocks(): readonly BlockState[] {
9843
- if (this.current !== null) {
9844
- return this.current.blocks;
9845
- }
9846
-
9847
- throw new Error("RecentBlocksHistory is in invalid state");
9848
- }
9849
-
9850
- asCurrent() {
9851
- if (this.current === null) {
9852
- throw new Error("Cannot access current RecentBlocks format");
9853
- }
9854
- return this.current;
9855
- }
9856
-
9857
- updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
9858
- if (this.current !== null) {
9859
- return RecentBlocksHistory.create(
9860
- RecentBlocks.create({
9861
- ...this.current,
9862
- blocks: asOpaqueType(blocks as BlockState[]),
9863
- }),
9864
- );
9865
- }
9866
-
9867
- throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
9954
+ declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
9955
+ type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
9956
+ declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
9957
+ const knownSize = asKnownSize(items) as LookupHistorySlots;
9958
+ if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
9959
+ throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
9868
9960
  }
9961
+ return knownSize;
9869
9962
  }
9870
9963
 
9871
- /**
9872
- * Fixed size of validator metadata.
9873
- *
9874
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
9875
- */
9876
- declare const VALIDATOR_META_BYTES = 128;
9877
- type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
9878
-
9879
- /**
9880
- * Details about validators' identity.
9881
- *
9882
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
9883
- */
9884
- declare class ValidatorData extends WithDebug {
9885
- static Codec = codec.Class(ValidatorData, {
9886
- bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
9887
- ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
9888
- bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
9889
- metadata: codec.bytes(VALIDATOR_META_BYTES),
9890
- });
9891
-
9892
- static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
9893
- return new ValidatorData(bandersnatch, ed25519, bls, metadata);
9894
- }
9964
+ /** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
9965
+ declare class LookupHistoryItem {
9966
+ constructor(
9967
+ public readonly hash: PreimageHash,
9968
+ public readonly length: U32,
9969
+ /**
9970
+ * Preimage availability history as a sequence of time slots.
9971
+ * See PreimageStatus and the following GP fragment for more details.
9972
+ * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
9973
+ public readonly slots: LookupHistorySlots,
9974
+ ) {}
9895
9975
 
9896
- private constructor(
9897
- /** Bandersnatch public key. */
9898
- public readonly bandersnatch: BandersnatchKey,
9899
- /** ED25519 key data. */
9900
- public readonly ed25519: Ed25519Key,
9901
- /** BLS public key. */
9902
- public readonly bls: BlsKey,
9903
- /** Validator-defined additional metdata. */
9904
- public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
9905
- ) {
9906
- super();
9976
+ static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
9977
+ if ("slots" in item) {
9978
+ return item.slots.length === 0;
9979
+ }
9980
+ return item.length === 0;
9907
9981
  }
9908
9982
  }
9909
9983
 
9910
- declare enum SafroleSealingKeysKind {
9911
- Tickets = 0,
9912
- Keys = 1,
9913
- }
9914
-
9915
- type SafroleSealingKeys =
9916
- | {
9917
- kind: SafroleSealingKeysKind.Keys;
9918
- keys: PerEpochBlock<BandersnatchKey>;
9919
- }
9920
- | {
9921
- kind: SafroleSealingKeysKind.Tickets;
9922
- tickets: PerEpochBlock<Ticket>;
9923
- };
9924
-
9925
- declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
9926
-
9927
- declare class SafroleSealingKeysData extends WithDebug {
9928
- static Codec = codecWithContext((context) => {
9929
- return codec.custom<SafroleSealingKeys>(
9930
- {
9931
- name: "SafroleSealingKeys",
9932
- sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
9933
- },
9934
- (e, x) => {
9935
- e.varU32(tryAsU32(x.kind));
9936
- if (x.kind === SafroleSealingKeysKind.Keys) {
9937
- e.sequenceFixLen(codecBandersnatchKey, x.keys);
9938
- } else {
9939
- e.sequenceFixLen(Ticket.Codec, x.tickets);
9940
- }
9941
- },
9942
- (d) => {
9943
- const epochLength = context.epochLength;
9944
- const kind = d.varU32();
9945
- if (kind === SafroleSealingKeysKind.Keys) {
9946
- const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
9947
- return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
9948
- }
9949
-
9950
- if (kind === SafroleSealingKeysKind.Tickets) {
9951
- const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
9952
- return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
9953
- }
9954
-
9955
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9956
- },
9957
- (s) => {
9958
- const kind = s.decoder.varU32();
9959
- if (kind === SafroleSealingKeysKind.Keys) {
9960
- s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
9961
- return;
9962
- }
9963
- if (kind === SafroleSealingKeysKind.Tickets) {
9964
- s.sequenceFixLen(Ticket.Codec, context.epochLength);
9965
- return;
9966
- }
9967
-
9968
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9969
- },
9970
- );
9984
+ /** Dictionary entry of services that auto-accumulate every block. */
9985
+ declare class AutoAccumulate {
9986
+ static Codec = codec.Class(AutoAccumulate, {
9987
+ service: codec.u32.asOpaque<ServiceId>(),
9988
+ gasLimit: codec.u64.asOpaque<ServiceGas>(),
9971
9989
  });
9972
9990
 
9973
- static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
9974
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
9975
- }
9976
-
9977
- static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
9978
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
9991
+ static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
9992
+ return new AutoAccumulate(service, gasLimit);
9979
9993
  }
9980
9994
 
9981
9995
  private constructor(
9982
- readonly kind: SafroleSealingKeysKind,
9983
- readonly keys?: PerEpochBlock<BandersnatchKey>,
9984
- readonly tickets?: PerEpochBlock<Ticket>,
9985
- ) {
9986
- super();
9987
- }
9996
+ /** Service id that auto-accumulates. */
9997
+ readonly service: ServiceId,
9998
+ /** Gas limit for auto-accumulation. */
9999
+ readonly gasLimit: ServiceGas,
10000
+ ) {}
9988
10001
  }
9989
10002
 
9990
- declare class SafroleData {
9991
- static Codec = codec.Class(SafroleData, {
9992
- nextValidatorData: codecPerValidator(ValidatorData.Codec),
9993
- epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
9994
- sealingKeySeries: SafroleSealingKeysData.Codec,
9995
- ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
10003
+ /**
10004
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
10005
+ */
10006
+ declare class PrivilegedServices {
10007
+ /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
10008
+ static Codec = codec.Class(PrivilegedServices, {
10009
+ manager: codec.u32.asOpaque<ServiceId>(),
10010
+ assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
10011
+ delegator: codec.u32.asOpaque<ServiceId>(),
10012
+ registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
10013
+ ? codec.u32.asOpaque<ServiceId>()
10014
+ : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
10015
+ autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
9996
10016
  });
9997
10017
 
9998
- static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
9999
- return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
10018
+ static create(a: CodecRecord<PrivilegedServices>) {
10019
+ return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
10000
10020
  }
10001
10021
 
10002
10022
  private constructor(
10003
- /** gamma_k */
10004
- public readonly nextValidatorData: PerValidator<ValidatorData>,
10005
- /** gamma_z */
10006
- public readonly epochRoot: BandersnatchRingRoot,
10007
- /** gamma_s */
10008
- public readonly sealingKeySeries: SafroleSealingKeys,
10009
- /** gamma_a */
10010
- public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
10023
+ /**
10024
+ * `χ_M`: Manages alteration of χ from block to block,
10025
+ * as well as bestow services with storage deposit credits.
10026
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
10027
+ */
10028
+ readonly manager: ServiceId,
10029
+ /** `χ_V`: Managers validator keys. */
10030
+ readonly delegator: ServiceId,
10031
+ /**
10032
+ * `χ_R`: Manages the creation of services in protected range.
10033
+ *
10034
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
10035
+ */
10036
+ readonly registrar: ServiceId,
10037
+ /** `χ_A`: Manages authorization queue one for each core. */
10038
+ readonly assigners: PerCore<ServiceId>,
10039
+ /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
10040
+ readonly autoAccumulateServices: readonly AutoAccumulate[],
10011
10041
  ) {}
10012
10042
  }
10013
10043
 
@@ -10289,6 +10319,8 @@ declare class StatisticsData {
10289
10319
  ) {}
10290
10320
  }
10291
10321
 
10322
+ type StatisticsDataView = DescribedBy<typeof StatisticsData.Codec.View>;
10323
+
10292
10324
  /**
10293
10325
  * In addition to the entropy accumulator η_0, we retain
10294
10326
  * three additional historical values of the accumulator at
@@ -10340,7 +10372,7 @@ type State = {
10340
10372
  /**
10341
10373
  * `γₖ gamma_k`: The keys for the validators of the next epoch, equivalent to those keys which constitute γ_z .
10342
10374
  */
10343
- readonly nextValidatorData: SafroleData["nextValidatorData"];
10375
+ readonly nextValidatorData: PerValidator<ValidatorData>;
10344
10376
 
10345
10377
  /**
10346
10378
  * `κ kappa`: Validators, who are the set of economic actors uniquely
@@ -10386,7 +10418,7 @@ type State = {
10386
10418
  *
10387
10419
  * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10388
10420
  */
10389
- readonly authPools: PerCore<KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>>;
10421
+ readonly authPools: PerCore<AuthorizationPool>;
10390
10422
 
10391
10423
  /**
10392
10424
  * `φ phi`: A queue of authorizers for each core used to fill up the pool.
@@ -10395,14 +10427,14 @@ type State = {
10395
10427
  *
10396
10428
  * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10397
10429
  */
10398
- readonly authQueues: PerCore<FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>>;
10430
+ readonly authQueues: PerCore<AuthorizationQueue>;
10399
10431
 
10400
10432
  /**
10401
10433
  * `β beta`: State of the blocks from recent history.
10402
10434
  *
10403
10435
  * https://graypaper.fluffylabs.dev/#/579bd12/0fb7010fb701
10404
10436
  */
10405
- readonly recentBlocks: RecentBlocksHistory;
10437
+ readonly recentBlocks: RecentBlocks;
10406
10438
 
10407
10439
  /**
10408
10440
  * `π pi`: Previous and current statistics of each validator,
@@ -10419,7 +10451,7 @@ type State = {
10419
10451
  *
10420
10452
  * https://graypaper.fluffylabs.dev/#/5f542d7/165300165500
10421
10453
  */
10422
- readonly accumulationQueue: PerEpochBlock<readonly NotYetAccumulatedReport[]>;
10454
+ readonly accumulationQueue: AccumulationQueue;
10423
10455
 
10424
10456
  /**
10425
10457
  * `ξ xi`: In order to know which work-packages have been
@@ -10429,7 +10461,7 @@ type State = {
10429
10461
  *
10430
10462
  * https://graypaper.fluffylabs.dev/#/5f542d7/161a00161d00
10431
10463
  */
10432
- readonly recentlyAccumulated: PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
10464
+ readonly recentlyAccumulated: RecentlyAccumulated;
10433
10465
 
10434
10466
  /*
10435
10467
  * `γₐ gamma_a`: The ticket accumulator - a series of highest-scoring ticket identifiers to be
@@ -10502,61 +10534,168 @@ interface Service {
10502
10534
  getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null;
10503
10535
  }
10504
10536
 
10505
- declare enum UpdatePreimageKind {
10506
- /** Insert new preimage and optionally update it's lookup history. */
10507
- Provide = 0,
10508
- /** Remove a preimage and it's lookup history. */
10509
- Remove = 1,
10510
- /** update or add lookup history for preimage hash/len to given value. */
10511
- UpdateOrAdd = 2,
10512
- }
10537
+ /** Additional marker interface, when state view is supported/required. */
10538
+ type WithStateView<V = StateView> = {
10539
+ /** Get view of the state. */
10540
+ view(): V;
10541
+ };
10542
+
10513
10543
  /**
10514
- * A preimage update.
10544
+ * A non-decoding version of the `State`.
10515
10545
  *
10516
- * Can be one of the following cases:
10517
- * 1. Provide a new preimage blob and set the lookup history to available at `slot`.
10518
- * 2. Remove (expunge) a preimage and it's lookup history.
10519
- * 3. Update `LookupHistory` with given value.
10546
+ * Note we don't require all fields to have view accessors, since
10547
+ * it's only beneficial for large collections to be read via views.
10548
+ *
10549
+ * https://graypaper.fluffylabs.dev/#/579bd12/08f10008f100
10520
10550
  */
10521
- declare class UpdatePreimage {
10522
- private constructor(
10523
- public readonly serviceId: ServiceId,
10524
- public readonly action:
10525
- | {
10526
- kind: UpdatePreimageKind.Provide;
10527
- preimage: PreimageItem;
10528
- // optionally set lookup history of that preimage to "available"
10529
- slot: TimeSlot | null;
10530
- }
10531
- | {
10532
- kind: UpdatePreimageKind.Remove;
10533
- hash: PreimageHash;
10534
- length: U32;
10535
- }
10536
- | {
10537
- kind: UpdatePreimageKind.UpdateOrAdd;
10538
- item: LookupHistoryItem;
10539
- },
10540
- ) {}
10541
-
10542
- /** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
10543
- static provide({
10544
- serviceId,
10545
- preimage,
10546
- slot,
10547
- }: {
10548
- serviceId: ServiceId;
10549
- preimage: PreimageItem;
10550
- slot: TimeSlot | null;
10551
- }) {
10552
- return new UpdatePreimage(serviceId, {
10553
- kind: UpdatePreimageKind.Provide,
10554
- preimage,
10555
- slot,
10556
- });
10557
- }
10551
+ type StateView = {
10552
+ /**
10558
10553
 
10559
- /** The preimage should be removed completely from the database. */
10554
+ * rho`: work-reports which have been reported but are not yet known to be
10555
+ * available to a super-majority of validators, together with the time
10556
+ * at which each was reported.
10557
+ *
10558
+ * https://graypaper.fluffylabs.dev/#/579bd12/135800135800
10559
+ */
10560
+ availabilityAssignmentView(): AvailabilityAssignmentsView;
10561
+
10562
+ /**
10563
+ * `ι iota`: The validator keys and metadata to be drawn from next.
10564
+ */
10565
+ designatedValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
10566
+
10567
+ /**
10568
+ * `κ kappa`: Validators, who are the set of economic actors uniquely
10569
+ * privileged to help build and maintain the Jam chain, are
10570
+ * identified within κ, archived in λ and enqueued from ι.
10571
+ *
10572
+ * https://graypaper.fluffylabs.dev/#/579bd12/080201080601
10573
+ */
10574
+ currentValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
10575
+
10576
+ /**
10577
+ * `λ lambda`: Validators, who are the set of economic actors uniquely
10578
+ * privileged to help build and maintain the Jam chain, are
10579
+ * identified within κ, archived in λ and enqueued from ι.
10580
+ *
10581
+ * https://graypaper.fluffylabs.dev/#/579bd12/080201080601
10582
+ */
10583
+ previousValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
10584
+
10585
+ /**
10586
+ * `α alpha`: Authorizers available for each core (authorizer pool).
10587
+ *
10588
+ * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10589
+ */
10590
+ authPoolsView(): SequenceView<AuthorizationPool, SequenceView<AuthorizerHash>>;
10591
+
10592
+ /**
10593
+ * `φ phi`: A queue of authorizers for each core used to fill up the pool.
10594
+ *
10595
+ * Only updated by `accumulate` calls using `assign` host call.
10596
+ *
10597
+ * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10598
+ */
10599
+ authQueuesView(): SequenceView<AuthorizationQueue, SequenceView<AuthorizerHash>>;
10600
+
10601
+ /**
10602
+ * `β beta`: State of the blocks from recent history.
10603
+ *
10604
+ * https://graypaper.fluffylabs.dev/#/579bd12/0fb7010fb701
10605
+ */
10606
+ recentBlocksView(): RecentBlocksView;
10607
+
10608
+ /**
10609
+ * `π pi`: Previous and current statistics of each validator,
10610
+ * cores statistics and services statistics.
10611
+ *
10612
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/18f60118f601?v=0.6.4
10613
+ */
10614
+ statisticsView(): StatisticsDataView;
10615
+
10616
+ /**
10617
+ * `ϑ theta`: We also maintain knowledge of ready (i.e. available
10618
+ * and/or audited) but not-yet-accumulated work-reports in
10619
+ * the state item ϑ.
10620
+ *
10621
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165300165500
10622
+ */
10623
+ accumulationQueueView(): AccumulationQueueView;
10624
+
10625
+ /**
10626
+ * `ξ xi`: In order to know which work-packages have been
10627
+ * accumulated already, we maintain a history of what has
10628
+ * been accumulated. This history, ξ, is sufficiently large
10629
+ * for an epoch worth of work-reports.
10630
+ *
10631
+ * https://graypaper.fluffylabs.dev/#/5f542d7/161a00161d00
10632
+ */
10633
+ recentlyAccumulatedView(): RecentlyAccumulatedView;
10634
+
10635
+ /*
10636
+ * `γ gamma`: Safrole data.
10637
+ */
10638
+ safroleDataView(): SafroleDataView;
10639
+
10640
+ /** Retrieve details about single service. */
10641
+ getServiceInfoView(id: ServiceId): ServiceAccountInfoView | null;
10642
+ };
10643
+
10644
+ declare enum UpdatePreimageKind {
10645
+ /** Insert new preimage and optionally update it's lookup history. */
10646
+ Provide = 0,
10647
+ /** Remove a preimage and it's lookup history. */
10648
+ Remove = 1,
10649
+ /** update or add lookup history for preimage hash/len to given value. */
10650
+ UpdateOrAdd = 2,
10651
+ }
10652
+ /**
10653
+ * A preimage update.
10654
+ *
10655
+ * Can be one of the following cases:
10656
+ * 1. Provide a new preimage blob and set the lookup history to available at `slot`.
10657
+ * 2. Remove (expunge) a preimage and it's lookup history.
10658
+ * 3. Update `LookupHistory` with given value.
10659
+ */
10660
+ declare class UpdatePreimage {
10661
+ private constructor(
10662
+ public readonly serviceId: ServiceId,
10663
+ public readonly action:
10664
+ | {
10665
+ kind: UpdatePreimageKind.Provide;
10666
+ preimage: PreimageItem;
10667
+ // optionally set lookup history of that preimage to "available"
10668
+ slot: TimeSlot | null;
10669
+ }
10670
+ | {
10671
+ kind: UpdatePreimageKind.Remove;
10672
+ hash: PreimageHash;
10673
+ length: U32;
10674
+ }
10675
+ | {
10676
+ kind: UpdatePreimageKind.UpdateOrAdd;
10677
+ item: LookupHistoryItem;
10678
+ },
10679
+ ) {}
10680
+
10681
+ /** A preimage is provided. We should update the lookuphistory and add the preimage to db. */
10682
+ static provide({
10683
+ serviceId,
10684
+ preimage,
10685
+ slot,
10686
+ }: {
10687
+ serviceId: ServiceId;
10688
+ preimage: PreimageItem;
10689
+ slot: TimeSlot | null;
10690
+ }) {
10691
+ return new UpdatePreimage(serviceId, {
10692
+ kind: UpdatePreimageKind.Provide,
10693
+ preimage,
10694
+ slot,
10695
+ });
10696
+ }
10697
+
10698
+ /** The preimage should be removed completely from the database. */
10560
10699
  static remove({ serviceId, hash, length }: { serviceId: ServiceId; hash: PreimageHash; length: U32 }) {
10561
10700
  return new UpdatePreimage(serviceId, {
10562
10701
  kind: UpdatePreimageKind.Remove,
@@ -10824,10 +10963,10 @@ declare class InMemoryService extends WithDebug implements Service {
10824
10963
  /**
10825
10964
  * A special version of state, stored fully in-memory.
10826
10965
  */
10827
- declare class InMemoryState extends WithDebug implements State, EnumerableState {
10966
+ declare class InMemoryState extends WithDebug implements State, WithStateView, EnumerableState {
10828
10967
  /** Create a new `InMemoryState` by providing all required fields. */
10829
- static create(state: InMemoryStateFields) {
10830
- return new InMemoryState(state);
10968
+ static new(chainSpec: ChainSpec, state: InMemoryStateFields) {
10969
+ return new InMemoryState(chainSpec, state);
10831
10970
  }
10832
10971
 
10833
10972
  /**
@@ -10845,7 +10984,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
10845
10984
  /**
10846
10985
  * Create a new `InMemoryState` from some other state object.
10847
10986
  */
10848
- static copyFrom(other: State, servicesData: Map<ServiceId, ServiceEntries>) {
10987
+ static copyFrom(chainSpec: ChainSpec, other: State, servicesData: Map<ServiceId, ServiceEntries>) {
10849
10988
  const services = new Map<ServiceId, InMemoryService>();
10850
10989
  for (const [id, entries] of servicesData.entries()) {
10851
10990
  const service = other.getService(id);
@@ -10856,7 +10995,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
10856
10995
  services.set(id, inMemService);
10857
10996
  }
10858
10997
 
10859
- return InMemoryState.create({
10998
+ return InMemoryState.new(chainSpec, {
10860
10999
  availabilityAssignment: other.availabilityAssignment,
10861
11000
  accumulationQueue: other.accumulationQueue,
10862
11001
  designatedValidatorData: other.designatedValidatorData,
@@ -11051,12 +11190,12 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
11051
11190
  disputesRecords: DisputesRecords;
11052
11191
  timeslot: TimeSlot;
11053
11192
  entropy: FixedSizeArray<EntropyHash, ENTROPY_ENTRIES>;
11054
- authPools: PerCore<KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>>;
11055
- authQueues: PerCore<FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>>;
11056
- recentBlocks: RecentBlocksHistory;
11193
+ authPools: PerCore<AuthorizationPool>;
11194
+ authQueues: PerCore<AuthorizationQueue>;
11195
+ recentBlocks: RecentBlocks;
11057
11196
  statistics: StatisticsData;
11058
- accumulationQueue: PerEpochBlock<readonly NotYetAccumulatedReport[]>;
11059
- recentlyAccumulated: PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
11197
+ accumulationQueue: AccumulationQueue;
11198
+ recentlyAccumulated: RecentlyAccumulated;
11060
11199
  ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">;
11061
11200
  sealingKeySeries: SafroleSealingKeys;
11062
11201
  epochRoot: BandersnatchRingRoot;
@@ -11072,7 +11211,10 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
11072
11211
  return this.services.get(id) ?? null;
11073
11212
  }
11074
11213
 
11075
- private constructor(s: InMemoryStateFields) {
11214
+ protected constructor(
11215
+ private readonly chainSpec: ChainSpec,
11216
+ s: InMemoryStateFields,
11217
+ ) {
11076
11218
  super();
11077
11219
  this.availabilityAssignment = s.availabilityAssignment;
11078
11220
  this.designatedValidatorData = s.designatedValidatorData;
@@ -11096,11 +11238,15 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
11096
11238
  this.services = s.services;
11097
11239
  }
11098
11240
 
11241
+ view(): StateView {
11242
+ return new InMemoryStateView(this.chainSpec, this);
11243
+ }
11244
+
11099
11245
  /**
11100
11246
  * Create an empty and possibly incoherent `InMemoryState`.
11101
11247
  */
11102
11248
  static empty(spec: ChainSpec) {
11103
- return new InMemoryState({
11249
+ return new InMemoryState(spec, {
11104
11250
  availabilityAssignment: tryAsPerCore(
11105
11251
  Array.from({ length: spec.coresCount }, () => null),
11106
11252
  spec,
@@ -11167,7 +11313,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
11167
11313
  ),
11168
11314
  spec,
11169
11315
  ),
11170
- recentBlocks: RecentBlocksHistory.empty(),
11316
+ recentBlocks: RecentBlocks.empty(),
11171
11317
  statistics: StatisticsData.create({
11172
11318
  current: tryAsPerValidator(
11173
11319
  Array.from({ length: spec.validatorsCount }, () => ValidatorStatistics.empty()),
@@ -11253,12 +11399,18 @@ type FieldNames<T> = {
11253
11399
  [K in keyof T]: T[K] extends Function ? never : K;
11254
11400
  }[keyof T];
11255
11401
 
11402
+ type index$e_AUTHORIZATION_QUEUE_SIZE = AUTHORIZATION_QUEUE_SIZE;
11256
11403
  type index$e_AccumulationOutput = AccumulationOutput;
11257
11404
  declare const index$e_AccumulationOutput: typeof AccumulationOutput;
11405
+ type index$e_AccumulationQueue = AccumulationQueue;
11406
+ type index$e_AccumulationQueueView = AccumulationQueueView;
11407
+ type index$e_AuthorizationPool = AuthorizationPool;
11408
+ type index$e_AuthorizationQueue = AuthorizationQueue;
11258
11409
  type index$e_AutoAccumulate = AutoAccumulate;
11259
11410
  declare const index$e_AutoAccumulate: typeof AutoAccumulate;
11260
11411
  type index$e_AvailabilityAssignment = AvailabilityAssignment;
11261
11412
  declare const index$e_AvailabilityAssignment: typeof AvailabilityAssignment;
11413
+ type index$e_AvailabilityAssignmentsView = AvailabilityAssignmentsView;
11262
11414
  declare const index$e_BASE_SERVICE_BALANCE: typeof BASE_SERVICE_BALANCE;
11263
11415
  type index$e_BlockState = BlockState;
11264
11416
  declare const index$e_BlockState: typeof BlockState;
@@ -11280,8 +11432,11 @@ type index$e_InMemoryStateFields = InMemoryStateFields;
11280
11432
  type index$e_LookupHistoryItem = LookupHistoryItem;
11281
11433
  declare const index$e_LookupHistoryItem: typeof LookupHistoryItem;
11282
11434
  type index$e_LookupHistorySlots = LookupHistorySlots;
11435
+ type index$e_MAX_AUTH_POOL_SIZE = MAX_AUTH_POOL_SIZE;
11283
11436
  declare const index$e_MAX_LOOKUP_HISTORY_SLOTS: typeof MAX_LOOKUP_HISTORY_SLOTS;
11284
11437
  type index$e_MAX_RECENT_HISTORY = MAX_RECENT_HISTORY;
11438
+ type index$e_NotYetAccumulatedReport = NotYetAccumulatedReport;
11439
+ declare const index$e_NotYetAccumulatedReport: typeof NotYetAccumulatedReport;
11285
11440
  type index$e_PerCore<T> = PerCore<T>;
11286
11441
  type index$e_PreimageItem = PreimageItem;
11287
11442
  declare const index$e_PreimageItem: typeof PreimageItem;
@@ -11289,10 +11444,12 @@ type index$e_PrivilegedServices = PrivilegedServices;
11289
11444
  declare const index$e_PrivilegedServices: typeof PrivilegedServices;
11290
11445
  type index$e_RecentBlocks = RecentBlocks;
11291
11446
  declare const index$e_RecentBlocks: typeof RecentBlocks;
11292
- type index$e_RecentBlocksHistory = RecentBlocksHistory;
11293
- declare const index$e_RecentBlocksHistory: typeof RecentBlocksHistory;
11447
+ type index$e_RecentBlocksView = RecentBlocksView;
11448
+ type index$e_RecentlyAccumulated = RecentlyAccumulated;
11449
+ type index$e_RecentlyAccumulatedView = RecentlyAccumulatedView;
11294
11450
  type index$e_SafroleData = SafroleData;
11295
11451
  declare const index$e_SafroleData: typeof SafroleData;
11452
+ type index$e_SafroleDataView = SafroleDataView;
11296
11453
  type index$e_SafroleSealingKeys = SafroleSealingKeys;
11297
11454
  type index$e_SafroleSealingKeysData = SafroleSealingKeysData;
11298
11455
  declare const index$e_SafroleSealingKeysData: typeof SafroleSealingKeysData;
@@ -11301,14 +11458,17 @@ declare const index$e_SafroleSealingKeysKind: typeof SafroleSealingKeysKind;
11301
11458
  type index$e_Service = Service;
11302
11459
  type index$e_ServiceAccountInfo = ServiceAccountInfo;
11303
11460
  declare const index$e_ServiceAccountInfo: typeof ServiceAccountInfo;
11461
+ type index$e_ServiceAccountInfoView = ServiceAccountInfoView;
11304
11462
  type index$e_ServiceData = ServiceData;
11305
11463
  type index$e_ServiceEntries = ServiceEntries;
11306
11464
  type index$e_ServiceStatistics = ServiceStatistics;
11307
11465
  declare const index$e_ServiceStatistics: typeof ServiceStatistics;
11308
11466
  type index$e_ServicesUpdate = ServicesUpdate;
11309
11467
  type index$e_State = State;
11468
+ type index$e_StateView = StateView;
11310
11469
  type index$e_StatisticsData = StatisticsData;
11311
11470
  declare const index$e_StatisticsData: typeof StatisticsData;
11471
+ type index$e_StatisticsDataView = StatisticsDataView;
11312
11472
  type index$e_StorageItem = StorageItem;
11313
11473
  declare const index$e_StorageItem: typeof StorageItem;
11314
11474
  type index$e_StorageKey = StorageKey;
@@ -11329,9 +11489,15 @@ declare const index$e_UpdateStorageKind: typeof UpdateStorageKind;
11329
11489
  type index$e_VALIDATOR_META_BYTES = VALIDATOR_META_BYTES;
11330
11490
  type index$e_ValidatorData = ValidatorData;
11331
11491
  declare const index$e_ValidatorData: typeof ValidatorData;
11492
+ type index$e_ValidatorDataView = ValidatorDataView;
11332
11493
  type index$e_ValidatorStatistics = ValidatorStatistics;
11333
11494
  declare const index$e_ValidatorStatistics: typeof ValidatorStatistics;
11495
+ type index$e_WithStateView<V = StateView> = WithStateView<V>;
11334
11496
  declare const index$e_accumulationOutputComparator: typeof accumulationOutputComparator;
11497
+ declare const index$e_accumulationQueueCodec: typeof accumulationQueueCodec;
11498
+ declare const index$e_authPoolsCodec: typeof authPoolsCodec;
11499
+ declare const index$e_authQueuesCodec: typeof authQueuesCodec;
11500
+ declare const index$e_availabilityAssignmentsCodec: typeof availabilityAssignmentsCodec;
11335
11501
  declare const index$e_codecBandersnatchKey: typeof codecBandersnatchKey;
11336
11502
  declare const index$e_codecPerCore: typeof codecPerCore;
11337
11503
  declare const index$e_codecServiceId: typeof codecServiceId;
@@ -11340,16 +11506,18 @@ declare const index$e_codecVarU16: typeof codecVarU16;
11340
11506
  declare const index$e_codecWithVersion: typeof codecWithVersion;
11341
11507
  declare const index$e_hashComparator: typeof hashComparator;
11342
11508
  declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
11509
+ declare const index$e_recentlyAccumulatedCodec: typeof recentlyAccumulatedCodec;
11343
11510
  declare const index$e_serviceDataCodec: typeof serviceDataCodec;
11344
11511
  declare const index$e_serviceEntriesCodec: typeof serviceEntriesCodec;
11345
11512
  declare const index$e_sortedSetCodec: typeof sortedSetCodec;
11346
11513
  declare const index$e_tryAsLookupHistorySlots: typeof tryAsLookupHistorySlots;
11347
11514
  declare const index$e_tryAsPerCore: typeof tryAsPerCore;
11515
+ declare const index$e_validatorsDataCodec: typeof validatorsDataCodec;
11348
11516
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
11349
11517
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
11350
11518
  declare namespace index$e {
11351
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as 
serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11352
- export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
11519
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_NotYetAccumulatedReport as NotYetAccumulatedReport, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_accumulationQueueCodec as accumulationQueueCodec, index$e_authPoolsCodec as authPoolsCodec, index$e_authQueuesCodec as authQueuesCodec, index$e_availabilityAssignmentsCodec as availabilityAssignmentsCodec, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as 
codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_recentlyAccumulatedCodec as recentlyAccumulatedCodec, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_validatorsDataCodec as validatorsDataCodec, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11520
+ export type { index$e_AUTHORIZATION_QUEUE_SIZE as AUTHORIZATION_QUEUE_SIZE, index$e_AccumulationQueue as AccumulationQueue, index$e_AccumulationQueueView as AccumulationQueueView, index$e_AuthorizationPool as AuthorizationPool, index$e_AuthorizationQueue as AuthorizationQueue, index$e_AvailabilityAssignmentsView as AvailabilityAssignmentsView, index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_AUTH_POOL_SIZE as MAX_AUTH_POOL_SIZE, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_RecentBlocksView as RecentBlocksView, index$e_RecentlyAccumulated as RecentlyAccumulated, index$e_RecentlyAccumulatedView as RecentlyAccumulatedView, index$e_SafroleDataView as SafroleDataView, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceAccountInfoView as ServiceAccountInfoView, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StateView as StateView, index$e_StatisticsDataView as StatisticsDataView, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES, index$e_ValidatorDataView as ValidatorDataView, index$e_WithStateView as WithStateView };
11353
11521
  }
11354
11522
 
11355
11523
  type StateKey = Opaque<OpaqueHash, "stateKey">;
@@ -11490,33 +11658,25 @@ declare function legacyServiceNested(serviceId: ServiceId, hash: OpaqueHash): St
11490
11658
  return key.asOpaque();
11491
11659
  }
11492
11660
 
11493
- type StateCodec<T> = {
11661
+ type StateCodec<T, V = T> = {
11494
11662
  key: StateKey;
11495
- Codec: Descriptor<T>;
11663
+ Codec: Descriptor<T, V>;
11496
11664
  extract: (s: State) => T;
11497
11665
  };
11498
11666
 
11499
11667
  /** Serialization for particular state entries. */
11500
11668
  declare namespace serialize {
11501
11669
  /** C(1): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b15013b1501?v=0.6.7 */
11502
- export const authPools: StateCodec<State["authPools"]> = {
11670
+ export const authPools: StateCodec<State["authPools"], ReturnType<StateView["authPoolsView"]>> = {
11503
11671
  key: stateKeys.index(StateKeyIdx.Alpha),
11504
- Codec: codecPerCore(
11505
- codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), {
11506
- minLength: 0,
11507
- maxLength: MAX_AUTH_POOL_SIZE,
11508
- typicalLength: MAX_AUTH_POOL_SIZE,
11509
- }),
11510
- ),
11672
+ Codec: authPoolsCodec,
11511
11673
  extract: (s) => s.authPools,
11512
11674
  };
11513
11675
 
11514
11676
  /** C(2): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b31013b3101?v=0.6.7 */
11515
- export const authQueues: StateCodec<State["authQueues"]> = {
11677
+ export const authQueues: StateCodec<State["authQueues"], ReturnType<StateView["authQueuesView"]>> = {
11516
11678
  key: stateKeys.index(StateKeyIdx.Phi),
11517
- Codec: codecPerCore(
11518
- codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), AUTHORIZATION_QUEUE_SIZE),
11519
- ),
11679
+ Codec: authQueuesCodec,
11520
11680
  extract: (s) => s.authQueues,
11521
11681
  };
11522
11682
 
@@ -11524,14 +11684,14 @@ declare namespace serialize {
11524
11684
  * C(3): Recent blocks with compatibility
11525
11685
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e013b3e01?v=0.6.7
11526
11686
  */
11527
- export const recentBlocks: StateCodec<State["recentBlocks"]> = {
11687
+ export const recentBlocks: StateCodec<RecentBlocks, RecentBlocksView> = {
11528
11688
  key: stateKeys.index(StateKeyIdx.Beta),
11529
- Codec: RecentBlocksHistory.Codec,
11689
+ Codec: RecentBlocks.Codec,
11530
11690
  extract: (s) => s.recentBlocks,
11531
11691
  };
11532
11692
 
11533
11693
  /** C(4): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b63013b6301?v=0.6.7 */
11534
- export const safrole: StateCodec<SafroleData> = {
11694
+ export const safrole: StateCodec<SafroleData, SafroleDataView> = {
11535
11695
  key: stateKeys.index(StateKeyIdx.Gamma),
11536
11696
  Codec: SafroleData.Codec,
11537
11697
  extract: (s) =>
@@ -11544,7 +11704,7 @@ declare namespace serialize {
11544
11704
  };
11545
11705
 
11546
11706
  /** C(5): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bba013bba01?v=0.6.7 */
11547
- export const disputesRecords: StateCodec<State["disputesRecords"]> = {
11707
+ export const disputesRecords: StateCodec<DisputesRecords> = {
11548
11708
  key: stateKeys.index(StateKeyIdx.Psi),
11549
11709
  Codec: DisputesRecords.Codec,
11550
11710
  extract: (s) => s.disputesRecords,
@@ -11558,30 +11718,42 @@ declare namespace serialize {
11558
11718
  };
11559
11719
 
11560
11720
  /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
11561
- export const designatedValidators: StateCodec<State["designatedValidatorData"]> = {
11721
+ export const designatedValidators: StateCodec<
11722
+ State["designatedValidatorData"],
11723
+ ReturnType<StateView["designatedValidatorDataView"]>
11724
+ > = {
11562
11725
  key: stateKeys.index(StateKeyIdx.Iota),
11563
- Codec: codecPerValidator(ValidatorData.Codec),
11726
+ Codec: validatorsDataCodec,
11564
11727
  extract: (s) => s.designatedValidatorData,
11565
11728
  };
11566
11729
 
11567
11730
  /** C(8): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b0d023b0d02?v=0.6.7 */
11568
- export const currentValidators: StateCodec<State["currentValidatorData"]> = {
11731
+ export const currentValidators: StateCodec<
11732
+ State["currentValidatorData"],
11733
+ ReturnType<StateView["currentValidatorDataView"]>
11734
+ > = {
11569
11735
  key: stateKeys.index(StateKeyIdx.Kappa),
11570
- Codec: codecPerValidator(ValidatorData.Codec),
11736
+ Codec: validatorsDataCodec,
11571
11737
  extract: (s) => s.currentValidatorData,
11572
11738
  };
11573
11739
 
11574
11740
  /** C(9): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b1a023b1a02?v=0.6.7 */
11575
- export const previousValidators: StateCodec<State["previousValidatorData"]> = {
11741
+ export const previousValidators: StateCodec<
11742
+ State["previousValidatorData"],
11743
+ ReturnType<StateView["previousValidatorDataView"]>
11744
+ > = {
11576
11745
  key: stateKeys.index(StateKeyIdx.Lambda),
11577
- Codec: codecPerValidator(ValidatorData.Codec),
11746
+ Codec: validatorsDataCodec,
11578
11747
  extract: (s) => s.previousValidatorData,
11579
11748
  };
11580
11749
 
11581
11750
  /** C(10): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b27023b2702?v=0.6.7 */
11582
- export const availabilityAssignment: StateCodec<State["availabilityAssignment"]> = {
11751
+ export const availabilityAssignment: StateCodec<
11752
+ State["availabilityAssignment"],
11753
+ ReturnType<StateView["availabilityAssignmentView"]>
11754
+ > = {
11583
11755
  key: stateKeys.index(StateKeyIdx.Rho),
11584
- Codec: codecPerCore(codec.optional(AvailabilityAssignment.Codec)),
11756
+ Codec: availabilityAssignmentsCodec,
11585
11757
  extract: (s) => s.availabilityAssignment,
11586
11758
  };
11587
11759
 
@@ -11600,28 +11772,29 @@ declare namespace serialize {
11600
11772
  };
11601
11773
 
11602
11774
  /** C(13): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b5e023b5e02?v=0.6.7 */
11603
- export const statistics: StateCodec<State["statistics"]> = {
11775
+ export const statistics: StateCodec<StatisticsData, StatisticsDataView> = {
11604
11776
  key: stateKeys.index(StateKeyIdx.Pi),
11605
11777
  Codec: StatisticsData.Codec,
11606
11778
  extract: (s) => s.statistics,
11607
11779
  };
11608
11780
 
11609
11781
  /** C(14): https://graypaper.fluffylabs.dev/#/1c979cb/3bf0023bf002?v=0.7.1 */
11610
- export const accumulationQueue: StateCodec<State["accumulationQueue"]> = {
11782
+ export const accumulationQueue: StateCodec<
11783
+ State["accumulationQueue"],
11784
+ ReturnType<StateView["accumulationQueueView"]>
11785
+ > = {
11611
11786
  key: stateKeys.index(StateKeyIdx.Omega),
11612
- Codec: codecPerEpochBlock(readonlyArray(codec.sequenceVarLen(NotYetAccumulatedReport.Codec))),
11787
+ Codec: accumulationQueueCodec,
11613
11788
  extract: (s) => s.accumulationQueue,
11614
11789
  };
11615
11790
 
11616
11791
  /** C(15): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b96023b9602?v=0.6.7 */
11617
- export const recentlyAccumulated: StateCodec<State["recentlyAccumulated"]> = {
11792
+ export const recentlyAccumulated: StateCodec<
11793
+ State["recentlyAccumulated"],
11794
+ ReturnType<StateView["recentlyAccumulatedView"]>
11795
+ > = {
11618
11796
  key: stateKeys.index(StateKeyIdx.Xi),
11619
- Codec: codecPerEpochBlock(
11620
- codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>()).convert(
11621
- (x) => Array.from(x),
11622
- (x) => HashSet.from(x),
11623
- ),
11624
- ),
11797
+ Codec: recentlyAccumulatedCodec,
11625
11798
  extract: (s) => s.recentlyAccumulated,
11626
11799
  };
11627
11800
 
@@ -11677,6 +11850,107 @@ declare const dumpCodec = Descriptor.new<BytesBlob>(
11677
11850
  (s) => s.bytes(s.decoder.source.length - s.decoder.bytesRead()),
11678
11851
  );
11679
11852
 
11853
+ /**
11854
+ * Abstraction over some backend containing serialized state entries.
11855
+ *
11856
+ * This may or may not be backed by some on-disk database or can be just stored in memory.
11857
+ */
11858
+ interface SerializedStateBackend {
11859
+ /** Retrieve given state key. */
11860
+ get(key: StateKey): BytesBlob | null;
11861
+ }
11862
+
11863
+ declare class SerializedStateView<T extends SerializedStateBackend> implements StateView {
11864
+ constructor(
11865
+ private readonly spec: ChainSpec,
11866
+ public backend: T,
11867
+ /** Best-effort list of recently active services. */
11868
+ private readonly recentlyUsedServices: ServiceId[],
11869
+ private readonly viewCache: HashDictionary<StateKey, unknown>,
11870
+ ) {}
11871
+
11872
+ private retrieveView<A, B>({ key, Codec }: KeyAndCodecWithView<A, B>, description: string): B {
11873
+ const cached = this.viewCache.get(key);
11874
+ if (cached !== undefined) {
11875
+ return cached as B;
11876
+ }
11877
+ const bytes = this.backend.get(key);
11878
+ if (bytes === null) {
11879
+ throw new Error(`Required state entry for ${description} is missing!. Accessing view of key: ${key}`);
11880
+ }
11881
+ // NOTE [ToDr] we are not using `Decoder.decodeObject` here because
11882
+ // it needs to get to the end of the data (skip), yet that's expensive.
11883
+ // we assume that the state data is correct and coherent anyway, so
11884
+ // for performance reasons we simply create the view here.
11885
+ const d = Decoder.fromBytesBlob(bytes);
11886
+ d.attachContext(this.spec);
11887
+ const view = Codec.View.decode(d);
11888
+ this.viewCache.set(key, view);
11889
+ return view;
11890
+ }
11891
+
11892
+ availabilityAssignmentView(): AvailabilityAssignmentsView {
11893
+ return this.retrieveView(serialize.availabilityAssignment, "availabilityAssignmentView");
11894
+ }
11895
+
11896
+ designatedValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
11897
+ return this.retrieveView(serialize.designatedValidators, "designatedValidatorsView");
11898
+ }
11899
+
11900
+ currentValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
11901
+ return this.retrieveView(serialize.currentValidators, "currentValidatorsView");
11902
+ }
11903
+
11904
+ previousValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
11905
+ return this.retrieveView(serialize.previousValidators, "previousValidatorsView");
11906
+ }
11907
+
11908
+ authPoolsView(): SequenceView<AuthorizationPool, SequenceView<AuthorizerHash>> {
11909
+ return this.retrieveView(serialize.authPools, "authPoolsView");
11910
+ }
11911
+
11912
+ authQueuesView(): SequenceView<AuthorizationQueue, SequenceView<AuthorizerHash>> {
11913
+ return this.retrieveView(serialize.authQueues, "authQueuesView");
11914
+ }
11915
+
11916
+ recentBlocksView(): RecentBlocksView {
11917
+ return this.retrieveView(serialize.recentBlocks, "recentBlocksView");
11918
+ }
11919
+
11920
+ statisticsView(): StatisticsDataView {
11921
+ return this.retrieveView(serialize.statistics, "statisticsView");
11922
+ }
11923
+
11924
+ accumulationQueueView(): AccumulationQueueView {
11925
+ return this.retrieveView(serialize.accumulationQueue, "accumulationQueueView");
11926
+ }
11927
+
11928
+ recentlyAccumulatedView(): RecentlyAccumulatedView {
11929
+ return this.retrieveView(serialize.recentlyAccumulated, "recentlyAccumulatedView");
11930
+ }
11931
+
11932
+ safroleDataView(): SafroleDataView {
11933
+ return this.retrieveView(serialize.safrole, "safroleDataView");
11934
+ }
11935
+
11936
+ getServiceInfoView(id: ServiceId): ServiceAccountInfoView | null {
11937
+ const serviceData = serialize.serviceData(id);
11938
+ const bytes = this.backend.get(serviceData.key);
11939
+ if (bytes === null) {
11940
+ return null;
11941
+ }
11942
+ if (!this.recentlyUsedServices.includes(id)) {
11943
+ this.recentlyUsedServices.push(id);
11944
+ }
11945
+ return Decoder.decodeObject(serviceData.Codec.View, bytes, this.spec);
11946
+ }
11947
+ }
11948
+
11949
+ type KeyAndCodecWithView<T, V> = {
11950
+ key: StateKey;
11951
+ Codec: CodecWithView<T, V>;
11952
+ };
11953
+
11680
11954
  /** What should be done with that key? */
11681
11955
  declare enum StateEntryUpdateAction {
11682
11956
  /** Insert an entry. */
@@ -12055,16 +12329,6 @@ declare function convertInMemoryStateToDictionary(
12055
12329
  return serialized;
12056
12330
  }
12057
12331
 
12058
- /**
12059
- * Abstraction over some backend containing serialized state entries.
12060
- *
12061
- * This may or may not be backed by some on-disk database or can be just stored in memory.
12062
- */
12063
- interface SerializedStateBackend {
12064
- /** Retrieve given state key. */
12065
- get(key: StateKey): BytesBlob | null;
12066
- }
12067
-
12068
12332
  /**
12069
12333
  * State object which reads it's entries from some backend.
12070
12334
  *
@@ -12074,7 +12338,7 @@ interface SerializedStateBackend {
12074
12338
  * in the backend layer, so it MAY fail during runtime.
12075
12339
  */
12076
12340
  declare class SerializedState<T extends SerializedStateBackend = SerializedStateBackend>
12077
- implements State, EnumerableState
12341
+ implements State, WithStateView, EnumerableState
12078
12342
  {
12079
12343
  /** Create a state-like object from collection of serialized entries. */
12080
12344
  static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
@@ -12091,12 +12355,15 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12091
12355
  return new SerializedState(spec, blake2b, db, recentServices);
12092
12356
  }
12093
12357
 
12358
+ private dataCache: HashDictionary<StateKey, unknown> = HashDictionary.new();
12359
+ private viewCache: HashDictionary<StateKey, unknown> = HashDictionary.new();
12360
+
12094
12361
  private constructor(
12095
12362
  private readonly spec: ChainSpec,
12096
12363
  private readonly blake2b: Blake2b,
12097
12364
  public backend: T,
12098
12365
  /** Best-effort list of recently active services. */
12099
- private readonly _recentServiceIds: ServiceId[],
12366
+ private readonly recentlyUsedServices: ServiceId[],
12100
12367
  ) {}
12101
12368
 
12102
12369
  /** Comparing the serialized states, just means comparing their backends. */
@@ -12104,14 +12371,21 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12104
12371
  return this.backend;
12105
12372
  }
12106
12373
 
12374
+ /** Return a non-decoding version of the state. */
12375
+ view(): StateView {
12376
+ return new SerializedStateView(this.spec, this.backend, this.recentlyUsedServices, this.viewCache);
12377
+ }
12378
+
12107
12379
  // TODO [ToDr] Temporary method to update the state,
12108
12380
  // without changing references.
12109
12381
  public updateBackend(newBackend: T) {
12110
12382
  this.backend = newBackend;
12383
+ this.dataCache = HashDictionary.new();
12384
+ this.viewCache = HashDictionary.new();
12111
12385
  }
12112
12386
 
12113
12387
  recentServiceIds(): readonly ServiceId[] {
12114
- return this._recentServiceIds;
12388
+ return this.recentlyUsedServices;
12115
12389
  }
12116
12390
 
12117
12391
  getService(id: ServiceId): SerializedService | null {
@@ -12120,27 +12394,33 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12120
12394
  return null;
12121
12395
  }
12122
12396
 
12123
- if (!this._recentServiceIds.includes(id)) {
12124
- this._recentServiceIds.push(id);
12397
+ if (!this.recentlyUsedServices.includes(id)) {
12398
+ this.recentlyUsedServices.push(id);
12125
12399
  }
12126
12400
 
12127
12401
  return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
12128
12402
  }
12129
12403
 
12130
- private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
12131
- const bytes = this.backend.get(key);
12132
- if (bytes === null) {
12133
- throw new Error(`Required state entry for ${description} is missing!. Accessing key: ${key}`);
12404
+ private retrieve<T>(k: KeyAndCodec<T>, description: string): T {
12405
+ const data = this.retrieveOptional(k);
12406
+ if (data === undefined) {
12407
+ throw new Error(`Required state entry for ${description} is missing!. Accessing key: ${k.key}`);
12134
12408
  }
12135
- return Decoder.decodeObject(Codec, bytes, this.spec);
12409
+ return data;
12136
12410
  }
12137
12411
 
12138
12412
  private retrieveOptional<T>({ key, Codec }: KeyAndCodec<T>): T | undefined {
12413
+ const cached = this.dataCache.get(key);
12414
+ if (cached !== undefined) {
12415
+ return cached as T;
12416
+ }
12139
12417
  const bytes = this.backend.get(key);
12140
12418
  if (bytes === null) {
12141
12419
  return undefined;
12142
12420
  }
12143
- return Decoder.decodeObject(Codec, bytes, this.spec);
12421
+ const data = Decoder.decodeObject(Codec, bytes, this.spec);
12422
+ this.dataCache.set(key, data);
12423
+ return data;
12144
12424
  }
12145
12425
 
12146
12426
  get availabilityAssignment(): State["availabilityAssignment"] {
@@ -12313,12 +12593,15 @@ declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<
12313
12593
  declare const index$d_EMPTY_BLOB: typeof EMPTY_BLOB;
12314
12594
  type index$d_EncodeFun = EncodeFun;
12315
12595
  type index$d_KeyAndCodec<T> = KeyAndCodec<T>;
12596
+ type index$d_KeyAndCodecWithView<T, V> = KeyAndCodecWithView<T, V>;
12316
12597
  type index$d_SerializedService = SerializedService;
12317
12598
  declare const index$d_SerializedService: typeof SerializedService;
12318
12599
  type index$d_SerializedState<T extends SerializedStateBackend = SerializedStateBackend> = SerializedState<T>;
12319
12600
  declare const index$d_SerializedState: typeof SerializedState;
12320
12601
  type index$d_SerializedStateBackend = SerializedStateBackend;
12321
- type index$d_StateCodec<T> = StateCodec<T>;
12602
+ type index$d_SerializedStateView<T extends SerializedStateBackend> = SerializedStateView<T>;
12603
+ declare const index$d_SerializedStateView: typeof SerializedStateView;
12604
+ type index$d_StateCodec<T, V = T> = StateCodec<T, V>;
12322
12605
  type index$d_StateEntries = StateEntries;
12323
12606
  declare const index$d_StateEntries: typeof StateEntries;
12324
12607
  type index$d_StateEntryUpdate = StateEntryUpdate;
@@ -12346,8 +12629,8 @@ declare const index$d_serializeStorage: typeof serializeStorage;
12346
12629
  declare const index$d_stateEntriesSequenceCodec: typeof stateEntriesSequenceCodec;
12347
12630
  import index$d_stateKeys = stateKeys;
12348
12631
  declare namespace index$d {
12349
- export { index$d_EMPTY_BLOB as EMPTY_BLOB, index$d_SerializedService as SerializedService, index$d_SerializedState as SerializedState, index$d_StateEntries as StateEntries, index$d_StateEntryUpdateAction as StateEntryUpdateAction, index$d_StateKeyIdx as StateKeyIdx, index$d_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$d_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$d_U32_BYTES as U32_BYTES, index$d_binaryMerkleization as binaryMerkleization, index$d_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$d_dumpCodec as dumpCodec, index$d_getSafroleData as getSafroleData, index$d_legacyServiceNested as legacyServiceNested, index$d_loadState as loadState, index$d_serialize as serialize, index$d_serializeBasicKeys as serializeBasicKeys, index$d_serializePreimages as serializePreimages, index$d_serializeRemovedServices as serializeRemovedServices, index$d_serializeServiceUpdates as serializeServiceUpdates, index$d_serializeStateUpdate as serializeStateUpdate, index$d_serializeStorage as serializeStorage, index$d_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$d_stateKeys as stateKeys };
12350
- export type { index$d_EncodeFun as EncodeFun, index$d_KeyAndCodec as KeyAndCodec, index$d_SerializedStateBackend as SerializedStateBackend, index$d_StateCodec as StateCodec, index$d_StateEntryUpdate as StateEntryUpdate, index$d_StateKey as StateKey };
12632
+ export { index$d_EMPTY_BLOB as EMPTY_BLOB, index$d_SerializedService as SerializedService, index$d_SerializedState as SerializedState, index$d_SerializedStateView as SerializedStateView, index$d_StateEntries as StateEntries, index$d_StateEntryUpdateAction as StateEntryUpdateAction, index$d_StateKeyIdx as StateKeyIdx, index$d_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$d_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$d_U32_BYTES as U32_BYTES, index$d_binaryMerkleization as binaryMerkleization, index$d_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$d_dumpCodec as dumpCodec, index$d_getSafroleData as getSafroleData, index$d_legacyServiceNested as legacyServiceNested, index$d_loadState as loadState, index$d_serialize as serialize, index$d_serializeBasicKeys as serializeBasicKeys, index$d_serializePreimages as serializePreimages, index$d_serializeRemovedServices as serializeRemovedServices, index$d_serializeServiceUpdates as serializeServiceUpdates, index$d_serializeStateUpdate as serializeStateUpdate, index$d_serializeStorage as serializeStorage, index$d_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$d_stateKeys as stateKeys };
12633
+ export type { index$d_EncodeFun as EncodeFun, index$d_KeyAndCodec as KeyAndCodec, index$d_KeyAndCodecWithView as KeyAndCodecWithView, index$d_SerializedStateBackend as SerializedStateBackend, index$d_StateCodec as StateCodec, index$d_StateEntryUpdate as StateEntryUpdate, index$d_StateKey as StateKey };
12351
12634
  }
12352
12635
 
12353
12636
  /** Error during `LeafDb` creation. */
@@ -12542,7 +12825,7 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
12542
12825
 
12543
12826
  /** Insert a full state into the database. */
12544
12827
  async insertState(headerHash: HeaderHash, state: InMemoryState): Promise<Result$2<OK, StateUpdateError>> {
12545
- const encoded = Encoder.encodeObject(inMemoryStateCodec, state, this.spec);
12828
+ const encoded = Encoder.encodeObject(inMemoryStateCodec(this.spec), state, this.spec);
12546
12829
  this.db.set(headerHash, encoded);
12547
12830
  return Result.ok(OK);
12548
12831
  }
@@ -12553,7 +12836,7 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
12553
12836
  return null;
12554
12837
  }
12555
12838
 
12556
- return Decoder.decodeObject(inMemoryStateCodec, encodedState, this.spec);
12839
+ return Decoder.decodeObject(inMemoryStateCodec(this.spec), encodedState, this.spec);
12557
12840
  }
12558
12841
  }
12559
12842
 
@@ -13815,8 +14098,8 @@ interface PartialState {
13815
14098
  /** Update authorization queue for given core and authorize a service for this core. */
13816
14099
  updateAuthorizationQueue(
13817
14100
  coreIndex: CoreIndex,
13818
- authQueue: FixedSizeArray<Blake2bHash, AUTHORIZATION_QUEUE_SIZE>,
13819
- assigners: ServiceId | null,
14101
+ authQueue: AuthorizationQueue,
14102
+ assigner: ServiceId | null,
13820
14103
  ): Result$2<OK, UpdatePrivilegesError>;
13821
14104
 
13822
14105
  /**
@@ -13913,40 +14196,116 @@ interface GasCounter {
13913
14196
  sub(g: Gas): boolean;
13914
14197
  }
13915
14198
 
13916
- /**
13917
- * Mask class is an implementation of skip function defined in GP.
13918
- *
13919
- * https://graypaper.fluffylabs.dev/#/5f542d7/237201239801
13920
- */
13921
- declare class Mask {
13922
- /**
13923
- * The lookup table will have `0` at the index which corresponds to an instruction on the same index in the bytecode.
13924
- * In case the value is non-zero it signifies the offset to the index with next instruction.
13925
- *
13926
- * Example:
13927
- * ```
13928
- * 0..1..2..3..4..5..6..7..8..9 # Indices
13929
- * 0..2..1..0..1..0..3..2..1..0 # lookupTable forward values
13930
- * ```
13931
- * There are instructions at indices `0, 3, 5, 9`.
13932
- */
13933
- private lookupTableForward: Uint8Array;
14199
+ declare const NO_OF_REGISTERS$1 = 13;
13934
14200
 
13935
- constructor(mask: BitVec) {
13936
- this.lookupTableForward = this.buildLookupTableForward(mask);
14201
+ type RegisterIndex = Opaque<number, "register index">;
14202
+
14203
+ declare class Registers {
14204
+ private asSigned: BigInt64Array;
14205
+ private asUnsigned: BigUint64Array;
14206
+
14207
+ constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
14208
+ check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
14209
+ this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
14210
+ this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
13937
14211
  }
13938
14212
 
13939
- isInstruction(index: number) {
13940
- return this.lookupTableForward[index] === 0;
14213
+ static fromBytes(bytes: Uint8Array) {
14214
+ check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
14215
+ return new Registers(bytes);
13941
14216
  }
13942
14217
 
13943
- getNoOfBytesToNextInstruction(index: number) {
13944
- check`${index >= 0} index (${index}) cannot be a negative number`;
13945
- return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
14218
+ getBytesAsLittleEndian(index: number, len: number) {
14219
+ const offset = index << REGISTER_SIZE_SHIFT;
14220
+ return this.bytes.subarray(offset, offset + len);
13946
14221
  }
13947
14222
 
13948
- private buildLookupTableForward(mask: BitVec) {
13949
- const table = safeAllocUint8Array(mask.bitLength);
14223
+ getAllBytesAsLittleEndian() {
14224
+ return this.bytes;
14225
+ }
14226
+
14227
+ copyFrom(regs: Registers | BigUint64Array) {
14228
+ const array = regs instanceof BigUint64Array ? regs : regs.asUnsigned;
14229
+ this.asUnsigned.set(array);
14230
+ }
14231
+
14232
+ reset() {
14233
+ for (let i = 0; i < NO_OF_REGISTERS; i++) {
14234
+ this.asUnsigned[i] = 0n;
14235
+ }
14236
+ }
14237
+
14238
+ getLowerU32(registerIndex: number) {
14239
+ return Number(this.asUnsigned[registerIndex] & 0xff_ff_ff_ffn);
14240
+ }
14241
+
14242
+ getLowerI32(registerIndex: number) {
14243
+ return Number(this.getLowerU32(registerIndex)) >> 0;
14244
+ }
14245
+
14246
+ setU32(registerIndex: number, value: number) {
14247
+ this.asUnsigned[registerIndex] = signExtend32To64(value);
14248
+ }
14249
+
14250
+ setI32(registerIndex: number, value: number) {
14251
+ this.asSigned[registerIndex] = signExtend32To64(value);
14252
+ }
14253
+
14254
+ getU64(registerIndex: number) {
14255
+ return this.asUnsigned[registerIndex];
14256
+ }
14257
+
14258
+ getI64(registerIndex: number) {
14259
+ return this.asSigned[registerIndex];
14260
+ }
14261
+
14262
+ setU64(registerIndex: number, value: bigint) {
14263
+ this.asUnsigned[registerIndex] = value;
14264
+ }
14265
+
14266
+ setI64(registerIndex: number, value: bigint) {
14267
+ this.asSigned[registerIndex] = value;
14268
+ }
14269
+
14270
+ getAllU64() {
14271
+ return this.asUnsigned;
14272
+ }
14273
+ }
14274
+
14275
+ /**
14276
+ * Mask class is an implementation of skip function defined in GP.
14277
+ *
14278
+ * https://graypaper.fluffylabs.dev/#/5f542d7/237201239801
14279
+ */
14280
+ declare class Mask {
14281
+ /**
14282
+ * The lookup table will have `0` at the index which corresponds to an instruction on the same index in the bytecode.
14283
+ * In case the value is non-zero it signifies the offset to the index with next instruction.
14284
+ *
14285
+ * Example:
14286
+ * ```
14287
+ * 0..1..2..3..4..5..6..7..8..9 # Indices
14288
+ * 0..2..1..0..1..0..3..2..1..0 # lookupTable forward values
14289
+ * ```
14290
+ * There are instructions at indices `0, 3, 5, 9`.
14291
+ */
14292
+ private lookupTableForward: Uint8Array;
14293
+
14294
+ constructor(mask: BitVec) {
14295
+ this.lookupTableForward = this.buildLookupTableForward(mask);
14296
+ }
14297
+
14298
+ isInstruction(index: number) {
14299
+ return this.lookupTableForward[index] === 0;
14300
+ }
14301
+
14302
+ getNoOfBytesToNextInstruction(index: number) {
14303
+ check`${index >= 0} index (${index}) cannot be a negative number`;
14304
+ return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
14305
+ }
14306
+
14307
+ private buildLookupTableForward(mask: BitVec) {
14308
+ const table = safeAllocUint8Array(mask.bitLength);
13950
14309
  let lastInstructionOffset = 0;
13951
14310
  for (let i = mask.bitLength - 1; i >= 0; i--) {
13952
14311
  if (mask.isSet(i)) {
@@ -14082,82 +14441,6 @@ declare class ImmediateDecoder {
14082
14441
  }
14083
14442
  }
14084
14443
 
14085
- declare const NO_OF_REGISTERS$1 = 13;
14086
-
14087
- type RegisterIndex = Opaque<number, "register index">;
14088
-
14089
- declare class Registers {
14090
- private asSigned: BigInt64Array;
14091
- private asUnsigned: BigUint64Array;
14092
-
14093
- constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
14094
- check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
14095
- this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
14096
- this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
14097
- }
14098
-
14099
- static fromBytes(bytes: Uint8Array) {
14100
- check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
14101
- return new Registers(bytes);
14102
- }
14103
-
14104
- getBytesAsLittleEndian(index: number, len: number) {
14105
- const offset = index << REGISTER_SIZE_SHIFT;
14106
- return this.bytes.subarray(offset, offset + len);
14107
- }
14108
-
14109
- getAllBytesAsLittleEndian() {
14110
- return this.bytes;
14111
- }
14112
-
14113
- copyFrom(regs: Registers | BigUint64Array) {
14114
- const array = regs instanceof BigUint64Array ? regs : regs.asUnsigned;
14115
- this.asUnsigned.set(array);
14116
- }
14117
-
14118
- reset() {
14119
- for (let i = 0; i < NO_OF_REGISTERS; i++) {
14120
- this.asUnsigned[i] = 0n;
14121
- }
14122
- }
14123
-
14124
- getLowerU32(registerIndex: number) {
14125
- return Number(this.asUnsigned[registerIndex] & 0xff_ff_ff_ffn);
14126
- }
14127
-
14128
- getLowerI32(registerIndex: number) {
14129
- return Number(this.getLowerU32(registerIndex)) >> 0;
14130
- }
14131
-
14132
- setU32(registerIndex: number, value: number) {
14133
- this.asUnsigned[registerIndex] = signExtend32To64(value);
14134
- }
14135
-
14136
- setI32(registerIndex: number, value: number) {
14137
- this.asSigned[registerIndex] = signExtend32To64(value);
14138
- }
14139
-
14140
- getU64(registerIndex: number) {
14141
- return this.asUnsigned[registerIndex];
14142
- }
14143
-
14144
- getI64(registerIndex: number) {
14145
- return this.asSigned[registerIndex];
14146
- }
14147
-
14148
- setU64(registerIndex: number, value: bigint) {
14149
- this.asUnsigned[registerIndex] = value;
14150
- }
14151
-
14152
- setI64(registerIndex: number, value: bigint) {
14153
- this.asSigned[registerIndex] = value;
14154
- }
14155
-
14156
- getAllU64() {
14157
- return this.asUnsigned;
14158
- }
14159
- }
14160
-
14161
14444
  declare class NibblesDecoder {
14162
14445
  private byte = new Int8Array(1);
14163
14446
 
@@ -17594,870 +17877,865 @@ declare namespace index$8 {
17594
17877
  export type { index$8_BigGas as BigGas, index$8_Gas as Gas, index$8_GasCounter as GasCounter, index$8_InterpreterOptions as InterpreterOptions, index$8_MemoryIndex as MemoryIndex, index$8_SbrkIndex as SbrkIndex, index$8_SmallGas as SmallGas };
17595
17878
  }
17596
17879
 
17597
- /**
17598
- * Program counter is a 64-bit unsigned integer that points to the next instruction
17599
- *
17600
- * https://graypaper.fluffylabs.dev/#/1c979cb/2e3f012e3f01?v=0.7.1
17601
- */
17602
- type ProgramCounter = Opaque<U64, "ProgramCounter[u64]">;
17603
- /** Convert a number into ProgramCounter. */
17604
- declare const tryAsProgramCounter = (v: number | bigint): ProgramCounter => asOpaqueType(tryAsU64(v));
17880
+ interface IHostCallMemory {
17881
+ storeFrom(address: U64, bytes: Uint8Array): Result$2<OK, PageFault | OutOfBounds>;
17882
+ loadInto(result: Uint8Array, startAddress: U64): Result$2<OK, PageFault | OutOfBounds>;
17883
+ }
17605
17884
 
17606
- /** Running PVM instance identifier. */
17607
- type MachineId = Opaque<U64, "MachineId[u64]">;
17608
- /** Convert a number into PVM instance identifier. */
17609
- declare const tryAsMachineId = (v: number | bigint): MachineId => asOpaqueType(tryAsU64(v));
17885
+ declare class HostCallMemory implements IHostCallMemory {
17886
+ constructor(private readonly memory: Memory) {}
17610
17887
 
17611
- declare class MachineInstance {
17612
- async run(gas: BigGas, registers: Registers): Promise<MachineResult> {
17613
- return {
17614
- result: {
17615
- status: Status.OK,
17616
- },
17617
- gas,
17618
- registers,
17619
- };
17620
- }
17621
- }
17888
+ storeFrom(address: U64, bytes: Uint8Array): Result$2<OK, PageFault | OutOfBounds> {
17889
+ if (bytes.length === 0) {
17890
+ return Result.ok(OK);
17891
+ }
17622
17892
 
17623
- type MachineStatus =
17624
- | {
17625
- status: typeof Status.HOST;
17626
- hostCallIndex: U64;
17893
+ if (address + tryAsU64(bytes.length) > MEMORY_SIZE) {
17894
+ return Result.error(new OutOfBounds());
17627
17895
  }
17628
- | {
17629
- status: typeof Status.FAULT;
17630
- address: U64;
17896
+
17897
+ return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
17898
+ }
17899
+
17900
+ loadInto(result: Uint8Array, startAddress: U64): Result$2<OK, PageFault | OutOfBounds> {
17901
+ if (result.length === 0) {
17902
+ return Result.ok(OK);
17631
17903
  }
17632
- | {
17633
- status: typeof Status.OK | typeof Status.HALT | typeof Status.PANIC | typeof Status.OOG;
17634
- };
17635
17904
 
17636
- /** Data returned by a machine invocation. */
17637
- type MachineResult = {
17638
- result: MachineStatus;
17639
- gas: BigGas;
17640
- registers: Registers;
17641
- };
17905
+ if (startAddress + tryAsU64(result.length) > MEMORY_SIZE) {
17906
+ return Result.error(new OutOfBounds());
17907
+ }
17642
17908
 
17643
- /** Types of possbile operations to request by Pages host call. */
17644
- declare enum MemoryOperation {
17645
- /** Zeroes memory and set access to unreadable. */
17646
- Void = 0,
17647
- /** Zeroes memory and set access to read-only. */
17648
- ZeroRead = 1,
17649
- /** Zeroes memory and set access to read-write. */
17650
- ZeroWrite = 2,
17651
- /** Preserve memory and set access to read-only. */
17652
- Read = 3,
17653
- /** Preserve memory and set access to read-write. */
17654
- Write = 4,
17909
+ return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
17910
+ }
17655
17911
  }
17656
17912
 
17657
- /** Convert a number into MemoryOperation or null (if invalid). */
17658
- declare const toMemoryOperation = (v: number | bigint): MemoryOperation | null =>
17659
- v <= MemoryOperation.Write && v >= MemoryOperation.Void ? Number(v) : null;
17660
-
17661
- /** An error that may occur during `peek` or `poke` host call. */
17662
- declare enum PeekPokeError {
17663
- /** Source page fault. */
17664
- SourcePageFault = 0,
17665
- /** Destination page fault. */
17666
- DestinationPageFault = 1,
17667
- /** No machine under given machine index. */
17668
- NoMachine = 2,
17913
+ interface IHostCallRegisters {
17914
+ get(registerIndex: number): U64;
17915
+ set(registerIndex: number, value: U64): void;
17669
17916
  }
17670
17917
 
17671
- declare enum ZeroVoidError {
17672
- /** No machine under given machine index. */
17673
- NoMachine = 0,
17674
- /** Attempting to void or zero non-accessible page. */
17675
- InvalidPage = 1,
17676
- }
17677
-
17678
- declare enum PagesError {
17679
- /** No machine under given machine index. */
17680
- NoMachine = 0,
17681
- /** Invalid memory operation. */
17682
- InvalidOperation = 1,
17683
- /** Attempting to change non-accessible page or trying to preserve value of voided page. */
17684
- InvalidPage = 2,
17685
- }
17918
+ declare class HostCallRegisters implements IHostCallRegisters {
17919
+ constructor(private readonly registers: Registers) {}
17686
17920
 
17687
- /** Error machine is not found. */
17688
- declare const NoMachineError = Symbol("Machine index not found.");
17689
- type NoMachineError = typeof NoMachineError;
17921
+ get(registerIndex: number): U64 {
17922
+ return tryAsU64(this.registers.getU64(registerIndex));
17923
+ }
17690
17924
 
17691
- /** Too many segments already exported. */
17692
- declare const SegmentExportError = Symbol("Too many segments already exported.");
17693
- type SegmentExportError = typeof SegmentExportError;
17925
+ set(registerIndex: number, value: U64) {
17926
+ this.registers.setU64(registerIndex, value);
17927
+ }
17928
+ }
17694
17929
 
17695
- /** Host functions external invocations available during refine phase. */
17696
- interface RefineExternalities {
17697
- /** Forget a previously started nested VM. */
17698
- machineExpunge(machineIndex: MachineId): Promise<Result$2<ProgramCounter, NoMachineError>>;
17930
+ /** Strictly-typed host call index. */
17931
+ type HostCallIndex = Opaque<U32, "HostCallIndex[U32]">;
17932
+ /** Attempt to convert a number into `HostCallIndex`. */
17933
+ declare const tryAsHostCallIndex = (v: number): HostCallIndex => asOpaqueType(tryAsU32(v));
17699
17934
 
17700
- /** Set given range of pages as non-accessible and re-initialize them with zeros. */
17701
- machineVoidPages(machineIndex: MachineId, pageStart: U64, pageCount: U64): Promise<Result$2<OK, ZeroVoidError>>;
17935
+ /**
17936
+ * Host-call exit reason.
17937
+ *
17938
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/24a30124a501?v=0.7.2
17939
+ */
17940
+ declare enum PvmExecution {
17941
+ Halt = 0,
17942
+ Panic = 1,
17943
+ OOG = 2, // out-of-gas
17944
+ }
17702
17945
 
17703
- /** Set given range of pages as writeable and initialize them with zeros. */
17704
- machineZeroPages(machineIndex: MachineId, pageStart: U64, pageCount: U64): Promise<Result$2<OK, ZeroVoidError>>;
17946
+ /** A utility function to easily trace a bunch of registers. */
17947
+ declare function traceRegisters(...regs: number[]) {
17948
+ return regs.map(tryAsRegisterIndex);
17949
+ }
17705
17950
 
17706
- /** Copy a fragment of memory from `machineIndex` into given destination memory. */
17707
- machinePeekFrom(
17708
- machineIndex: MachineId,
17709
- destinationStart: U64,
17710
- sourceStart: U64,
17711
- length: U64,
17712
- destination: Memory,
17713
- ): Promise<Result$2<OK, PeekPokeError>>;
17951
+ /** An interface for a host call implementation */
17952
+ interface HostCallHandler {
17953
+ /** Index of that host call (i.e. what PVM invokes via `ecalli`) */
17954
+ readonly index: HostCallIndex;
17714
17955
 
17715
- /** Write a fragment of memory into `machineIndex` from given source memory. */
17716
- machinePokeInto(
17717
- machineIndex: MachineId,
17718
- sourceStart: U64,
17719
- destinationStart: U64,
17720
- length: U64,
17721
- source: Memory,
17722
- ): Promise<Result$2<OK, PeekPokeError>>;
17956
+ /**
17957
+ * The gas cost of invocation of that host call.
17958
+ *
17959
+ * NOTE: `((reg: IHostCallRegisters) => Gas)` function is for compatibility reasons: pre GP 0.7.2
17960
+ */
17961
+ readonly basicGasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
17723
17962
 
17724
- /** Start an inner PVM instance with given entry point and starting code. */
17725
- machineInit(code: BytesBlob, programCounter: ProgramCounter): Promise<Result$2<MachineId, ProgramDecoderError>>;
17963
+ /** Currently executing service id. */
17964
+ readonly currentServiceId: U32;
17726
17965
 
17727
- /** Run a previously initialized PVM instance with given gas and registers. */
17728
- machineInvoke(
17729
- machineIndex: MachineId,
17730
- gas: BigGas,
17731
- registers: Registers,
17732
- ): Promise<Result$2<MachineResult, NoMachineError>>;
17966
+ /** Input&Output registers that we should add to tracing log. */
17967
+ readonly tracedRegisters: RegisterIndex[];
17733
17968
 
17734
17969
  /**
17735
- * Export segment for future retrieval.
17970
+ * Actually execute the host call.
17736
17971
  *
17737
- * Returns the index assigned to that segment or an error if there is too many already exported.
17972
+ * NOTE the call is ALLOWED and expected to modify registers and memory.
17738
17973
  */
17739
- exportSegment(segment: Segment): Result$2<SegmentIndex, SegmentExportError>;
17740
-
17741
- /** Lookup a historical preimage. */
17742
- historicalLookup(serviceId: ServiceId | null, hash: Blake2bHash): Promise<BytesBlob | null>;
17743
-
17744
- /** Change access to and/or zero the value of memory. */
17745
- machinePages(
17746
- machineIndex: MachineId,
17747
- pageStart: U64,
17748
- pageCount: U64,
17749
- requestType: MemoryOperation | null,
17750
- ): Promise<Result$2<OK, PagesError>>;
17974
+ execute(gas: GasCounter, regs: IHostCallRegisters, memory: IHostCallMemory): Promise<undefined | PvmExecution>;
17751
17975
  }
17752
17976
 
17753
- declare const InsufficientFundsError = "insufficient funds";
17754
- type InsufficientFundsError = typeof InsufficientFundsError;
17755
-
17756
- /** Update of the state entries coming from accumulation of a single service. */
17757
- type ServiceStateUpdate = Partial<Pick<State, "privilegedServices" | "authQueues" | "designatedValidatorData">> &
17758
- ServicesUpdate;
17977
+ /** Container for all available host calls. */
17978
+ declare class HostCallsManager {
17979
+ private readonly hostCalls = new Map<HostCallIndex, HostCallHandler>();
17980
+ private readonly missing;
17759
17981
 
17760
- /**
17761
- * State updates that currently accumulating service produced.
17762
- *
17763
- * `x_u`: https://graypaper.fluffylabs.dev/#/9a08063/2f31012f3101?v=0.6.6
17764
- */
17765
- declare class AccumulationStateUpdate {
17766
- /** Updated authorization queues for cores. */
17767
- public readonly authorizationQueues: Map<CoreIndex, FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>> =
17768
- new Map();
17769
- /** New validators data. */
17770
- public validatorsData: PerValidator<ValidatorData> | null = null;
17771
- /** Updated priviliged services. */
17772
- public privilegedServices: PrivilegedServices | null = null;
17982
+ constructor({
17983
+ missing,
17984
+ handlers = [],
17985
+ }: {
17986
+ missing: HostCallHandler;
17987
+ handlers?: HostCallHandler[];
17988
+ }) {
17989
+ this.missing = missing;
17773
17990
 
17774
- private constructor(
17775
- /** Services state updates. */
17776
- public readonly services: ServicesUpdate,
17777
- /** Pending transfers. */
17778
- public transfers: PendingTransfer[],
17779
- /** Yielded accumulation root. */
17780
- public readonly yieldedRoots: Map<ServiceId, OpaqueHash> = new Map(),
17781
- ) {}
17991
+ for (const handler of handlers) {
17992
+ check`${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
17993
+ this.hostCalls.set(handler.index, handler);
17994
+ }
17995
+ }
17782
17996
 
17783
- /** Create new empty state update. */
17784
- static empty(): AccumulationStateUpdate {
17785
- return new AccumulationStateUpdate(
17786
- {
17787
- servicesUpdates: [],
17788
- servicesRemoved: [],
17789
- preimages: [],
17790
- storage: [],
17791
- },
17792
- [],
17793
- );
17997
+ /** Get a host call by index. */
17998
+ get(hostCallIndex: HostCallIndex): HostCallHandler {
17999
+ return this.hostCalls.get(hostCallIndex) ?? this.missing;
17794
18000
  }
17795
18001
 
17796
- /** Create a state update with some existing, yet uncommited services updates. */
17797
- static new(update: ServicesUpdate): AccumulationStateUpdate {
17798
- return new AccumulationStateUpdate(
17799
- {
17800
- ...update,
17801
- },
17802
- [],
17803
- );
18002
+ traceHostCall(
18003
+ context: string,
18004
+ hostCallIndex: HostCallIndex,
18005
+ hostCallHandler: HostCallHandler,
18006
+ registers: IHostCallRegisters,
18007
+ gas: Gas,
18008
+ ) {
18009
+ const { currentServiceId } = hostCallHandler;
18010
+ const requested = hostCallIndex !== hostCallHandler.index ? ` (${hostCallIndex})` : "";
18011
+ const name = `${hostCallHandler.constructor.name}:${hostCallHandler.index}`;
18012
+ const registerValues = hostCallHandler.tracedRegisters
18013
+ .map((idx) => [idx.toString().padStart(2, "0"), registers.get(idx)] as const)
18014
+ .filter((v) => v[1] !== 0n)
18015
+ .map(([idx, value]) => {
18016
+ return `r${idx}=${value} (0x${value.toString(16)})`;
18017
+ })
18018
+ .join(", ");
18019
+ logger.insane`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`;
17804
18020
  }
18021
+ }
17805
18022
 
17806
- /** Create a copy of another `StateUpdate`. Used by checkpoints. */
17807
- static copyFrom(from: AccumulationStateUpdate): AccumulationStateUpdate {
17808
- const serviceUpdates: ServicesUpdate = {
17809
- servicesUpdates: [...from.services.servicesUpdates],
17810
- servicesRemoved: [...from.services.servicesRemoved],
17811
- preimages: [...from.services.preimages],
17812
- storage: [...from.services.storage],
17813
- };
17814
- const transfers = [...from.transfers];
17815
- const update = new AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
18023
+ type ResolveFn = (pvm: Interpreter) => void;
17816
18024
 
17817
- // update entries
17818
- for (const [k, v] of from.authorizationQueues) {
17819
- update.authorizationQueues.set(k, v);
17820
- }
18025
+ declare class InterpreterInstanceManager {
18026
+ private instances: Interpreter[] = [];
18027
+ private waitingQueue: ResolveFn[] = [];
17821
18028
 
17822
- if (from.validatorsData !== null) {
17823
- update.validatorsData = asKnownSize([...from.validatorsData]);
18029
+ constructor(noOfPvmInstances: number) {
18030
+ for (let i = 0; i < noOfPvmInstances; i++) {
18031
+ this.instances.push(
18032
+ new Interpreter({
18033
+ useSbrkGas: false,
18034
+ }),
18035
+ );
17824
18036
  }
18037
+ }
17825
18038
 
17826
- if (from.privilegedServices !== null) {
17827
- update.privilegedServices = PrivilegedServices.create({
17828
- ...from.privilegedServices,
17829
- assigners: asKnownSize([...from.privilegedServices.assigners]),
17830
- });
18039
+ async getInstance(): Promise<Interpreter> {
18040
+ const instance = this.instances.pop();
18041
+ if (instance !== undefined) {
18042
+ return Promise.resolve(instance);
17831
18043
  }
17832
- return update;
18044
+ return new Promise((resolve) => {
18045
+ this.waitingQueue.push(resolve);
18046
+ });
17833
18047
  }
17834
18048
 
17835
- /** Retrieve and clear pending transfers. */
17836
- takeTransfers() {
17837
- const transfers = this.transfers;
17838
- this.transfers = [];
17839
- return transfers;
18049
+ releaseInstance(pvm: Interpreter) {
18050
+ const waiting = this.waitingQueue.shift();
18051
+ if (waiting !== undefined) {
18052
+ return waiting(pvm);
18053
+ }
18054
+ this.instances.push(pvm);
17840
18055
  }
17841
18056
  }
17842
18057
 
17843
- type StateSlice = Pick<State, "getService" | "privilegedServices">;
18058
+ declare class ReturnValue {
18059
+ private constructor(
18060
+ public consumedGas: Gas,
18061
+ public status: Status | null,
18062
+ public memorySlice: Uint8Array | null,
18063
+ ) {
18064
+ check`
18065
+ ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
18066
+ 'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
18067
+ `;
18068
+ }
17844
18069
 
17845
- declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
17846
- /** A collection of state updates. */
17847
- public readonly stateUpdate;
17848
-
17849
- constructor(
17850
- /** Original (unmodified state). */
17851
- public readonly state: T,
17852
- stateUpdate?: AccumulationStateUpdate,
17853
- ) {
17854
- this.stateUpdate =
17855
- stateUpdate === undefined ? AccumulationStateUpdate.empty() : AccumulationStateUpdate.copyFrom(stateUpdate);
18070
+ static fromStatus(consumedGas: Gas, status: Status) {
18071
+ return new ReturnValue(consumedGas, status, null);
17856
18072
  }
17857
18073
 
17858
- /**
17859
- * Retrieve info of service with given id.
17860
- *
17861
- * NOTE the info may be updated compared to what is in the state.
17862
- *
17863
- * Takes into account ejected and newly created services as well.
17864
- */
17865
- getServiceInfo(destination: ServiceId | null): ServiceAccountInfo | null {
17866
- if (destination === null) {
17867
- return null;
17868
- }
17869
-
17870
- const maybeNewService = this.stateUpdate.services.servicesUpdates.find(
17871
- (update) => update.serviceId === destination,
17872
- );
17873
-
17874
- if (maybeNewService !== undefined) {
17875
- return maybeNewService.action.account;
17876
- }
17877
-
17878
- const maybeService = this.state.getService(destination);
17879
- if (maybeService === null) {
17880
- return null;
17881
- }
17882
-
17883
- return maybeService.getInfo();
18074
+ static fromMemorySlice(consumedGas: Gas, memorySlice: Uint8Array) {
18075
+ return new ReturnValue(consumedGas, null, memorySlice);
17884
18076
  }
17885
18077
 
17886
- getStorage(serviceId: ServiceId, rawKey: StorageKey): BytesBlob | null {
17887
- const item = this.stateUpdate.services.storage.find((x) => x.serviceId === serviceId && x.key.isEqualTo(rawKey));
17888
- if (item !== undefined) {
17889
- return item.value;
17890
- }
18078
+ hasMemorySlice(): this is this & { status: null; memorySlice: Uint8Array } {
18079
+ return this.memorySlice instanceof Uint8Array && this.status === null;
18080
+ }
17891
18081
 
17892
- const service = this.state.getService(serviceId);
17893
- return service?.getStorage(rawKey) ?? null;
18082
+ hasStatus(): this is this & { status: Status; memorySlice: null } {
18083
+ return !this.hasMemorySlice();
17894
18084
  }
18085
+ }
18086
+ declare class HostCalls {
18087
+ constructor(
18088
+ private pvmInstanceManager: InterpreterInstanceManager,
18089
+ private hostCalls: HostCallsManager,
18090
+ ) {}
17895
18091
 
17896
- /**
17897
- * Returns `true` if the preimage is already provided either in current
17898
- * accumulation scope or earlier.
17899
- *
17900
- * NOTE: Does not check if the preimage is available, we just check
17901
- * the existence in `preimages` map.
17902
- */
17903
- hasPreimage(serviceId: ServiceId, hash: PreimageHash): boolean {
17904
- const providedPreimage = this.stateUpdate.services.preimages.find(
17905
- // we ignore the action here, since if there is <any> update on that
17906
- // hash it means it has to exist, right?
17907
- (p) => p.serviceId === serviceId && p.hash.isEqualTo(hash),
17908
- );
17909
- if (providedPreimage !== undefined) {
17910
- return true;
18092
+ private getReturnValue(status: Status, pvmInstance: Interpreter): ReturnValue {
18093
+ const gasConsumed = pvmInstance.getGasConsumed();
18094
+ if (status === Status.OOG) {
18095
+ return ReturnValue.fromStatus(gasConsumed, status);
17911
18096
  }
17912
18097
 
17913
- // fallback to state preimages
17914
- const service = this.state.getService(serviceId);
17915
- if (service === undefined) {
17916
- return false;
17917
- }
18098
+ if (status === Status.HALT) {
18099
+ const memory = pvmInstance.getMemory();
18100
+ const regs = pvmInstance.getRegisters();
18101
+ const maybeAddress = regs.getLowerU32(7);
18102
+ const maybeLength = regs.getLowerU32(8);
17918
18103
 
17919
- return service?.hasPreimage(hash) ?? false;
17920
- }
18104
+ const result = safeAllocUint8Array(maybeLength);
18105
+ const startAddress = tryAsMemoryIndex(maybeAddress);
18106
+ const loadResult = memory.loadInto(result, startAddress);
17921
18107
 
17922
- getPreimage(serviceId: ServiceId, hash: PreimageHash): BytesBlob | null {
17923
- // TODO [ToDr] Should we verify availability here?
17924
- const freshlyProvided = this.stateUpdate.services.preimages.find(
17925
- (x) => x.serviceId === serviceId && x.hash.isEqualTo(hash),
17926
- );
17927
- if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
17928
- return freshlyProvided.action.preimage.blob;
18108
+ if (loadResult.isError) {
18109
+ return ReturnValue.fromMemorySlice(gasConsumed, new Uint8Array());
18110
+ }
18111
+
18112
+ return ReturnValue.fromMemorySlice(gasConsumed, result);
17929
18113
  }
17930
18114
 
17931
- const service = this.state.getService(serviceId);
17932
- return service?.getPreimage(hash) ?? null;
18115
+ return ReturnValue.fromStatus(gasConsumed, Status.PANIC);
17933
18116
  }
17934
18117
 
17935
- /** Get status of a preimage of current service taking into account any updates. */
17936
- getLookupHistory(
17937
- currentTimeslot: TimeSlot,
17938
- serviceId: ServiceId,
17939
- hash: PreimageHash,
17940
- length: U64,
17941
- ): LookupHistoryItem | null {
17942
- // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
17943
- // the same state update. We should however switch to proper "updated state"
17944
- // representation soon.
17945
- const updatedPreimage = this.stateUpdate.services.preimages.findLast(
17946
- (update) => update.serviceId === serviceId && update.hash.isEqualTo(hash) && BigInt(update.length) === length,
17947
- );
17948
-
17949
- const stateFallback = () => {
17950
- // fallback to state lookup
17951
- const service = this.state.getService(serviceId);
17952
- const lenU32 = preimageLenAsU32(length);
17953
- if (lenU32 === null || service === null) {
17954
- return null;
18118
+ private async execute(pvmInstance: Interpreter) {
18119
+ pvmInstance.runProgram();
18120
+ for (;;) {
18121
+ let status = pvmInstance.getStatus();
18122
+ if (status !== Status.HOST) {
18123
+ return this.getReturnValue(status, pvmInstance);
17955
18124
  }
18125
+ check`
18126
+ ${pvmInstance.getExitParam() !== null}
18127
+ "We know that the exit param is not null, because the status is 'Status.HOST'
18128
+ `;
18129
+ const hostCallIndex = pvmInstance.getExitParam() ?? -1;
18130
+ const gas = pvmInstance.getGasCounter();
18131
+ const regs = new HostCallRegisters(pvmInstance.getRegisters());
18132
+ const memory = new HostCallMemory(pvmInstance.getMemory());
18133
+ const index = tryAsHostCallIndex(hostCallIndex);
17956
18134
 
17957
- const slots = service.getLookupHistory(hash, lenU32);
17958
- return slots === null ? null : new LookupHistoryItem(hash, lenU32, slots);
17959
- };
17960
-
17961
- if (updatedPreimage === undefined) {
17962
- return stateFallback();
17963
- }
18135
+ const hostCall = this.hostCalls.get(index);
18136
+ const gasBefore = gas.get();
18137
+ // NOTE: `basicGasCost(regs)` function is for compatibility reasons: pre GP 0.7.2
18138
+ const basicGasCost =
18139
+ typeof hostCall.basicGasCost === "number" ? hostCall.basicGasCost : hostCall.basicGasCost(regs);
18140
+ const underflow = gas.sub(basicGasCost);
17964
18141
 
17965
- const { action } = updatedPreimage;
17966
- switch (action.kind) {
17967
- case UpdatePreimageKind.Provide: {
17968
- // casting to U32 is safe, since we compare with object we have in memory.
17969
- return new LookupHistoryItem(hash, updatedPreimage.length, tryAsLookupHistorySlots([currentTimeslot]));
18142
+ const pcLog = `[PC: ${pvmInstance.getPC()}]`;
18143
+ if (underflow) {
18144
+ this.hostCalls.traceHostCall(`${pcLog} OOG`, index, hostCall, regs, gas.get());
18145
+ return ReturnValue.fromStatus(pvmInstance.getGasConsumed(), Status.OOG);
17970
18146
  }
17971
- case UpdatePreimageKind.Remove: {
17972
- const state = stateFallback();
17973
- // kinda impossible, since we know it's there because it's removed.
17974
- if (state === null) {
17975
- return null;
17976
- }
18147
+ this.hostCalls.traceHostCall(`${pcLog} Invoking`, index, hostCall, regs, gasBefore);
18148
+ const result = await hostCall.execute(gas, regs, memory);
18149
+ this.hostCalls.traceHostCall(
18150
+ result === undefined ? `${pcLog} Result` : `${pcLog} Status(${PvmExecution[result]})`,
18151
+ index,
18152
+ hostCall,
18153
+ regs,
18154
+ gas.get(),
18155
+ );
17977
18156
 
17978
- return new LookupHistoryItem(hash, state.length, tryAsLookupHistorySlots([...state.slots, currentTimeslot]));
17979
- }
17980
- case UpdatePreimageKind.UpdateOrAdd: {
17981
- return action.item;
18157
+ if (result === PvmExecution.Halt) {
18158
+ status = Status.HALT;
18159
+ return this.getReturnValue(status, pvmInstance);
17982
18160
  }
17983
- }
17984
18161
 
17985
- assertNever(action);
17986
- }
18162
+ if (result === PvmExecution.Panic) {
18163
+ status = Status.PANIC;
18164
+ return this.getReturnValue(status, pvmInstance);
18165
+ }
17987
18166
 
17988
- /* State update functions. */
18167
+ if (result === PvmExecution.OOG) {
18168
+ status = Status.OOG;
18169
+ return this.getReturnValue(status, pvmInstance);
18170
+ }
17989
18171
 
17990
- updateStorage(serviceId: ServiceId, key: StorageKey, value: BytesBlob | null) {
17991
- const update =
17992
- value === null
17993
- ? UpdateStorage.remove({ serviceId, key })
17994
- : UpdateStorage.set({
17995
- serviceId,
17996
- storage: StorageItem.create({ key, value }),
17997
- });
18172
+ if (result === undefined) {
18173
+ pvmInstance.runProgram();
18174
+ status = pvmInstance.getStatus();
18175
+ continue;
18176
+ }
17998
18177
 
17999
- const index = this.stateUpdate.services.storage.findIndex(
18000
- (x) => x.serviceId === update.serviceId && x.key.isEqualTo(key),
18001
- );
18002
- const count = index === -1 ? 0 : 1;
18003
- this.stateUpdate.services.storage.splice(index, count, update);
18178
+ assertNever(result);
18179
+ }
18004
18180
  }
18005
18181
 
18006
- /**
18007
- * Update a preimage.
18008
- *
18009
- * Note we store all previous entries as well, since there might be a sequence of:
18010
- * `provide` -> `remove` and both should update the end state somehow.
18011
- */
18012
- updatePreimage(newUpdate: UpdatePreimage) {
18013
- this.stateUpdate.services.preimages.push(newUpdate);
18182
+ async runProgram(
18183
+ rawProgram: Uint8Array,
18184
+ initialPc: number,
18185
+ initialGas: Gas,
18186
+ maybeRegisters?: Registers,
18187
+ maybeMemory?: Memory,
18188
+ ): Promise<ReturnValue> {
18189
+ const pvmInstance = await this.pvmInstanceManager.getInstance();
18190
+ pvmInstance.reset(rawProgram, initialPc, initialGas, maybeRegisters, maybeMemory);
18191
+ try {
18192
+ return await this.execute(pvmInstance);
18193
+ } finally {
18194
+ this.pvmInstanceManager.releaseInstance(pvmInstance);
18195
+ }
18014
18196
  }
18197
+ }
18015
18198
 
18016
- updateServiceStorageUtilisation(
18017
- serviceId: ServiceId,
18018
- items: number,
18019
- bytes: bigint,
18020
- serviceInfo: ServiceAccountInfo,
18021
- ): Result$2<OK, InsufficientFundsError> {
18022
- check`${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
18023
- check`${bytes >= 0} storageUtilisationBytes has to be a positive number, got: ${bytes}`;
18024
-
18025
- const overflowItems = !isU32(items);
18026
- const overflowBytes = !isU64(bytes);
18027
-
18028
- // TODO [ToDr] this is not specified in GP, but it seems sensible.
18029
- if (overflowItems || overflowBytes) {
18030
- return Result.error(InsufficientFundsError);
18031
- }
18199
+ type index$7_HostCallHandler = HostCallHandler;
18200
+ type index$7_HostCallMemory = HostCallMemory;
18201
+ declare const index$7_HostCallMemory: typeof HostCallMemory;
18202
+ type index$7_HostCallRegisters = HostCallRegisters;
18203
+ declare const index$7_HostCallRegisters: typeof HostCallRegisters;
18204
+ type index$7_IHostCallMemory = IHostCallMemory;
18205
+ type index$7_IHostCallRegisters = IHostCallRegisters;
18206
+ type index$7_PvmExecution = PvmExecution;
18207
+ declare const index$7_PvmExecution: typeof PvmExecution;
18208
+ declare const index$7_traceRegisters: typeof traceRegisters;
18209
+ declare const index$7_tryAsHostCallIndex: typeof tryAsHostCallIndex;
18210
+ declare namespace index$7 {
18211
+ export { index$7_HostCallMemory as HostCallMemory, index$7_HostCallRegisters as HostCallRegisters, HostCallsManager as HostCalls, index$7_PvmExecution as PvmExecution, HostCalls as PvmHostCallExtension, InterpreterInstanceManager as PvmInstanceManager, index$7_traceRegisters as traceRegisters, index$7_tryAsHostCallIndex as tryAsHostCallIndex };
18212
+ export type { index$7_HostCallHandler as HostCallHandler, index$7_IHostCallMemory as IHostCallMemory, index$7_IHostCallRegisters as IHostCallRegisters };
18213
+ }
18032
18214
 
18033
- const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
18034
- if (serviceInfo.balance < thresholdBalance) {
18035
- return Result.error(InsufficientFundsError);
18036
- }
18215
+ /**
18216
+ * Program counter is a 64-bit unsigned integer that points to the next instruction
18217
+ *
18218
+ * https://graypaper.fluffylabs.dev/#/1c979cb/2e3f012e3f01?v=0.7.1
18219
+ */
18220
+ type ProgramCounter = Opaque<U64, "ProgramCounter[u64]">;
18221
+ /** Convert a number into ProgramCounter. */
18222
+ declare const tryAsProgramCounter = (v: number | bigint): ProgramCounter => asOpaqueType(tryAsU64(v));
18037
18223
 
18038
- // Update service info with new details.
18039
- this.updateServiceInfo(
18040
- serviceId,
18041
- ServiceAccountInfo.create({
18042
- ...serviceInfo,
18043
- storageUtilisationBytes: bytes,
18044
- storageUtilisationCount: items,
18045
- }),
18046
- );
18047
- return Result.ok(OK);
18224
+ /** Running PVM instance identifier. */
18225
+ type MachineId = Opaque<U64, "MachineId[u64]">;
18226
+ /** Convert a number into PVM instance identifier. */
18227
+ declare const tryAsMachineId = (v: number | bigint): MachineId => asOpaqueType(tryAsU64(v));
18228
+
18229
+ declare class MachineInstance {
18230
+ async run(gas: BigGas, registers: Registers): Promise<MachineResult> {
18231
+ return {
18232
+ result: {
18233
+ status: Status.OK,
18234
+ },
18235
+ gas,
18236
+ registers,
18237
+ };
18048
18238
  }
18239
+ }
18049
18240
 
18050
- updateServiceInfo(serviceId: ServiceId, newInfo: ServiceAccountInfo) {
18051
- const idx = this.stateUpdate.services.servicesUpdates.findIndex((x) => x.serviceId === serviceId);
18052
- const toRemove = idx === -1 ? 0 : 1;
18053
- const existingItem = this.stateUpdate.services.servicesUpdates[idx];
18241
+ type MachineStatus =
18242
+ | {
18243
+ status: typeof Status.HOST;
18244
+ hostCallIndex: U64;
18245
+ }
18246
+ | {
18247
+ status: typeof Status.FAULT;
18248
+ address: U64;
18249
+ }
18250
+ | {
18251
+ status: typeof Status.OK | typeof Status.HALT | typeof Status.PANIC | typeof Status.OOG;
18252
+ };
18054
18253
 
18055
- if (existingItem?.action.kind === UpdateServiceKind.Create) {
18056
- this.stateUpdate.services.servicesUpdates.splice(
18057
- idx,
18058
- toRemove,
18059
- UpdateService.create({
18060
- serviceId,
18061
- serviceInfo: newInfo,
18062
- lookupHistory: existingItem.action.lookupHistory,
18063
- }),
18064
- );
18254
+ /** Data returned by a machine invocation. */
18255
+ type MachineResult = {
18256
+ result: MachineStatus;
18257
+ gas: BigGas;
18258
+ registers: Registers;
18259
+ };
18065
18260
 
18066
- return;
18067
- }
18261
+ /** Types of possbile operations to request by Pages host call. */
18262
+ declare enum MemoryOperation {
18263
+ /** Zeroes memory and set access to unreadable. */
18264
+ Void = 0,
18265
+ /** Zeroes memory and set access to read-only. */
18266
+ ZeroRead = 1,
18267
+ /** Zeroes memory and set access to read-write. */
18268
+ ZeroWrite = 2,
18269
+ /** Preserve memory and set access to read-only. */
18270
+ Read = 3,
18271
+ /** Preserve memory and set access to read-write. */
18272
+ Write = 4,
18273
+ }
18068
18274
 
18069
- this.stateUpdate.services.servicesUpdates.splice(
18070
- idx,
18071
- toRemove,
18072
- UpdateService.update({
18073
- serviceId,
18074
- serviceInfo: newInfo,
18075
- }),
18076
- );
18077
- }
18275
+ /** Convert a number into MemoryOperation or null (if invalid). */
18276
+ declare const toMemoryOperation = (v: number | bigint): MemoryOperation | null =>
18277
+ v <= MemoryOperation.Write && v >= MemoryOperation.Void ? Number(v) : null;
18078
18278
 
18079
- getPrivilegedServices() {
18080
- if (this.stateUpdate.privilegedServices !== null) {
18081
- return this.stateUpdate.privilegedServices;
18082
- }
18279
+ /** An error that may occur during `peek` or `poke` host call. */
18280
+ declare enum PeekPokeError {
18281
+ /** Source page fault. */
18282
+ SourcePageFault = 0,
18283
+ /** Destination page fault. */
18284
+ DestinationPageFault = 1,
18285
+ /** No machine under given machine index. */
18286
+ NoMachine = 2,
18287
+ }
18083
18288
 
18084
- return this.state.privilegedServices;
18085
- }
18289
+ declare enum ZeroVoidError {
18290
+ /** No machine under given machine index. */
18291
+ NoMachine = 0,
18292
+ /** Attempting to void or zero non-accessible page. */
18293
+ InvalidPage = 1,
18086
18294
  }
18087
18295
 
18088
- declare function preimageLenAsU32(length: U64) {
18089
- // Safe to convert to Number and U32: we check that len < 2^32 before conversion
18090
- return length >= 2n ** 32n ? null : tryAsU32(Number(length));
18296
+ declare enum PagesError {
18297
+ /** No machine under given machine index. */
18298
+ NoMachine = 0,
18299
+ /** Invalid memory operation. */
18300
+ InvalidOperation = 1,
18301
+ /** Attempting to change non-accessible page or trying to preserve value of voided page. */
18302
+ InvalidPage = 2,
18091
18303
  }
18092
18304
 
18093
- /**
18094
- * Host call result constants.
18095
- *
18096
- * https://graypaper.fluffylabs.dev/#/85129da/2c7c022c7c02?v=0.6.3
18097
- */
18098
- declare const HostCallResult = {
18099
- /** The return value indicating an item does not exist. */
18100
- NONE: tryAsU64(0xffff_ffff_ffff_ffffn), // 2**64 - 1
18101
- /** Name unknown. */
18102
- WHAT: tryAsU64(0xffff_ffff_ffff_fffen), // 2**64 - 2
18103
- /** The inner PVM memory index provided for reading/writing is not accessible. */
18104
- OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
18105
- /** Index unknown. */
18106
- WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
18107
- /** Storage full or resource already allocated. */
18108
- FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
18109
- /** Core index unknown. */
18110
- CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
18111
- /** Insufficient funds. */
18112
- CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
18113
- /** Gas limit too low. */
18114
- LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
18115
- /** The item is already solicited, cannot be forgotten or the operation is invalid due to privilege level. */
18116
- HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
18117
- /** The return value indicating general success. */
18118
- OK: tryAsU64(0n),
18119
- } as const;
18305
+ /** Error machine is not found. */
18306
+ declare const NoMachineError = Symbol("Machine index not found.");
18307
+ type NoMachineError = typeof NoMachineError;
18120
18308
 
18121
- interface IHostCallMemory {
18122
- storeFrom(address: U64, bytes: Uint8Array): Result$2<OK, PageFault | OutOfBounds>;
18123
- loadInto(result: Uint8Array, startAddress: U64): Result$2<OK, PageFault | OutOfBounds>;
18124
- getMemory(): Memory;
18309
+ /** Too many segments already exported. */
18310
+ declare const SegmentExportError = Symbol("Too many segments already exported.");
18311
+ type SegmentExportError = typeof SegmentExportError;
18312
+
18313
+ /** Host functions external invocations available during refine phase. */
18314
+ interface RefineExternalities {
18315
+ /** Forget a previously started nested VM. */
18316
+ machineExpunge(machineIndex: MachineId): Promise<Result$2<ProgramCounter, NoMachineError>>;
18317
+
18318
+ /** Set given range of pages as non-accessible and re-initialize them with zeros. */
18319
+ machineVoidPages(machineIndex: MachineId, pageStart: U64, pageCount: U64): Promise<Result$2<OK, ZeroVoidError>>;
18320
+
18321
+ /** Set given range of pages as writeable and initialize them with zeros. */
18322
+ machineZeroPages(machineIndex: MachineId, pageStart: U64, pageCount: U64): Promise<Result$2<OK, ZeroVoidError>>;
18323
+
18324
+ /** Copy a fragment of memory from `machineIndex` into given destination memory. */
18325
+ machinePeekFrom(
18326
+ machineIndex: MachineId,
18327
+ destinationStart: U64,
18328
+ sourceStart: U64,
18329
+ length: U64,
18330
+ destination: IHostCallMemory,
18331
+ ): Promise<Result$2<OK, PeekPokeError>>;
18332
+
18333
+ /** Write a fragment of memory into `machineIndex` from given source memory. */
18334
+ machinePokeInto(
18335
+ machineIndex: MachineId,
18336
+ sourceStart: U64,
18337
+ destinationStart: U64,
18338
+ length: U64,
18339
+ source: IHostCallMemory,
18340
+ ): Promise<Result$2<OK, PeekPokeError>>;
18341
+
18342
+ /** Start an inner PVM instance with given entry point and starting code. */
18343
+ machineInit(code: BytesBlob, programCounter: ProgramCounter): Promise<Result$2<MachineId, ProgramDecoderError>>;
18344
+
18345
+ /** Run a previously initialized PVM instance with given gas and registers. */
18346
+ machineInvoke(
18347
+ machineIndex: MachineId,
18348
+ gas: BigGas,
18349
+ registers: Registers,
18350
+ ): Promise<Result$2<MachineResult, NoMachineError>>;
18351
+
18352
+ /**
18353
+ * Export segment for future retrieval.
18354
+ *
18355
+ * Returns the index assigned to that segment or an error if there is too many already exported.
18356
+ */
18357
+ exportSegment(segment: Segment): Result$2<SegmentIndex, SegmentExportError>;
18358
+
18359
+ /** Lookup a historical preimage. */
18360
+ historicalLookup(serviceId: ServiceId | null, hash: Blake2bHash): Promise<BytesBlob | null>;
18361
+
18362
+ /** Change access to and/or zero the value of memory. */
18363
+ machinePages(
18364
+ machineIndex: MachineId,
18365
+ pageStart: U64,
18366
+ pageCount: U64,
18367
+ requestType: MemoryOperation | null,
18368
+ ): Promise<Result$2<OK, PagesError>>;
18125
18369
  }
18126
18370
 
18127
- declare class HostCallMemory implements IHostCallMemory {
18128
- constructor(private readonly memory: Memory) {}
18371
+ declare const InsufficientFundsError = "insufficient funds";
18372
+ type InsufficientFundsError = typeof InsufficientFundsError;
18129
18373
 
18130
- storeFrom(address: U64, bytes: Uint8Array): Result$2<OK, PageFault | OutOfBounds> {
18131
- if (bytes.length === 0) {
18132
- return Result.ok(OK);
18133
- }
18374
+ /** Update of the state entries coming from accumulation of a single service. */
18375
+ type ServiceStateUpdate = Partial<Pick<State, "privilegedServices" | "authQueues" | "designatedValidatorData">> &
18376
+ ServicesUpdate;
18134
18377
 
18135
- if (address + tryAsU64(bytes.length) > MEMORY_SIZE) {
18136
- return Result.error(new OutOfBounds());
18137
- }
18378
+ /**
18379
+ * State updates that currently accumulating service produced.
18380
+ *
18381
+ * `x_u`: https://graypaper.fluffylabs.dev/#/9a08063/2f31012f3101?v=0.6.6
18382
+ */
18383
+ declare class AccumulationStateUpdate {
18384
+ /** Updated authorization queues for cores. */
18385
+ public readonly authorizationQueues: Map<CoreIndex, FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>> =
18386
+ new Map();
18387
+ /** New validators data. */
18388
+ public validatorsData: PerValidator<ValidatorData> | null = null;
18389
+ /** Updated priviliged services. */
18390
+ public privilegedServices: PrivilegedServices | null = null;
18138
18391
 
18139
- return this.memory.storeFrom(tryAsMemoryIndex(Number(address)), bytes);
18392
+ private constructor(
18393
+ /** Services state updates. */
18394
+ public readonly services: ServicesUpdate,
18395
+ /** Pending transfers. */
18396
+ public transfers: PendingTransfer[],
18397
+ /** Yielded accumulation root. */
18398
+ public readonly yieldedRoots: Map<ServiceId, OpaqueHash> = new Map(),
18399
+ ) {}
18400
+
18401
+ /** Create new empty state update. */
18402
+ static empty(): AccumulationStateUpdate {
18403
+ return new AccumulationStateUpdate(
18404
+ {
18405
+ servicesUpdates: [],
18406
+ servicesRemoved: [],
18407
+ preimages: [],
18408
+ storage: [],
18409
+ },
18410
+ [],
18411
+ );
18412
+ }
18413
+
18414
+ /** Create a state update with some existing, yet uncommited services updates. */
18415
+ static new(update: ServicesUpdate): AccumulationStateUpdate {
18416
+ return new AccumulationStateUpdate(
18417
+ {
18418
+ ...update,
18419
+ },
18420
+ [],
18421
+ );
18140
18422
  }
18141
18423
 
18142
- loadInto(result: Uint8Array, startAddress: U64): Result$2<OK, PageFault | OutOfBounds> {
18143
- if (result.length === 0) {
18144
- return Result.ok(OK);
18424
+ /** Create a copy of another `StateUpdate`. Used by checkpoints. */
18425
+ static copyFrom(from: AccumulationStateUpdate): AccumulationStateUpdate {
18426
+ const serviceUpdates: ServicesUpdate = {
18427
+ servicesUpdates: [...from.services.servicesUpdates],
18428
+ servicesRemoved: [...from.services.servicesRemoved],
18429
+ preimages: [...from.services.preimages],
18430
+ storage: [...from.services.storage],
18431
+ };
18432
+ const transfers = [...from.transfers];
18433
+ const update = new AccumulationStateUpdate(serviceUpdates, transfers, new Map(from.yieldedRoots));
18434
+
18435
+ // update entries
18436
+ for (const [k, v] of from.authorizationQueues) {
18437
+ update.authorizationQueues.set(k, v);
18145
18438
  }
18146
18439
 
18147
- if (startAddress + tryAsU64(result.length) > MEMORY_SIZE) {
18148
- return Result.error(new OutOfBounds());
18440
+ if (from.validatorsData !== null) {
18441
+ update.validatorsData = asKnownSize([...from.validatorsData]);
18149
18442
  }
18150
18443
 
18151
- return this.memory.loadInto(result, tryAsMemoryIndex(Number(startAddress)));
18444
+ if (from.privilegedServices !== null) {
18445
+ update.privilegedServices = PrivilegedServices.create({
18446
+ ...from.privilegedServices,
18447
+ assigners: asKnownSize([...from.privilegedServices.assigners]),
18448
+ });
18449
+ }
18450
+ return update;
18152
18451
  }
18153
18452
 
18154
- getMemory(): Memory {
18155
- return this.memory;
18453
+ /** Retrieve and clear pending transfers. */
18454
+ takeTransfers() {
18455
+ const transfers = this.transfers;
18456
+ this.transfers = [];
18457
+ return transfers;
18156
18458
  }
18157
18459
  }
18158
18460
 
18159
- interface IHostCallRegisters {
18160
- get(registerIndex: number): U64;
18161
- set(registerIndex: number, value: U64): void;
18162
- }
18163
-
18164
- declare class HostCallRegisters implements IHostCallRegisters {
18165
- constructor(private readonly registers: Registers) {}
18461
+ type StateSlice = Pick<State, "getService" | "privilegedServices">;
18166
18462
 
18167
- get(registerIndex: number): U64 {
18168
- return tryAsU64(this.registers.getU64(registerIndex));
18169
- }
18463
+ declare class PartiallyUpdatedState<T extends StateSlice = StateSlice> {
18464
+ /** A collection of state updates. */
18465
+ public readonly stateUpdate;
18170
18466
 
18171
- set(registerIndex: number, value: U64) {
18172
- this.registers.setU64(registerIndex, value);
18467
+ constructor(
18468
+ /** Original (unmodified state). */
18469
+ public readonly state: T,
18470
+ stateUpdate?: AccumulationStateUpdate,
18471
+ ) {
18472
+ this.stateUpdate =
18473
+ stateUpdate === undefined ? AccumulationStateUpdate.empty() : AccumulationStateUpdate.copyFrom(stateUpdate);
18173
18474
  }
18174
- }
18175
18475
 
18176
- /** Strictly-typed host call index. */
18177
- type HostCallIndex = Opaque<U32, "HostCallIndex[U32]">;
18178
- /** Attempt to convert a number into `HostCallIndex`. */
18179
- declare const tryAsHostCallIndex = (v: number): HostCallIndex => asOpaqueType(tryAsU32(v));
18476
+ /**
18477
+ * Retrieve info of service with given id.
18478
+ *
18479
+ * NOTE the info may be updated compared to what is in the state.
18480
+ *
18481
+ * Takes into account ejected and newly created services as well.
18482
+ */
18483
+ getServiceInfo(destination: ServiceId | null): ServiceAccountInfo | null {
18484
+ if (destination === null) {
18485
+ return null;
18486
+ }
18180
18487
 
18181
- /**
18182
- * Host-call exit reason.
18183
- *
18184
- * https://graypaper.fluffylabs.dev/#/ab2cdbd/24a30124a501?v=0.7.2
18185
- */
18186
- declare enum PvmExecution {
18187
- Halt = 0,
18188
- Panic = 1,
18189
- OOG = 2, // out-of-gas
18190
- }
18488
+ const maybeNewService = this.stateUpdate.services.servicesUpdates.find(
18489
+ (update) => update.serviceId === destination,
18490
+ );
18191
18491
 
18192
- /** A utility function to easily trace a bunch of registers. */
18193
- declare function traceRegisters(...regs: number[]) {
18194
- return regs.map(tryAsRegisterIndex);
18195
- }
18492
+ if (maybeNewService !== undefined) {
18493
+ return maybeNewService.action.account;
18494
+ }
18196
18495
 
18197
- /** An interface for a host call implementation */
18198
- interface HostCallHandler {
18199
- /** Index of that host call (i.e. what PVM invokes via `ecalli`) */
18200
- readonly index: HostCallIndex;
18496
+ const maybeService = this.state.getService(destination);
18497
+ if (maybeService === null) {
18498
+ return null;
18499
+ }
18201
18500
 
18202
- /**
18203
- * The gas cost of invocation of that host call.
18204
- *
18205
- * NOTE: `((reg: IHostCallRegisters) => Gas)` function is for compatibility reasons: pre GP 0.7.2
18206
- */
18207
- readonly basicGasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
18501
+ return maybeService.getInfo();
18502
+ }
18208
18503
 
18209
- /** Currently executing service id. */
18210
- readonly currentServiceId: U32;
18504
+ getStorage(serviceId: ServiceId, rawKey: StorageKey): BytesBlob | null {
18505
+ const item = this.stateUpdate.services.storage.find((x) => x.serviceId === serviceId && x.key.isEqualTo(rawKey));
18506
+ if (item !== undefined) {
18507
+ return item.value;
18508
+ }
18211
18509
 
18212
- /** Input&Output registers that we should add to tracing log. */
18213
- readonly tracedRegisters: RegisterIndex[];
18510
+ const service = this.state.getService(serviceId);
18511
+ return service?.getStorage(rawKey) ?? null;
18512
+ }
18214
18513
 
18215
18514
  /**
18216
- * Actually execute the host call.
18515
+ * Returns `true` if the preimage is already provided either in current
18516
+ * accumulation scope or earlier.
18217
18517
  *
18218
- * NOTE the call is ALLOWED and expected to modify registers and memory.
18518
+ * NOTE: Does not check if the preimage is available, we just check
18519
+ * the existence in `preimages` map.
18219
18520
  */
18220
- execute(gas: GasCounter, regs: IHostCallRegisters, memory: IHostCallMemory): Promise<undefined | PvmExecution>;
18221
- }
18222
-
18223
- /** Container for all available host calls. */
18224
- declare class HostCallsManager {
18225
- private readonly hostCalls = new Map<HostCallIndex, HostCallHandler>();
18226
- private readonly missing;
18227
-
18228
- constructor({
18229
- missing,
18230
- handlers = [],
18231
- }: {
18232
- missing: HostCallHandler;
18233
- handlers?: HostCallHandler[];
18234
- }) {
18235
- this.missing = missing;
18521
+ hasPreimage(serviceId: ServiceId, hash: PreimageHash): boolean {
18522
+ const providedPreimage = this.stateUpdate.services.preimages.find(
18523
+ // we ignore the action here, since if there is <any> update on that
18524
+ // hash it means it has to exist, right?
18525
+ (p) => p.serviceId === serviceId && p.hash.isEqualTo(hash),
18526
+ );
18527
+ if (providedPreimage !== undefined) {
18528
+ return true;
18529
+ }
18236
18530
 
18237
- for (const handler of handlers) {
18238
- check`${this.hostCalls.get(handler.index) === undefined} Overwriting host call handler at index ${handler.index}`;
18239
- this.hostCalls.set(handler.index, handler);
18531
+ // fallback to state preimages
18532
+ const service = this.state.getService(serviceId);
18533
+ if (service === undefined) {
18534
+ return false;
18240
18535
  }
18241
- }
18242
18536
 
18243
- /** Get a host call by index. */
18244
- get(hostCallIndex: HostCallIndex): HostCallHandler {
18245
- return this.hostCalls.get(hostCallIndex) ?? this.missing;
18537
+ return service?.hasPreimage(hash) ?? false;
18246
18538
  }
18247
18539
 
18248
- traceHostCall(
18249
- context: string,
18250
- hostCallIndex: HostCallIndex,
18251
- hostCallHandler: HostCallHandler,
18252
- registers: IHostCallRegisters,
18253
- gas: Gas,
18254
- ) {
18255
- const { currentServiceId } = hostCallHandler;
18256
- const requested = hostCallIndex !== hostCallHandler.index ? ` (${hostCallIndex})` : "";
18257
- const name = `${hostCallHandler.constructor.name}:${hostCallHandler.index}`;
18258
- const registerValues = hostCallHandler.tracedRegisters
18259
- .map((idx) => [idx.toString().padStart(2, "0"), registers.get(idx)] as const)
18260
- .filter((v) => v[1] !== 0n)
18261
- .map(([idx, value]) => {
18262
- return `r${idx}=${value} (0x${value.toString(16)})`;
18263
- })
18264
- .join(", ");
18265
- logger.insane`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`;
18540
+ getPreimage(serviceId: ServiceId, hash: PreimageHash): BytesBlob | null {
18541
+ // TODO [ToDr] Should we verify availability here?
18542
+ const freshlyProvided = this.stateUpdate.services.preimages.find(
18543
+ (x) => x.serviceId === serviceId && x.hash.isEqualTo(hash),
18544
+ );
18545
+ if (freshlyProvided !== undefined && freshlyProvided.action.kind === UpdatePreimageKind.Provide) {
18546
+ return freshlyProvided.action.preimage.blob;
18547
+ }
18548
+
18549
+ const service = this.state.getService(serviceId);
18550
+ return service?.getPreimage(hash) ?? null;
18266
18551
  }
18267
- }
18268
18552
 
18269
- type ResolveFn = (pvm: Interpreter) => void;
18553
+ /** Get status of a preimage of current service taking into account any updates. */
18554
+ getLookupHistory(
18555
+ currentTimeslot: TimeSlot,
18556
+ serviceId: ServiceId,
18557
+ hash: PreimageHash,
18558
+ length: U64,
18559
+ ): LookupHistoryItem | null {
18560
+ // TODO [ToDr] This is most likely wrong. We may have `provide` and `remove` within
18561
+ // the same state update. We should however switch to proper "updated state"
18562
+ // representation soon.
18563
+ const updatedPreimage = this.stateUpdate.services.preimages.findLast(
18564
+ (update) => update.serviceId === serviceId && update.hash.isEqualTo(hash) && BigInt(update.length) === length,
18565
+ );
18270
18566
 
18271
- declare class InterpreterInstanceManager {
18272
- private instances: Interpreter[] = [];
18273
- private waitingQueue: ResolveFn[] = [];
18567
+ const stateFallback = () => {
18568
+ // fallback to state lookup
18569
+ const service = this.state.getService(serviceId);
18570
+ const lenU32 = preimageLenAsU32(length);
18571
+ if (lenU32 === null || service === null) {
18572
+ return null;
18573
+ }
18274
18574
 
18275
- constructor(noOfPvmInstances: number) {
18276
- for (let i = 0; i < noOfPvmInstances; i++) {
18277
- this.instances.push(
18278
- new Interpreter({
18279
- useSbrkGas: false,
18280
- }),
18281
- );
18282
- }
18283
- }
18575
+ const slots = service.getLookupHistory(hash, lenU32);
18576
+ return slots === null ? null : new LookupHistoryItem(hash, lenU32, slots);
18577
+ };
18284
18578
 
18285
- async getInstance(): Promise<Interpreter> {
18286
- const instance = this.instances.pop();
18287
- if (instance !== undefined) {
18288
- return Promise.resolve(instance);
18579
+ if (updatedPreimage === undefined) {
18580
+ return stateFallback();
18289
18581
  }
18290
- return new Promise((resolve) => {
18291
- this.waitingQueue.push(resolve);
18292
- });
18293
- }
18294
18582
 
18295
- releaseInstance(pvm: Interpreter) {
18296
- const waiting = this.waitingQueue.shift();
18297
- if (waiting !== undefined) {
18298
- return waiting(pvm);
18583
+ const { action } = updatedPreimage;
18584
+ switch (action.kind) {
18585
+ case UpdatePreimageKind.Provide: {
18586
+ // casting to U32 is safe, since we compare with object we have in memory.
18587
+ return new LookupHistoryItem(hash, updatedPreimage.length, tryAsLookupHistorySlots([currentTimeslot]));
18588
+ }
18589
+ case UpdatePreimageKind.Remove: {
18590
+ const state = stateFallback();
18591
+ // kinda impossible, since we know it's there because it's removed.
18592
+ if (state === null) {
18593
+ return null;
18594
+ }
18595
+
18596
+ return new LookupHistoryItem(hash, state.length, tryAsLookupHistorySlots([...state.slots, currentTimeslot]));
18597
+ }
18598
+ case UpdatePreimageKind.UpdateOrAdd: {
18599
+ return action.item;
18600
+ }
18299
18601
  }
18300
- this.instances.push(pvm);
18301
- }
18302
- }
18303
18602
 
18304
- declare class ReturnValue {
18305
- private constructor(
18306
- public consumedGas: Gas,
18307
- public status: Status | null,
18308
- public memorySlice: Uint8Array | null,
18309
- ) {
18310
- check`
18311
- ${(status === null && memorySlice !== null) || (status !== null && memorySlice === null)}
18312
- 'status' and 'memorySlice' must not both be null or both be non-null — exactly one must be provided
18313
- `;
18603
+ assertNever(action);
18314
18604
  }
18315
18605
 
18316
- static fromStatus(consumedGas: Gas, status: Status) {
18317
- return new ReturnValue(consumedGas, status, null);
18318
- }
18606
+ /* State update functions. */
18319
18607
 
18320
- static fromMemorySlice(consumedGas: Gas, memorySlice: Uint8Array) {
18321
- return new ReturnValue(consumedGas, null, memorySlice);
18322
- }
18608
+ updateStorage(serviceId: ServiceId, key: StorageKey, value: BytesBlob | null) {
18609
+ const update =
18610
+ value === null
18611
+ ? UpdateStorage.remove({ serviceId, key })
18612
+ : UpdateStorage.set({
18613
+ serviceId,
18614
+ storage: StorageItem.create({ key, value }),
18615
+ });
18323
18616
 
18324
- hasMemorySlice(): this is this & { status: null; memorySlice: Uint8Array } {
18325
- return this.memorySlice instanceof Uint8Array && this.status === null;
18617
+ const index = this.stateUpdate.services.storage.findIndex(
18618
+ (x) => x.serviceId === update.serviceId && x.key.isEqualTo(key),
18619
+ );
18620
+ const count = index === -1 ? 0 : 1;
18621
+ this.stateUpdate.services.storage.splice(index, count, update);
18326
18622
  }
18327
18623
 
18328
- hasStatus(): this is this & { status: Status; memorySlice: null } {
18329
- return !this.hasMemorySlice();
18624
+ /**
18625
+ * Update a preimage.
18626
+ *
18627
+ * Note we store all previous entries as well, since there might be a sequence of:
18628
+ * `provide` -> `remove` and both should update the end state somehow.
18629
+ */
18630
+ updatePreimage(newUpdate: UpdatePreimage) {
18631
+ this.stateUpdate.services.preimages.push(newUpdate);
18330
18632
  }
18331
- }
18332
- declare class HostCalls {
18333
- constructor(
18334
- private pvmInstanceManager: InterpreterInstanceManager,
18335
- private hostCalls: HostCallsManager,
18336
- ) {}
18337
-
18338
- private getReturnValue(status: Status, pvmInstance: Interpreter): ReturnValue {
18339
- const gasConsumed = pvmInstance.getGasConsumed();
18340
- if (status === Status.OOG) {
18341
- return ReturnValue.fromStatus(gasConsumed, status);
18342
- }
18343
18633
 
18344
- if (status === Status.HALT) {
18345
- const memory = pvmInstance.getMemory();
18346
- const regs = pvmInstance.getRegisters();
18347
- const maybeAddress = regs.getLowerU32(7);
18348
- const maybeLength = regs.getLowerU32(8);
18634
+ updateServiceStorageUtilisation(
18635
+ serviceId: ServiceId,
18636
+ items: number,
18637
+ bytes: bigint,
18638
+ serviceInfo: ServiceAccountInfo,
18639
+ ): Result$2<OK, InsufficientFundsError> {
18640
+ check`${items >= 0} storageUtilisationCount has to be a positive number, got: ${items}`;
18641
+ check`${bytes >= 0} storageUtilisationBytes has to be a positive number, got: ${bytes}`;
18349
18642
 
18350
- const result = safeAllocUint8Array(maybeLength);
18351
- const startAddress = tryAsMemoryIndex(maybeAddress);
18352
- const loadResult = memory.loadInto(result, startAddress);
18643
+ const overflowItems = !isU32(items);
18644
+ const overflowBytes = !isU64(bytes);
18353
18645
 
18354
- if (loadResult.isError) {
18355
- return ReturnValue.fromMemorySlice(gasConsumed, new Uint8Array());
18356
- }
18646
+ // TODO [ToDr] this is not specified in GP, but it seems sensible.
18647
+ if (overflowItems || overflowBytes) {
18648
+ return Result.error(InsufficientFundsError);
18649
+ }
18357
18650
 
18358
- return ReturnValue.fromMemorySlice(gasConsumed, result);
18651
+ const thresholdBalance = ServiceAccountInfo.calculateThresholdBalance(items, bytes, serviceInfo.gratisStorage);
18652
+ if (serviceInfo.balance < thresholdBalance) {
18653
+ return Result.error(InsufficientFundsError);
18359
18654
  }
18360
18655
 
18361
- return ReturnValue.fromStatus(gasConsumed, Status.PANIC);
18656
+ // Update service info with new details.
18657
+ this.updateServiceInfo(
18658
+ serviceId,
18659
+ ServiceAccountInfo.create({
18660
+ ...serviceInfo,
18661
+ storageUtilisationBytes: bytes,
18662
+ storageUtilisationCount: items,
18663
+ }),
18664
+ );
18665
+ return Result.ok(OK);
18362
18666
  }
18363
18667
 
18364
- private async execute(pvmInstance: Interpreter) {
18365
- pvmInstance.runProgram();
18366
- for (;;) {
18367
- let status = pvmInstance.getStatus();
18368
- if (status !== Status.HOST) {
18369
- return this.getReturnValue(status, pvmInstance);
18370
- }
18371
- check`
18372
- ${pvmInstance.getExitParam() !== null}
18373
- "We know that the exit param is not null, because the status is 'Status.HOST'
18374
- `;
18375
- const hostCallIndex = pvmInstance.getExitParam() ?? -1;
18376
- const gas = pvmInstance.getGasCounter();
18377
- const regs = new HostCallRegisters(pvmInstance.getRegisters());
18378
- const memory = new HostCallMemory(pvmInstance.getMemory());
18379
- const index = tryAsHostCallIndex(hostCallIndex);
18380
-
18381
- const hostCall = this.hostCalls.get(index);
18382
- const gasBefore = gas.get();
18383
- // NOTE: `basicGasCost(regs)` function is for compatibility reasons: pre GP 0.7.2
18384
- const basicGasCost =
18385
- typeof hostCall.basicGasCost === "number" ? hostCall.basicGasCost : hostCall.basicGasCost(regs);
18386
- const underflow = gas.sub(basicGasCost);
18668
+ updateServiceInfo(serviceId: ServiceId, newInfo: ServiceAccountInfo) {
18669
+ const idx = this.stateUpdate.services.servicesUpdates.findIndex((x) => x.serviceId === serviceId);
18670
+ const toRemove = idx === -1 ? 0 : 1;
18671
+ const existingItem = this.stateUpdate.services.servicesUpdates[idx];
18387
18672
 
18388
- const pcLog = `[PC: ${pvmInstance.getPC()}]`;
18389
- if (underflow) {
18390
- this.hostCalls.traceHostCall(`${pcLog} OOG`, index, hostCall, regs, gas.get());
18391
- return ReturnValue.fromStatus(pvmInstance.getGasConsumed(), Status.OOG);
18392
- }
18393
- this.hostCalls.traceHostCall(`${pcLog} Invoking`, index, hostCall, regs, gasBefore);
18394
- const result = await hostCall.execute(gas, regs, memory);
18395
- this.hostCalls.traceHostCall(
18396
- result === undefined ? `${pcLog} Result` : `${pcLog} Status(${PvmExecution[result]})`,
18397
- index,
18398
- hostCall,
18399
- regs,
18400
- gas.get(),
18673
+ if (existingItem?.action.kind === UpdateServiceKind.Create) {
18674
+ this.stateUpdate.services.servicesUpdates.splice(
18675
+ idx,
18676
+ toRemove,
18677
+ UpdateService.create({
18678
+ serviceId,
18679
+ serviceInfo: newInfo,
18680
+ lookupHistory: existingItem.action.lookupHistory,
18681
+ }),
18401
18682
  );
18402
18683
 
18403
- if (result === PvmExecution.Halt) {
18404
- status = Status.HALT;
18405
- return this.getReturnValue(status, pvmInstance);
18406
- }
18407
-
18408
- if (result === PvmExecution.Panic) {
18409
- status = Status.PANIC;
18410
- return this.getReturnValue(status, pvmInstance);
18411
- }
18412
-
18413
- if (result === PvmExecution.OOG) {
18414
- status = Status.OOG;
18415
- return this.getReturnValue(status, pvmInstance);
18416
- }
18417
-
18418
- if (result === undefined) {
18419
- pvmInstance.runProgram();
18420
- status = pvmInstance.getStatus();
18421
- continue;
18422
- }
18423
-
18424
- assertNever(result);
18684
+ return;
18425
18685
  }
18686
+
18687
+ this.stateUpdate.services.servicesUpdates.splice(
18688
+ idx,
18689
+ toRemove,
18690
+ UpdateService.update({
18691
+ serviceId,
18692
+ serviceInfo: newInfo,
18693
+ }),
18694
+ );
18426
18695
  }
18427
18696
 
18428
- async runProgram(
18429
- rawProgram: Uint8Array,
18430
- initialPc: number,
18431
- initialGas: Gas,
18432
- maybeRegisters?: Registers,
18433
- maybeMemory?: Memory,
18434
- ): Promise<ReturnValue> {
18435
- const pvmInstance = await this.pvmInstanceManager.getInstance();
18436
- pvmInstance.reset(rawProgram, initialPc, initialGas, maybeRegisters, maybeMemory);
18437
- try {
18438
- return await this.execute(pvmInstance);
18439
- } finally {
18440
- this.pvmInstanceManager.releaseInstance(pvmInstance);
18697
+ getPrivilegedServices() {
18698
+ if (this.stateUpdate.privilegedServices !== null) {
18699
+ return this.stateUpdate.privilegedServices;
18441
18700
  }
18701
+
18702
+ return this.state.privilegedServices;
18442
18703
  }
18443
18704
  }
18444
18705
 
18445
- type index$7_HostCallHandler = HostCallHandler;
18446
- type index$7_HostCallMemory = HostCallMemory;
18447
- declare const index$7_HostCallMemory: typeof HostCallMemory;
18448
- type index$7_HostCallRegisters = HostCallRegisters;
18449
- declare const index$7_HostCallRegisters: typeof HostCallRegisters;
18450
- type index$7_IHostCallMemory = IHostCallMemory;
18451
- type index$7_IHostCallRegisters = IHostCallRegisters;
18452
- type index$7_PvmExecution = PvmExecution;
18453
- declare const index$7_PvmExecution: typeof PvmExecution;
18454
- declare const index$7_traceRegisters: typeof traceRegisters;
18455
- declare const index$7_tryAsHostCallIndex: typeof tryAsHostCallIndex;
18456
- declare namespace index$7 {
18457
- export { index$7_HostCallMemory as HostCallMemory, index$7_HostCallRegisters as HostCallRegisters, HostCallsManager as HostCalls, index$7_PvmExecution as PvmExecution, HostCalls as PvmHostCallExtension, InterpreterInstanceManager as PvmInstanceManager, index$7_traceRegisters as traceRegisters, index$7_tryAsHostCallIndex as tryAsHostCallIndex };
18458
- export type { index$7_HostCallHandler as HostCallHandler, index$7_IHostCallMemory as IHostCallMemory, index$7_IHostCallRegisters as IHostCallRegisters };
18706
+ declare function preimageLenAsU32(length: U64) {
18707
+ // Safe to convert to Number and U32: we check that len < 2^32 before conversion
18708
+ return length >= 2n ** 32n ? null : tryAsU32(Number(length));
18459
18709
  }
18460
18710
 
18711
+ /**
18712
+ * Host call result constants.
18713
+ *
18714
+ * https://graypaper.fluffylabs.dev/#/85129da/2c7c022c7c02?v=0.6.3
18715
+ */
18716
+ declare const HostCallResult = {
18717
+ /** The return value indicating an item does not exist. */
18718
+ NONE: tryAsU64(0xffff_ffff_ffff_ffffn), // 2**64 - 1
18719
+ /** Name unknown. */
18720
+ WHAT: tryAsU64(0xffff_ffff_ffff_fffen), // 2**64 - 2
18721
+ /** The inner PVM memory index provided for reading/writing is not accessible. */
18722
+ OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
18723
+ /** Index unknown. */
18724
+ WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
18725
+ /** Storage full or resource already allocated. */
18726
+ FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
18727
+ /** Core index unknown. */
18728
+ CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
18729
+ /** Insufficient funds. */
18730
+ CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
18731
+ /** Gas limit too low. */
18732
+ LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
18733
+ /** The item is already solicited, cannot be forgotten or the operation is invalid due to privilege level. */
18734
+ HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
18735
+ /** The return value indicating general success. */
18736
+ OK: tryAsU64(0n),
18737
+ } as const;
18738
+
18461
18739
  declare const MAX_U32 = tryAsU32(2 ** 32 - 1);
18462
18740
  declare const MAX_U32_BIG_INT = tryAsU64(MAX_U32);
18463
18741
  declare const SERVICE_ID_BYTES = 4;
@@ -19281,7 +19559,7 @@ type JsonRecentBlockState = {
19281
19559
  reported: WorkPackageInfo[];
19282
19560
  };
19283
19561
 
19284
- declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
19562
+ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocks>(
19285
19563
  {
19286
19564
  history: json.array(recentBlockStateFromJson),
19287
19565
  mmr: {
@@ -19289,12 +19567,10 @@ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, Recent
19289
19567
  },
19290
19568
  },
19291
19569
  ({ history, mmr }) => {
19292
- return RecentBlocksHistory.create(
19293
- RecentBlocks.create({
19294
- blocks: history,
19295
- accumulationLog: mmr,
19296
- }),
19297
- );
19570
+ return RecentBlocks.create({
19571
+ blocks: history,
19572
+ accumulationLog: mmr,
19573
+ });
19298
19574
  },
19299
19575
  );
19300
19576
 
@@ -19606,7 +19882,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19606
19882
  if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) && chi.chi_r === undefined) {
19607
19883
  throw new Error("Registrar is required in Privileges GP ^0.7.1");
19608
19884
  }
19609
- return InMemoryState.create({
19885
+ return InMemoryState.new(spec, {
19610
19886
  authPools: tryAsPerCore(
19611
19887
  alpha.map((perCore) => {
19612
19888
  if (perCore.length > MAX_AUTH_POOL_SIZE) {
@@ -19625,7 +19901,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19625
19901
  }),
19626
19902
  spec,
19627
19903
  ),
19628
- recentBlocks: beta ?? RecentBlocksHistory.empty(),
19904
+ recentBlocks: beta ?? RecentBlocks.empty(),
19629
19905
  nextValidatorData: gamma.gamma_k,
19630
19906
  epochRoot: gamma.gamma_z,
19631
19907
  sealingKeySeries: TicketsOrKeys.toSafroleSealingKeys(gamma.gamma_s, spec),
@@ -19730,23 +20006,25 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19730
20006
  */
19731
20007
  extrinsic(extrinsicView: ExtrinsicView): WithHashAndBytes<ExtrinsicHash, ExtrinsicView> {
19732
20008
  // https://graypaper.fluffylabs.dev/#/cc517d7/0cfb000cfb00?v=0.6.5
19733
- const guarantees = extrinsicView.guarantees
20009
+ const guaranteesCount = tryAsU32(extrinsicView.guarantees.view().length);
20010
+ const countEncoded = Encoder.encodeObject(codec.varU32, guaranteesCount);
20011
+ const guaranteesBlobs = extrinsicView.guarantees
19734
20012
  .view()
19735
20013
  .map((g) => g.view())
19736
- .map((guarantee) => {
19737
- const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
19738
- return BytesBlob.blobFromParts([
19739
- reportHash.raw,
19740
- guarantee.slot.encoded().raw,
19741
- guarantee.credentials.encoded().raw,
19742
- ]);
19743
- });
19744
-
19745
- const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);
20014
+ .reduce(
20015
+ (aggregated, guarantee) => {
20016
+ const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
20017
+ aggregated.push(reportHash.raw);
20018
+ aggregated.push(guarantee.slot.encoded().raw);
20019
+ aggregated.push(guarantee.credentials.encoded().raw);
20020
+ return aggregated;
20021
+ },
20022
+ [countEncoded.raw],
20023
+ );
19746
20024
 
19747
20025
  const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
19748
20026
  const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
19749
- const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
20027
+ const eg = this.blake2b.hashBlobs(guaranteesBlobs).asOpaque<ExtrinsicHash>();
19750
20028
  const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
19751
20029
  const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
19752
20030