@typeberry/lib 0.1.3 → 0.2.0-e767e74

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.cjs +1214 -1684
  2. package/index.d.ts +1173 -855
  3. package/index.js +1213 -1683
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -1,7 +1,7 @@
 declare enum GpVersion {
   V0_6_7 = "0.6.7",
   V0_7_0 = "0.7.0",
-  V0_7_1 = "0.7.1-preview",
+  V0_7_1 = "0.7.1",
   V0_7_2 = "0.7.2-preview",
 }
 
@@ -11,12 +11,12 @@ declare enum TestSuite {
 }
 
 declare const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
-
-declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
-declare const DEFAULT_VERSION = GpVersion.V0_7_0;
+declare const DEFAULT_VERSION = GpVersion.V0_7_1;
 declare let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
 declare let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
 
+declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
+
 declare function parseCurrentVersion(env?: string): GpVersion | undefined {
   if (env === undefined) {
     return undefined;
@@ -35,7 +35,9 @@ declare function parseCurrentVersion(env?: string): GpVersion | undefined {
 }
 
 declare function parseCurrentSuite(env?: string): TestSuite | undefined {
-  if (env === undefined) return undefined;
+  if (env === undefined) {
+    return undefined;
+  }
   switch (env) {
     case TestSuite.W3F_DAVXY:
     case TestSuite.JAMDUNA:
@@ -420,6 +422,20 @@ declare const Result$2 = {
   },
 };
 
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+declare const MAX_LENGTH$1 = 2145386496;
+
+declare function safeAllocUint8Array(length: number) {
+  if (length > MAX_LENGTH) {
+    // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+    console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+  }
+  return new Uint8Array(Math.min(MAX_LENGTH, length));
+}
+
 /**
  * Utilities for tests.
  */
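
In effect, oversized allocations now degrade gracefully: instead of `new Uint8Array` throwing a `RangeError`, the helper warns and clamps. A minimal behavioral sketch (names as declared in the hunk above; reaching them through the package's public API is an assumption — the diff only shows the internal `index$u` re-exports):

```ts
// Sketch only — safeAllocUint8Array / MAX_LENGTH as declared above.
const small = safeAllocUint8Array(1024);
console.log(small.length); // 1024

// A request above ~2GB logs a console warning and is clamped rather than throwing:
const huge = safeAllocUint8Array(MAX_LENGTH + 1);
console.log(huge.length === MAX_LENGTH); // true
```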
@@ -573,8 +589,12 @@ declare function deepEqual<T>(
     const aKey = `${a.key}`;
     const bKey = `${b.key}`;
 
-    if (aKey < bKey) return -1;
-    if (bKey < aKey) return 1;
+    if (aKey < bKey) {
+      return -1;
+    }
+    if (bKey < aKey) {
+      return 1;
+    }
     return 0;
   });
 };
@@ -755,11 +775,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
 declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
 declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
 declare const index$u_resultToString: typeof resultToString;
+declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
 declare const index$u_seeThrough: typeof seeThrough;
 declare const index$u_trimStack: typeof trimStack;
 declare const index$u_workspacePathFix: typeof workspacePathFix;
 declare namespace index$u {
-  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
+  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
   export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
 }
 
@@ -929,7 +950,7 @@ declare class BytesBlob {
   static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
     const vArr = v instanceof Uint8Array ? [v] : v;
     const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-    const buffer = new Uint8Array(totalLength);
+    const buffer = safeAllocUint8Array(totalLength);
     let offset = 0;
     for (const r of vArr) {
       buffer.set(r, offset);
@@ -1012,7 +1033,7 @@ declare class Bytes<T extends number> extends BytesBlob {
 
   /** Create an empty [`Bytes<X>`] of given length. */
   static zero<X extends number>(len: X): Bytes<X> {
-    return new Bytes(new Uint8Array(len), len);
+    return new Bytes(safeAllocUint8Array(len), len);
   }
 
   // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1154,7 @@ declare class BitVec {
    * Create new [`BitVec`] with all values set to `false`.
    */
   static empty(bitLength: number) {
-    const data = new Uint8Array(Math.ceil(bitLength / 8));
+    const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
     return new BitVec(data, bitLength);
   }
 
@@ -2406,11 +2427,15 @@ type ClassConstructor<T> = {
   create: (o: CodecRecord<T>) => T;
 };
 
-/**
- * A full codec type, i.e. the `Encode` and `Decode`.
- */
+/** A full codec type, i.e. the `Encode` and `Decode`. */
 type Codec<T> = Encode<T> & Decode<T>;
 
+/** A codec descriptor with extra view. */
+type CodecWithView<T, V> = Codec<T> & {
+  /** encoded data view codec. */
+  View: Codec<V>;
+};
+
 /**
  * Type descriptor definition.
  *
@@ -2419,7 +2444,7 @@ type Codec<T> = Encode<T> & Decode<T>;
  *
  * Descriptors can be composed to form more complex typings.
  */
-declare class Descriptor<T, V = T> implements Codec<T>, Skip {
+declare class Descriptor<T, V = T> implements Codec<T>, Skip, CodecWithView<T, V> {
   /** A "lightweight" version of the object. */
   public readonly View: Descriptor<V>;
 
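
`CodecWithView` names a shape that `Descriptor` already had, so helpers can now be typed against the minimal `Codec + View` surface rather than the concrete class. A hypothetical sketch (the helper is invented for illustration and assumes `Decoder.decodeObject` — used later in this diff — accepts any `Codec`):

```ts
// Hypothetical helper: decode only the lightweight view of an encoded value.
function decodeAsView<T, V>(codec: CodecWithView<T, V>, encoded: BytesBlob): V {
  return Decoder.decodeObject(codec.View, encoded);
}
```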
@@ -2665,6 +2690,10 @@ declare abstract class ObjectView<T> {
   toString() {
     return `View<${this.materializedConstructor.name}>(cache: ${this.cache.size})`;
   }
+
+  [TEST_COMPARE_USING]() {
+    return this.materialize();
+  }
 }
 
 /**
@@ -3216,15 +3245,25 @@ declare namespace codec$1 {
       sizeHint: SizeHint;
     },
     chooser: (ctx: unknown | null) => Descriptor<T, V>,
-  ): Descriptor<T, V> =>
-    Descriptor.withView(
+  ): Descriptor<T, V> => {
+    const Self = chooser(null);
+    return Descriptor.withView(
       name,
       sizeHint,
       (e, x) => chooser(e.getContext()).encode(e, x),
       (d) => chooser(d.getContext()).decode(d),
       (s) => chooser(s.decoder.getContext()).skip(s),
-      chooser(null).View,
+      hasUniqueView(Self)
+        ? select(
+            {
+              name: Self.View.name,
+              sizeHint: Self.View.sizeHint,
+            },
+            (ctx) => chooser(ctx).View,
+          )
+        : Self.View,
     );
+  };
 
   /**
    * A descriptor for a more complex POJO.
@@ -3418,6 +3457,7 @@ declare function sequenceViewFixLen<T, V>(
 type index$q_ClassConstructor<T> = ClassConstructor<T>;
 type index$q_Codec<T> = Codec<T>;
 type index$q_CodecRecord<T> = CodecRecord<T>;
+type index$q_CodecWithView<T, V> = CodecWithView<T, V>;
 declare const index$q_DEFAULT_START_LENGTH: typeof DEFAULT_START_LENGTH;
 type index$q_Decode<T> = Decode<T>;
 type index$q_Decoder = Decoder;
@@ -3458,9 +3498,102 @@ declare const index$q_tryAsExactBytes: typeof tryAsExactBytes;
 declare const index$q_validateLength: typeof validateLength;
 declare namespace index$q {
   export { index$q_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$q_Decoder as Decoder, index$q_Descriptor as Descriptor, index$q_Encoder as Encoder, index$q_MASKS as MASKS, index$q_MAX_LENGTH as MAX_LENGTH, index$q_ObjectView as ObjectView, index$q_SequenceView as SequenceView, index$q_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$q_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$q_ViewField as ViewField, index$q_addSizeHints as addSizeHints, codec$1 as codec, index$q_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$q_exactHint as exactHint, index$q_forEachDescriptor as forEachDescriptor, index$q_hasUniqueView as hasUniqueView, index$q_objectView as objectView, index$q_readonlyArray as readonlyArray, index$q_sequenceViewFixLen as sequenceViewFixLen, index$q_sequenceViewVarLen as sequenceViewVarLen, index$q_tryAsExactBytes as tryAsExactBytes, index$q_validateLength as validateLength };
-  export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
+  export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_CodecWithView as CodecWithView, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
+}
+
+/**
+ * A utility class providing a readonly view over a portion of an array without copying it.
+ */
+declare class ArrayView<T> implements Iterable<T> {
+  private readonly source: T[];
+  public readonly length: number;
+
+  private constructor(
+    source: T[],
+    private readonly start: number,
+    private readonly end: number,
+  ) {
+    this.source = source;
+    this.length = end - start;
+  }
+
+  static from<T>(source: T[], start = 0, end = source.length): ArrayView<T> {
+    check`
+      ${start >= 0 && end <= source.length && start <= end}
+      Invalid start (${start})/end (${end}) for ArrayView
+    `;
+    return new ArrayView(source, start, end);
+  }
+
+  get(i: number): T {
+    check`
+      ${i >= 0 && i < this.length}
+      Index out of bounds: ${i} < ${this.length}
+    `;
+    return this.source[this.start + i];
+  }
+
+  subview(from: number, to: number = this.length): ArrayView<T> {
+    return ArrayView.from(this.source, this.start + from, this.start + to);
+  }
+
+  toArray(): T[] {
+    return this.source.slice(this.start, this.end);
+  }
+
+  *[Symbol.iterator](): Iterator<T> {
+    for (let i = this.start; i < this.end; i++) {
+      yield this.source[i];
+    }
+  }
 }
 
+type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
+type IDataType = string | Buffer | ITypedArray;
+
+type IHasher = {
+  /**
+   * Initializes hash state to default value
+   */
+  init: () => IHasher;
+  /**
+   * Updates the hash content with the given data
+   */
+  update: (data: IDataType) => IHasher;
+  /**
+   * Calculates the hash of all of the data passed to be hashed with hash.update().
+   * Defaults to hexadecimal string
+   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
+   * returns hexadecimal string
+   */
+  digest: {
+    (outputType: "binary"): Uint8Array;
+    (outputType?: "hex"): string;
+  };
+  /**
+   * Save the current internal state of the hasher for later resumption with load().
+   * Cannot be called before .init() or after .digest()
+   *
+   * Note that this state can include arbitrary information about the value being hashed (e.g.
+   * could include N plaintext bytes from the value), so needs to be treated as being as
+   * sensitive as the input value itself.
+   */
+  save: () => Uint8Array;
+  /**
+   * Resume a state that was created by save(). If this state was not created by a
+   * compatible build of hash-wasm, an exception will be thrown.
+   */
+  load: (state: Uint8Array) => IHasher;
+  /**
+   * Block size in bytes
+   */
+  blockSize: number;
+  /**
+   * Digest size in bytes
+   */
+  digestSize: number;
+};
+
 /**
  * Size of the output of the hash functions.
  *
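
A short usage sketch of the new `ArrayView` (values invented for illustration):

```ts
// A read-only, zero-copy window over items 1..4 of the source array.
const view = ArrayView.from([10, 20, 30, 40, 50], 1, 4);
view.length;               // 3
view.get(0);               // 20
[...view];                 // [20, 30, 40] — iteration never copies the source
view.subview(1).toArray(); // [30, 40] — only toArray() allocates
```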
@@ -3516,144 +3649,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
   }
 }
 
-/** Allocator interface - returns an empty bytes vector that can be filled with the hash. */
-interface HashAllocator {
-  /** Return a new hash destination. */
-  emptyHash(): OpaqueHash;
-}
-
-/** The simplest allocator returning just a fresh copy of bytes each time. */
-declare class SimpleAllocator implements HashAllocator {
-  emptyHash(): OpaqueHash {
-    return Bytes.zero(HASH_SIZE);
-  }
-}
-
-/** An allocator that works by allocating larger (continuous) pages of memory. */
-declare class PageAllocator implements HashAllocator {
-  private page: Uint8Array = new Uint8Array(0);
-  private currentHash = 0;
+declare const zero$1 = Bytes.zero(HASH_SIZE);
 
-  // TODO [ToDr] Benchmark the performance!
-  constructor(private readonly hashesPerPage: number) {
-    check`${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
-    this.resetPage();
+declare class Blake2b {
+  static async createHasher() {
+    return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
   }
 
-  private resetPage() {
-    const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
-    this.currentHash = 0;
-    this.page = new Uint8Array(pageSizeBytes);
-  }
-
-  emptyHash(): OpaqueHash {
-    const startIdx = this.currentHash * HASH_SIZE;
-    const endIdx = startIdx + HASH_SIZE;
+  private constructor(private readonly hasher: IHasher) {}
 
-    this.currentHash += 1;
-    if (this.currentHash >= this.hashesPerPage) {
-      this.resetPage();
+  /**
+   * Hash given collection of blobs.
+   *
+   * If empty array is given a zero-hash is returned.
+   */
+  hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
+    if (r.length === 0) {
+      return zero.asOpaque();
     }
 
-    return Bytes.fromBlob(this.page.subarray(startIdx, endIdx), HASH_SIZE);
+    const hasher = this.hasher.init();
+    for (const v of r) {
+      hasher.update(v instanceof BytesBlob ? v.raw : v);
+    }
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
-}
-
-declare const defaultAllocator = new SimpleAllocator();
 
-/**
- * Hash given collection of blobs.
- *
- * If empty array is given a zero-hash is returned.
- */
-declare function hashBlobs$1<H extends Blake2bHash>(
-  r: (BytesBlob | Uint8Array)[],
-  allocator: HashAllocator = defaultAllocator,
-): H {
-  const out = allocator.emptyHash();
-  if (r.length === 0) {
-    return out.asOpaque();
+  /** Hash given blob of bytes. */
+  hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
+    const hasher = this.hasher.init();
+    const bytes = blob instanceof BytesBlob ? blob.raw : blob;
+    hasher.update(bytes);
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
 
-  const hasher = blake2b(HASH_SIZE);
-  for (const v of r) {
-    hasher?.update(v instanceof BytesBlob ? v.raw : v);
+  /** Convert given string into bytes and hash it. */
+  hashString(str: string) {
+    return this.hashBytes(BytesBlob.blobFromString(str));
   }
-  hasher?.digest(out.raw);
-  return out.asOpaque();
-}
-
-/** Hash given blob of bytes. */
-declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
-  const hasher = blake2b(HASH_SIZE);
-  const bytes = blob instanceof BytesBlob ? blob.raw : blob;
-  hasher?.update(bytes);
-  const out = allocator.emptyHash();
-  hasher?.digest(out.raw);
-  return out;
-}
-
-/** Convert given string into bytes and hash it. */
-declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
-  return hashBytes(BytesBlob.blobFromString(str), allocator);
-}
-
-declare const blake2b_hashBytes: typeof hashBytes;
-declare const blake2b_hashString: typeof hashString;
-declare namespace blake2b {
-  export {
-    hashBlobs$1 as hashBlobs,
-    blake2b_hashBytes as hashBytes,
-    blake2b_hashString as hashString,
-  };
 }
 
-type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
-type IDataType = string | Buffer | ITypedArray;
-
-type IHasher = {
-  /**
-   * Initializes hash state to default value
-   */
-  init: () => IHasher;
-  /**
-   * Updates the hash content with the given data
-   */
-  update: (data: IDataType) => IHasher;
-  /**
-   * Calculates the hash of all of the data passed to be hashed with hash.update().
-   * Defaults to hexadecimal string
-   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
-   * returns hexadecimal string
-   */
-  digest: {
-    (outputType: "binary"): Uint8Array;
-    (outputType?: "hex"): string;
-  };
-  /**
-   * Save the current internal state of the hasher for later resumption with load().
-   * Cannot be called before .init() or after .digest()
-   *
-   * Note that this state can include arbitrary information about the value being hashed (e.g.
-   * could include N plaintext bytes from the value), so needs to be treated as being as
-   * sensitive as the input value itself.
-   */
-  save: () => Uint8Array;
-  /**
-   * Resume a state that was created by save(). If this state was not created by a
-   * compatible build of hash-wasm, an exception will be thrown.
-   */
-  load: (state: Uint8Array) => IHasher;
-  /**
-   * Block size in bytes
-   */
-  blockSize: number;
-  /**
-   * Digest size in bytes
-   */
-  digestSize: number;
-};
-
 declare class KeccakHasher {
   static async create(): Promise<KeccakHasher> {
     return new KeccakHasher(await createKeccak(256));
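
The free-standing `blake2b.*` helpers and the allocator plumbing are replaced by a single `Blake2b` class that wraps a reusable hash-wasm `IHasher`. A sketch of the new call pattern, based only on the declarations above:

```ts
// The hasher is constructed once (async — it instantiates WASM) and then reused.
const blake2b = await Blake2b.createHasher();

const h1 = blake2b.hashString("peak");                          // Blake2bHash
const h2 = blake2b.hashBytes(BytesBlob.blobFromString("peak")); // same digest as h1
const h3 = blake2b.hashBlobs([]);                               // the shared zero hash
```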
@@ -3681,15 +3716,15 @@ declare namespace keccak {
   };
 }
 
+// TODO [ToDr] (#213) this should most likely be moved to a separate
+// package to avoid pulling in unnecessary deps.
+
+type index$p_Blake2b = Blake2b;
+declare const index$p_Blake2b: typeof Blake2b;
 type index$p_Blake2bHash = Blake2bHash;
 type index$p_HASH_SIZE = HASH_SIZE;
-type index$p_HashAllocator = HashAllocator;
 type index$p_KeccakHash = KeccakHash;
 type index$p_OpaqueHash = OpaqueHash;
-type index$p_PageAllocator = PageAllocator;
-declare const index$p_PageAllocator: typeof PageAllocator;
-type index$p_SimpleAllocator = SimpleAllocator;
-declare const index$p_SimpleAllocator: typeof SimpleAllocator;
 type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
 type index$p_TruncatedHash = TruncatedHash;
 type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3697,12 +3732,10 @@ declare const index$p_WithHash: typeof WithHash;
 type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
 declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
 declare const index$p_ZERO_HASH: typeof ZERO_HASH;
-declare const index$p_blake2b: typeof blake2b;
-declare const index$p_defaultAllocator: typeof defaultAllocator;
 declare const index$p_keccak: typeof keccak;
 declare namespace index$p {
-  export { index$p_PageAllocator as PageAllocator, index$p_SimpleAllocator as SimpleAllocator, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_blake2b as blake2b, index$p_defaultAllocator as defaultAllocator, index$p_keccak as keccak };
-  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_HashAllocator as HashAllocator, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
+  export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
+  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
 }
 
 /** Immutable view of the `HashDictionary`. */
@@ -4479,6 +4512,8 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
   }
 }
 
+type index$o_ArrayView<T> = ArrayView<T>;
+declare const index$o_ArrayView: typeof ArrayView;
 type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
 declare const index$o_FixedSizeArray: typeof FixedSizeArray;
 type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
@@ -4506,7 +4541,7 @@ type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDic
 declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
 declare const index$o_asKnownSize: typeof asKnownSize;
 declare namespace index$o {
-  export { index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
+  export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
   export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
 }
 
@@ -4735,7 +4770,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
     (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
     0,
   );
-  const data = new Uint8Array(dataLength);
+  const data = safeAllocUint8Array(dataLength);
 
   let offset = 0;
 
@@ -4825,22 +4860,16 @@ declare function trivialSeed(s: U32): KeySeed {
 * Derives a Ed25519 secret key from a seed.
 * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
 */
-declare function deriveEd25519SecretKey(
-  seed: KeySeed,
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): Ed25519SecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
 * Derives a Bandersnatch secret key from a seed.
 * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
 */
-declare function deriveBandersnatchSecretKey(
-  seed: KeySeed,
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): BandersnatchSecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
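
Both derivation functions now take the `Blake2b` hasher as an explicit second argument instead of an optional allocator, moving the one-time async hasher setup to the caller. A hypothetical migration sketch (`tryAsU32` is assumed from the package's numeric helpers and is not part of this diff):

```ts
// Before (0.1.x): deriveEd25519SecretKey(seed) — hashing state was implicit.
// After (this diff): the caller supplies the shared hasher.
const blake2b = await Blake2b.createHasher();
const seed = trivialSeed(tryAsU32(0)); // illustrative seed
const ed25519Secret = deriveEd25519SecretKey(seed, blake2b);
const bandersnatchSecret = deriveBandersnatchSecretKey(seed, blake2b);
```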
@@ -6907,6 +6936,17 @@ declare function emptyBlock(slot: TimeSlot = tryAsTimeSlot(0)) {
   });
 }
 
+/**
+ * Take an input data and re-encode that data as view.
+ *
+ * NOTE: this function should NEVER be used in any production code,
+ * it's only a test helper.
+ */
+declare function reencodeAsView<T, V>(codec: Descriptor<T, V>, object: T, chainSpec?: ChainSpec): V {
+  const encoded = Encoder.encodeObject(codec, object, chainSpec);
+  return Decoder.decodeObject(codec.View, encoded, chainSpec);
+}
+
 type index$l_Block = Block;
 declare const index$l_Block: typeof Block;
 type index$l_BlockView = BlockView;
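
Together with the `[TEST_COMPARE_USING]` hook added to `ObjectView` earlier in this diff, the helper lets tests compare views and materialized objects directly. A hypothetical test usage (`Header.Codec` and the two-argument `deepEqual` call are assumed to follow the patterns shown elsewhere in this file):

```ts
// Hypothetical test: round-trip a Header into its lightweight view.
const headerView = reencodeAsView(Header.Codec, header);
// deepEqual materializes the view via [TEST_COMPARE_USING] before comparing.
deepEqual(headerView, header);
```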
@@ -6956,6 +6996,7 @@ declare const index$l_guarantees: typeof guarantees;
 declare const index$l_headerViewWithHashCodec: typeof headerViewWithHashCodec;
 declare const index$l_legacyDescriptor: typeof legacyDescriptor;
 declare const index$l_preimage: typeof preimage;
+declare const index$l_reencodeAsView: typeof reencodeAsView;
 declare const index$l_refineContext: typeof refineContext;
 declare const index$l_tickets: typeof tickets;
 declare const index$l_tryAsCoreIndex: typeof tryAsCoreIndex;
@@ -6972,7 +7013,7 @@ declare const index$l_workPackage: typeof workPackage;
 declare const index$l_workReport: typeof workReport;
 declare const index$l_workResult: typeof workResult;
 declare namespace index$l {
-  export { index$l_Block as Block, index$l_EpochMarker as EpochMarker, index$l_Extrinsic as Extrinsic, index$l_Header as Header, index$l_HeaderViewWithHash as HeaderViewWithHash, index$l_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$l_TicketsMarker as TicketsMarker, index$l_ValidatorKeys as ValidatorKeys, index$l_W_E as W_E, index$l_W_S as W_S, index$l_assurances as assurances, index$l_codecPerEpochBlock as codecPerEpochBlock, index$l_codecPerValidator as codecPerValidator, codec as codecUtils, index$l_disputes as disputes, index$l_emptyBlock as emptyBlock, index$l_encodeUnsealedHeader as encodeUnsealedHeader, index$l_guarantees as guarantees, index$l_headerViewWithHashCodec as headerViewWithHashCodec, index$l_legacyDescriptor as legacyDescriptor, index$l_preimage as preimage, index$l_refineContext as refineContext, index$l_tickets as tickets, index$l_tryAsCoreIndex as tryAsCoreIndex, index$l_tryAsEpoch as tryAsEpoch, index$l_tryAsPerEpochBlock as tryAsPerEpochBlock, index$l_tryAsPerValidator as tryAsPerValidator, index$l_tryAsSegmentIndex as tryAsSegmentIndex, index$l_tryAsServiceGas as tryAsServiceGas, index$l_tryAsServiceId as tryAsServiceId, index$l_tryAsTimeSlot as tryAsTimeSlot, index$l_tryAsValidatorIndex as tryAsValidatorIndex, index$l_workItem as workItem, index$l_workPackage as workPackage, index$l_workReport as workReport, index$l_workResult as workResult };
+  export { index$l_Block as Block, index$l_EpochMarker as EpochMarker, index$l_Extrinsic as Extrinsic, index$l_Header as Header, index$l_HeaderViewWithHash as HeaderViewWithHash, index$l_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$l_TicketsMarker as TicketsMarker, index$l_ValidatorKeys as ValidatorKeys, index$l_W_E as W_E, index$l_W_S as W_S, index$l_assurances as assurances, index$l_codecPerEpochBlock as codecPerEpochBlock, index$l_codecPerValidator as codecPerValidator, codec as codecUtils, index$l_disputes as disputes, index$l_emptyBlock as emptyBlock, index$l_encodeUnsealedHeader as encodeUnsealedHeader, index$l_guarantees as guarantees, index$l_headerViewWithHashCodec as headerViewWithHashCodec, index$l_legacyDescriptor as legacyDescriptor, index$l_preimage as preimage, index$l_reencodeAsView as reencodeAsView, index$l_refineContext as refineContext, index$l_tickets as tickets, index$l_tryAsCoreIndex as tryAsCoreIndex, index$l_tryAsEpoch as tryAsEpoch, index$l_tryAsPerEpochBlock as tryAsPerEpochBlock, index$l_tryAsPerValidator as tryAsPerValidator, index$l_tryAsSegmentIndex as tryAsSegmentIndex, index$l_tryAsServiceGas as tryAsServiceGas, index$l_tryAsServiceId as tryAsServiceId, index$l_tryAsTimeSlot as tryAsTimeSlot, index$l_tryAsValidatorIndex as tryAsValidatorIndex, index$l_workItem as workItem, index$l_workPackage as workPackage, index$l_workReport as workReport, index$l_workResult as workResult };
   export type { index$l_BlockView as BlockView, index$l_CodeHash as CodeHash, index$l_CoreIndex as CoreIndex, index$l_EntropyHash as EntropyHash, index$l_Epoch as Epoch, index$l_EpochMarkerView as EpochMarkerView, index$l_ExtrinsicHash as ExtrinsicHash, index$l_ExtrinsicView as ExtrinsicView, index$l_HeaderHash as HeaderHash, index$l_HeaderView as HeaderView, index$l_PerEpochBlock as PerEpochBlock, index$l_PerValidator as PerValidator, index$l_SEGMENT_BYTES as SEGMENT_BYTES, index$l_Segment as Segment, index$l_SegmentIndex as SegmentIndex, index$l_ServiceGas as ServiceGas, index$l_ServiceId as ServiceId, index$l_StateRootHash as StateRootHash, index$l_TicketsMarkerView as TicketsMarkerView, index$l_TimeSlot as TimeSlot, index$l_ValidatorIndex as ValidatorIndex, index$l_WorkReportHash as WorkReportHash };
 }
 
@@ -8373,7 +8414,7 @@ declare enum NodeType {
 declare class TrieNode {
   constructor(
     /** Exactly 512 bits / 64 bytes */
-    public readonly raw: Uint8Array = new Uint8Array(TRIE_NODE_BYTES),
+    public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
   ) {}
 
   /** Returns the type of the node */
@@ -9111,49 +9152,62 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
   return Ordering.Equal;
 }
 
-declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
-  Descriptor.withView(
-    val.name,
-    val.sizeHint,
-    (e, elem) => val.encode(e, elem.data),
-    (d): WithHash<H, T> => {
-      const decoder2 = d.clone();
-      const encoded = val.skipEncoded(decoder2);
-      const hash = blake2b.hashBytes(encoded);
-      return new WithHash(hash.asOpaque(), val.decode(d));
-    },
-    val.skip,
-    val.View,
-  );
-
 /**
- * Assignment of particular work report to a core.
+ * `J`: The maximum sum of dependency items in a work-report.
 *
- * Used by "Assurances" and "Disputes" subsystem, denoted by `rho`
- * in state.
+ * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
+ */
+declare const MAX_REPORT_DEPENDENCIES = 8;
+type MAX_REPORT_DEPENDENCIES = typeof MAX_REPORT_DEPENDENCIES;
+
+/**
+ * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
 *
- * https://graypaper.fluffylabs.dev/#/579bd12/135800135800
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
 */
-declare class AvailabilityAssignment extends WithDebug {
-  static Codec = codec.Class(AvailabilityAssignment, {
-    workReport: codecWithHash(WorkReport.Codec),
-    timeout: codec.u32.asOpaque<TimeSlot>(),
+declare class NotYetAccumulatedReport extends WithDebug {
+  static Codec = codec.Class(NotYetAccumulatedReport, {
+    report: WorkReport.Codec,
+    dependencies: codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(), {
+      typicalLength: MAX_REPORT_DEPENDENCIES / 2,
+      maxLength: MAX_REPORT_DEPENDENCIES,
+      minLength: 0,
+    }),
   });
 
-  static create({ workReport, timeout }: CodecRecord<AvailabilityAssignment>) {
-    return new AvailabilityAssignment(workReport, timeout);
+  static create({ report, dependencies }: CodecRecord<NotYetAccumulatedReport>) {
+    return new NotYetAccumulatedReport(report, dependencies);
   }
 
   private constructor(
-    /** Work report assigned to a core. */
-    public readonly workReport: WithHash<WorkReportHash, WorkReport>,
-    /** Time slot at which the report becomes obsolete. */
-    public readonly timeout: TimeSlot,
+    /**
+     * Each of these were made available at most one epoch ago
+     * but have or had unfulfilled dependencies.
+     */
+    readonly report: WorkReport,
+    /**
+     * Alongside the work-report itself, we retain its un-accumulated
+     * dependencies, a set of work-package hashes.
+     *
+     * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
+     */
+    readonly dependencies: KnownSizeArray<WorkPackageHash, `[0..${MAX_REPORT_DEPENDENCIES})`>,
   ) {
     super();
   }
 }
 
+/**
+ * Accumulation queue state entry.
+ */
+type AccumulationQueue = PerEpochBlock<readonly NotYetAccumulatedReport[]>;
+
+declare const accumulationQueueCodec = codecPerEpochBlock(
+  readonlyArray(codec.sequenceVarLen(NotYetAccumulatedReport.Codec)),
+);
+
+type AccumulationQueueView = DescribedBy<typeof accumulationQueueCodec.View>;
+
 /** One entry of kind `T` for each core. */
 type PerCore<T> = KnownSizeArray<T, "number of cores">;
@@ -9169,172 +9223,147 @@ declare const codecPerCore = <T, V>(val: Descriptor<T, V>): Descriptor<PerCore<T
   return codecKnownSizeArray(val, { fixedLength: context.coresCount });
 });
 
-declare const sortedSetCodec = <T extends OpaqueHash>() =>
-  readonlyArray(codec.sequenceVarLen(codec.bytes(HASH_SIZE))).convert<ImmutableSortedSet<T>>(
-    (input) => input.array,
-    (output) => {
-      const typed: T[] = output.map((x) => x.asOpaque());
-      return SortedSet.fromSortedArray(hashComparator, typed);
-    },
-  );
-declare const workReportsSortedSetCodec = sortedSetCodec<WorkReportHash>();
-
 /**
- * A set of judgements over particular work reports identified by hashes.
+ * Assignment of particular work report to a core.
 *
- * https://graypaper.fluffylabs.dev/#/579bd12/122b00124700
+ * Used by "Assurances" and "Disputes" subsystem, denoted by `rho`
+ * in state.
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/135800135800
 */
-declare class DisputesRecords {
-  static Codec = codec.Class(DisputesRecords, {
-    goodSet: workReportsSortedSetCodec,
-    badSet: workReportsSortedSetCodec,
-    wonkySet: workReportsSortedSetCodec,
-    punishSet: sortedSetCodec(),
+declare class AvailabilityAssignment extends WithDebug {
+  static Codec = codec.Class(AvailabilityAssignment, {
+    workReport: WorkReport.Codec,
+    timeout: codec.u32.asOpaque<TimeSlot>(),
   });
 
-  static create({ goodSet, badSet, wonkySet, punishSet }: CodecRecord<DisputesRecords>) {
-    return new DisputesRecords(goodSet, badSet, wonkySet, punishSet);
+  static create({ workReport, timeout }: CodecRecord<AvailabilityAssignment>) {
+    return new AvailabilityAssignment(workReport, timeout);
   }
 
   private constructor(
-    /** `goodSet`: all work-reports hashes which were judged to be correct */
-    public readonly goodSet: ImmutableSortedSet<WorkReportHash>,
-    /** `badSet`: all work-reports hashes which were judged to be incorrect */
-    public readonly badSet: ImmutableSortedSet<WorkReportHash>,
-    /** `wonkySet`: all work-reports hashes which appear to be impossible to judge */
-    public readonly wonkySet: ImmutableSortedSet<WorkReportHash>,
-    /** `punishSet`: set of Ed25519 keys representing validators which were found to have misjudged a work-report */
-    public readonly punishSet: ImmutableSortedSet<Ed25519Key>,
-  ) {}
-
-  static fromSortedArrays({
-    goodSet,
-    badSet,
-    wonkySet,
-    punishSet,
-  }: {
-    goodSet: WorkReportHash[];
-    badSet: WorkReportHash[];
-    wonkySet: WorkReportHash[];
-    punishSet: Ed25519Key[];
-  }) {
-    return new DisputesRecords(
-      SortedSet.fromSortedArray(hashComparator, goodSet),
-      SortedSet.fromSortedArray(hashComparator, badSet),
-      SortedSet.fromSortedArray(hashComparator, wonkySet),
-      SortedSet.fromSortedArray(hashComparator, punishSet),
-    );
+    /** Work report assigned to a core. */
+    public readonly workReport: WorkReport,
+    /** Time slot at which the report becomes obsolete. */
+    public readonly timeout: TimeSlot,
+  ) {
+    super();
   }
 }
 
-declare function hashComparator<V extends OpaqueHash>(a: V, b: V) {
-  return a.compare(b);
-}
+declare const availabilityAssignmentsCodec = codecPerCore(codec.optional(AvailabilityAssignment.Codec));
 
-// TODO [ToDr] Not sure where these should live yet :(
+type AvailabilityAssignmentsView = DescribedBy<typeof availabilityAssignmentsCodec.View>;
 
-/**
- * `J`: The maximum sum of dependency items in a work-report.
- *
- * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
- */
-declare const MAX_REPORT_DEPENDENCIES = 8;
-type MAX_REPORT_DEPENDENCIES = typeof MAX_REPORT_DEPENDENCIES;
+/** `O`: Maximal authorization pool size. */
+declare const MAX_AUTH_POOL_SIZE = O;
+type MAX_AUTH_POOL_SIZE = typeof MAX_AUTH_POOL_SIZE;
 
 /** `Q`: Size of the authorization queue. */
 declare const AUTHORIZATION_QUEUE_SIZE = Q;
 type AUTHORIZATION_QUEUE_SIZE = typeof AUTHORIZATION_QUEUE_SIZE;
 
-/** `O`: Maximal authorization pool size. */
-declare const MAX_AUTH_POOL_SIZE = O;
-type MAX_AUTH_POOL_SIZE = typeof MAX_AUTH_POOL_SIZE;
+/** A pool of authorization hashes that is filled from the queue. */
+type AuthorizationPool = KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>;
 
 /**
- * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
+ * A fixed-size queue of authorization hashes used to fill up the pool.
 *
- * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
+ * Can be set using `ASSIGN` host call in batches of `AUTHORIZATION_QUEUE_SIZE`.
 */
-declare class NotYetAccumulatedReport extends WithDebug {
-  static Codec = codec.Class(NotYetAccumulatedReport, {
-    report: WorkReport.Codec,
-    dependencies: codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(), {
-      typicalLength: MAX_REPORT_DEPENDENCIES / 2,
-      maxLength: MAX_REPORT_DEPENDENCIES,
-      minLength: 0,
-    }),
-  });
+type AuthorizationQueue = FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>;
+
+declare const authPoolsCodec = codecPerCore<AuthorizationPool, SequenceView<AuthorizerHash>>(
+  codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), {
+    minLength: 0,
+    maxLength: MAX_AUTH_POOL_SIZE,
+    typicalLength: MAX_AUTH_POOL_SIZE,
+  }),
+);
 
-  static create({ report, dependencies }: CodecRecord<NotYetAccumulatedReport>) {
-    return new NotYetAccumulatedReport(report, dependencies);
-  }
+declare const authQueuesCodec = codecPerCore<AuthorizationQueue, SequenceView<AuthorizerHash>>(
+  codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), AUTHORIZATION_QUEUE_SIZE),
+);
 
-  private constructor(
-    /**
-     * Each of these were made available at most one epoch ago
-     * but have or had unfulfilled dependencies.
-     */
-    readonly report: WorkReport,
-    /**
-     * Alongside the work-report itself, we retain its un-accumulated
-     * dependencies, a set of work-package hashes.
-     *
-     * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
-     */
-    readonly dependencies: KnownSizeArray<WorkPackageHash, `[0..${MAX_REPORT_DEPENDENCIES})`>,
-  ) {
-    super();
-  }
-}
+declare const sortedSetCodec = <T extends OpaqueHash>() =>
+  readonlyArray(codec.sequenceVarLen(codec.bytes(HASH_SIZE))).convert<ImmutableSortedSet<T>>(
+    (input) => input.array,
+    (output) => {
+      const typed: T[] = output.map((x) => x.asOpaque());
+      return SortedSet.fromSortedArray(hashComparator, typed);
+    },
+  );
+declare const workReportsSortedSetCodec = sortedSetCodec<WorkReportHash>();
 
-/** Dictionary entry of services that auto-accumulate every block. */
-declare class AutoAccumulate {
-  static Codec = codec.Class(AutoAccumulate, {
-    service: codec.u32.asOpaque<ServiceId>(),
-    gasLimit: codec.u64.asOpaque<ServiceGas>(),
+/**
+ * A set of judgements over particular work reports identified by hashes.
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/122b00124700
+ */
+declare class DisputesRecords {
+  static Codec = codec.Class(DisputesRecords, {
+    goodSet: workReportsSortedSetCodec,
+    badSet: workReportsSortedSetCodec,
+    wonkySet: workReportsSortedSetCodec,
+    punishSet: sortedSetCodec(),
   });
 
-  static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
-    return new AutoAccumulate(service, gasLimit);
+  static create({ goodSet, badSet, wonkySet, punishSet }: CodecRecord<DisputesRecords>) {
    return new DisputesRecords(goodSet, badSet, wonkySet, punishSet);
   }
 
+  private readonly goodSetDict: ImmutableHashSet<WorkReportHash>;
+  private readonly badSetDict: ImmutableHashSet<WorkReportHash>;
+  private readonly wonkySetDict: ImmutableHashSet<WorkReportHash>;
+  private readonly punishSetDict: ImmutableHashSet<Ed25519Key>;
+
   private constructor(
-    /** Service id that auto-accumulates. */
-    readonly service: ServiceId,
-    /** Gas limit for auto-accumulation. */
-    readonly gasLimit: ServiceGas,
-  ) {}
-}
+    /** `goodSet`: all work-reports hashes which were judged to be correct */
+    public readonly goodSet: ImmutableSortedSet<WorkReportHash>,
+    /** `badSet`: all work-reports hashes which were judged to be incorrect */
+    public readonly badSet: ImmutableSortedSet<WorkReportHash>,
+    /** `wonkySet`: all work-reports hashes which appear to be impossible to judge */
+    public readonly wonkySet: ImmutableSortedSet<WorkReportHash>,
+    /** `punishSet`: set of Ed25519 keys representing validators which were found to have misjudged a work-report */
+    public readonly punishSet: ImmutableSortedSet<Ed25519Key>,
+  ) {
+    this.goodSetDict = HashSet.from(goodSet.array);
+    this.badSetDict = HashSet.from(badSet.array);
+    this.wonkySetDict = HashSet.from(wonkySet.array);
+    this.punishSetDict = HashSet.from(punishSet.array);
+  }
 
-/**
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/11da0111da01?v=0.6.7
- */
-declare class PrivilegedServices {
-  static Codec = codec.Class(PrivilegedServices, {
-    manager: codec.u32.asOpaque<ServiceId>(),
-    authManager: codecPerCore(codec.u32.asOpaque<ServiceId>()),
-    validatorsManager: codec.u32.asOpaque<ServiceId>(),
-    autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
-  });
+  public asDictionaries() {
+    return {
+      goodSet: this.goodSetDict,
+      badSet: this.badSetDict,
+      wonkySet: this.wonkySetDict,
+      punishSet: this.punishSetDict,
+    };
+  }
 
-  static create({ manager, authManager, validatorsManager, autoAccumulateServices }: CodecRecord<PrivilegedServices>) {
-    return new PrivilegedServices(manager, authManager, validatorsManager, autoAccumulateServices);
+  static fromSortedArrays({
+    goodSet,
+    badSet,
+    wonkySet,
+    punishSet,
+  }: {
+    goodSet: WorkReportHash[];
+    badSet: WorkReportHash[];
+    wonkySet: WorkReportHash[];
+    punishSet: Ed25519Key[];
+  }) {
+    return new DisputesRecords(
+      SortedSet.fromSortedArray(hashComparator, goodSet),
+      SortedSet.fromSortedArray(hashComparator, badSet),
+      SortedSet.fromSortedArray(hashComparator, wonkySet),
+      SortedSet.fromSortedArray(hashComparator, punishSet),
+    );
   }
+}
 
-  private constructor(
-    /**
-     * `chi_m`: The first, χm, is the index of the manager service which is
-     * the service able to effect an alteration of χ from block to block,
-     * as well as bestow services with storage deposit credits.
-     * https://graypaper.fluffylabs.dev/#/7e6ff6a/11a40111a801?v=0.6.7
-     */
-    readonly manager: ServiceId,
-    /** `chi_a`: Manages authorization queue one for each core. */
-    readonly authManager: PerCore<ServiceId>,
-    /** `chi_v`: Managers validator keys. */
-    readonly validatorsManager: ServiceId,
-    /** `chi_g`: Dictionary of services that auto-accumulate every block with their gas limit. */
-    readonly autoAccumulateServices: readonly AutoAccumulate[],
-  ) {}
+declare function hashComparator<V extends OpaqueHash>(a: V, b: V) {
+  return a.compare(b);
 }
 
 declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
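
`DisputesRecords` now pre-builds `HashSet` views of its four sorted sets at construction time, so repeated membership checks become dictionary lookups instead of searches over sorted arrays. A hypothetical usage sketch (assuming the usual `has` membership method on `ImmutableHashSet`, which is not shown in this diff):

```ts
// records: DisputesRecords, reportHash: WorkReportHash — both from elsewhere.
const { badSet, wonkySet } = records.asDictionaries();
if (badSet.has(reportHash) || wonkySet.has(reportHash)) {
  // the report was judged invalid, or impossible to judge
}
```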
@@ -9533,6 +9562,11 @@ declare class BlockState extends WithDebug {
   }
 }
 
+/**
+ * Recent history of blocks.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
+ */
 declare class RecentBlocks extends WithDebug {
   static Codec = codec.Class(RecentBlocks, {
     blocks: codecKnownSizeArray(BlockState.Codec, {
@@ -9545,6 +9579,12 @@ declare class RecentBlocks extends WithDebug {
     }),
   });
 
+  static empty() {
+    return new RecentBlocks(asKnownSize([]), {
+      peaks: [],
+    });
+  }
+
   static create(a: CodecRecord<RecentBlocks>) {
     return new RecentBlocks(a.blocks, a.accumulationLog);
   }
@@ -9565,78 +9605,21 @@ declare class RecentBlocks extends WithDebug {
   }
 }
 
-/**
- * Recent history of blocks.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
- */
-declare class RecentBlocksHistory extends WithDebug {
-  static Codec = Descriptor.new<RecentBlocksHistory>(
-    "RecentBlocksHistory",
-    RecentBlocks.Codec.sizeHint,
-    (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()),
-    (decoder) => {
-      const recentBlocks = RecentBlocks.Codec.decode(decoder);
-      return RecentBlocksHistory.create(recentBlocks);
-    },
-    (skip) => {
-      return RecentBlocks.Codec.skip(skip);
-    },
-  );
+type RecentBlocksView = DescribedBy<typeof RecentBlocks.Codec.View>;
 
-  static create(recentBlocks: RecentBlocks) {
-    return new RecentBlocksHistory(recentBlocks);
-  }
-
-  static empty() {
-    return RecentBlocksHistory.create(
-      RecentBlocks.create({
-        blocks: asKnownSize([]),
-        accumulationLog: { peaks: [] },
-      }),
-    );
-  }
-
-  /**
-   * Returns the block's BEEFY super peak.
-   */
-  static accumulationResult(block: BlockState): KeccakHash {
-    return (block as BlockState).accumulationResult;
-  }
-
-  private constructor(private readonly current: RecentBlocks | null) {
-    super();
-  }
+type RecentlyAccumulated = PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
 
-  /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
-  get blocks(): readonly BlockState[] {
-    if (this.current !== null) {
-      return this.current.blocks;
-    }
-
-    throw new Error("RecentBlocksHistory is in invalid state");
-  }
-
-  asCurrent() {
-    if (this.current === null) {
-      throw new Error("Cannot access current RecentBlocks format");
-    }
-    return this.current;
-  }
-
-  updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
-    if (this.current !== null) {
-      return RecentBlocksHistory.create(
-        RecentBlocks.create({
-          ...this.current,
-          blocks: asOpaqueType(blocks as BlockState[]),
-        }),
-      );
-    }
+declare const recentlyAccumulatedCodec = codecPerEpochBlock<
+  ImmutableHashSet<WorkPackageHash>,
+  SequenceView<WorkPackageHash>
+>(
+  codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>()).convert(
+    (x) => Array.from(x),
+    (x) => HashSet.from(x),
+  ),
+);
 
-    throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
-  }
-}
+type RecentlyAccumulatedView = DescribedBy<typeof recentlyAccumulatedCodec.View>;
 
 /**
 * Fixed size of validator metadata.
@@ -9677,6 +9660,10 @@ declare class ValidatorData extends WithDebug {
   }
 }
 
+type ValidatorDataView = DescribedBy<typeof ValidatorData.Codec.View>;
+
+declare const validatorsDataCodec = codecPerValidator(ValidatorData.Codec);
+
 declare enum SafroleSealingKeysKind {
   Tickets = 0,
   Keys = 1,
@@ -9781,6 +9768,8 @@ declare class SafroleData {
   ) {}
 }
 
+type SafroleDataView = DescribedBy<typeof SafroleData.Codec.View>;
+
 /**
 * `B_S`: The basic minimum balance which all services require.
 *
@@ -9815,6 +9804,31 @@ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
   (_s) => {},
 );
 
+/** Encode and decode object with leading version number. */
+declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
+  Descriptor.new<T>(
+    "withVersion",
+    {
+      bytes: val.sizeHint.bytes + 8,
+      isExact: false,
+    },
+    (e, v) => {
+      e.varU64(0n);
+      val.encode(e, v);
+    },
+    (d) => {
+      const version = d.varU64();
+      if (version !== 0n) {
+        throw new Error("Non-zero version is not supported!");
+      }
+      return val.decode(d);
+    },
+    (s) => {
+      s.varU64();
+      val.skip(s);
+    },
+  );
+
 /**
 * Service account details.
 *
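
`codecWithVersion` wraps any descriptor with a leading variable-length u64 version tag; only version 0 is accepted on decode. An illustrative composition using a codec that appears elsewhere in this file:

```ts
// Wire format: varU64 version (must be 0), followed by the wrapped payload.
const versionedU32 = codecWithVersion(codec.u32);
// Encoding writes varU64(0) then the u32; decoding throws on a non-zero version.
```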
@@ -9894,6 +9908,8 @@ declare class ServiceAccountInfo extends WithDebug {
   }
 }
 
+type ServiceAccountInfoView = DescribedBy<typeof ServiceAccountInfo.Codec.View>;
+
 declare class PreimageItem extends WithDebug {
   static Codec = codec.Class(PreimageItem, {
     hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
@@ -9965,6 +9981,66 @@ declare class LookupHistoryItem {
9965
9981
  }
9966
9982
  }
9967
9983
 
9984
+ /** Dictionary entry of services that auto-accumulate every block. */
9985
+ declare class AutoAccumulate {
9986
+ static Codec = codec.Class(AutoAccumulate, {
9987
+ service: codec.u32.asOpaque<ServiceId>(),
9988
+ gasLimit: codec.u64.asOpaque<ServiceGas>(),
9989
+ });
9990
+
9991
+ static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
9992
+ return new AutoAccumulate(service, gasLimit);
9993
+ }
9994
+
9995
+ private constructor(
9996
+ /** Service id that auto-accumulates. */
9997
+ readonly service: ServiceId,
9998
+ /** Gas limit for auto-accumulation. */
9999
+ readonly gasLimit: ServiceGas,
10000
+ ) {}
10001
+ }
10002
+
10003
+ /**
10004
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
10005
+ */
10006
+ declare class PrivilegedServices {
10007
+ /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
10008
+ static Codec = codec.Class(PrivilegedServices, {
10009
+ manager: codec.u32.asOpaque<ServiceId>(),
10010
+ assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
10011
+ delegator: codec.u32.asOpaque<ServiceId>(),
10012
+ registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
10013
+ ? codec.u32.asOpaque<ServiceId>()
10014
+ : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
10015
+ autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
10016
+ });
10017
+
10018
+ static create(a: CodecRecord<PrivilegedServices>) {
10019
+ return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
10020
+ }
10021
+
10022
+ private constructor(
10023
+ /**
10024
+ * `χ_M`: Manages alteration of χ from block to block,
10025
+ * as well as bestowing storage deposit credits on services.
10026
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
10027
+ */
10028
+ readonly manager: ServiceId,
10029
+ /** `χ_V`: Manages validator keys. */
10030
+ readonly delegator: ServiceId,
10031
+ /**
10032
+ * `χ_R`: Manages the creation of services in protected range.
10033
+ *
10034
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
10035
+ */
10036
+ readonly registrar: ServiceId,
10037
+ /** `χ_A`: Manages the authorization queues, one for each core. */
10038
+ readonly assigners: PerCore<ServiceId>,
10039
+ /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
10040
+ readonly autoAccumulateServices: readonly AutoAccumulate[],
10041
+ ) {}
10042
+ }
10043
+
9968
10044
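A construction sketch mirroring `InMemoryState.empty` further down (`spec` is an assumed `ChainSpec`); the `registrar` field is only serialized from 0.7.1 on, and older codecs skip it and decode it back to the `2 ** 32 - 1` default:

    const privileged = PrivilegedServices.create({
      manager: tryAsServiceId(0),
      assigners: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
      delegator: tryAsServiceId(0),
      registrar: tryAsServiceId(2 ** 32 - 1), // "no registrar" sentinel
      autoAccumulateServices: [],
    });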
  declare const codecServiceId: Descriptor<ServiceId> =
9969
10045
  Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
9970
10046
  ? codec.u32.asOpaque<ServiceId>()
@@ -10105,12 +10181,26 @@ declare class CoreStatistics {
10105
10181
  * Service statistics.
10106
10182
  * Updated per block, based on available work reports (`W`).
10107
10183
  *
10108
- * https://graypaper.fluffylabs.dev/#/68eaa1f/185104185104?v=0.6.4
10109
- * https://github.com/gavofyork/graypaper/blob/9bffb08f3ea7b67832019176754df4fb36b9557d/text/statistics.tex#L77
10184
+ * https://graypaper.fluffylabs.dev/#/1c979cb/199802199802?v=0.7.1
10110
10185
  */
10111
10186
  declare class ServiceStatistics {
10112
- static Codec = Compatibility.isGreaterOrEqual(GpVersion.V0_7_0)
10113
- ? codec.Class(ServiceStatistics, {
10187
+ static Codec = Compatibility.selectIfGreaterOrEqual({
10188
+ fallback: codec.Class(ServiceStatistics, {
10189
+ providedCount: codecVarU16,
10190
+ providedSize: codec.varU32,
10191
+ refinementCount: codec.varU32,
10192
+ refinementGasUsed: codecVarGas,
10193
+ imports: codecVarU16,
10194
+ exports: codecVarU16,
10195
+ extrinsicSize: codec.varU32,
10196
+ extrinsicCount: codecVarU16,
10197
+ accumulateCount: codec.varU32,
10198
+ accumulateGasUsed: codecVarGas,
10199
+ onTransfersCount: codec.varU32,
10200
+ onTransfersGasUsed: codecVarGas,
10201
+ }),
10202
+ versions: {
10203
+ [GpVersion.V0_7_0]: codec.Class(ServiceStatistics, {
10114
10204
  providedCount: codecVarU16,
10115
10205
  providedSize: codec.varU32,
10116
10206
  refinementCount: codec.varU32,
@@ -10123,21 +10213,23 @@ declare class ServiceStatistics {
10123
10213
  accumulateGasUsed: codecVarGas,
10124
10214
  onTransfersCount: codec.varU32,
10125
10215
  onTransfersGasUsed: codecVarGas,
10126
- })
10127
- : codec.Class(ServiceStatistics, {
10216
+ }),
10217
+ [GpVersion.V0_7_1]: codec.Class(ServiceStatistics, {
10128
10218
  providedCount: codecVarU16,
10129
10219
  providedSize: codec.varU32,
10130
10220
  refinementCount: codec.varU32,
10131
10221
  refinementGasUsed: codecVarGas,
10132
10222
  imports: codecVarU16,
10133
- exports: codecVarU16,
10134
- extrinsicSize: codec.varU32,
10135
10223
  extrinsicCount: codecVarU16,
10224
+ extrinsicSize: codec.varU32,
10225
+ exports: codecVarU16,
10136
10226
  accumulateCount: codec.varU32,
10137
10227
  accumulateGasUsed: codecVarGas,
10138
- onTransfersCount: codec.varU32,
10139
- onTransfersGasUsed: codecVarGas,
10140
- });
10228
+ onTransfersCount: ignoreValueWithDefault(tryAsU32(0)),
10229
+ onTransfersGasUsed: ignoreValueWithDefault(tryAsServiceGas(0)),
10230
+ }),
10231
+ },
10232
+ });
10141
10233
 
10142
10234
  static create(v: CodecRecord<ServiceStatistics>) {
10143
10235
  return new ServiceStatistics(
@@ -10177,9 +10269,9 @@ declare class ServiceStatistics {
10177
10269
  public accumulateCount: U32,
10178
10270
  /** `a.1` */
10179
10271
  public accumulateGasUsed: ServiceGas,
10180
- /** `t.0` */
10272
+ /** `t.0` @deprecated since 0.7.1 */
10181
10273
  public onTransfersCount: U32,
10182
- /** `t.1` */
10274
+ /** `t.1` @deprecated since 0.7.1 */
10183
10275
  public onTransfersGasUsed: ServiceGas,
10184
10276
  ) {}
10185
10277
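One observable consequence of the 0.7.1 layout: the on-transfer counters are wired to `ignoreValueWithDefault`, so they are dropped on encode and restored to their zero defaults on decode. A sketch (`stats` and `spec` assumed):

    const bytes = Encoder.encodeObject(ServiceStatistics.Codec, stats, spec);
    const decoded = Decoder.decodeObject(ServiceStatistics.Codec, bytes, spec);
    // Under >= 0.7.1, decoded.onTransfersCount and decoded.onTransfersGasUsed
    // come back as the zero defaults regardless of the values in `stats`.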
 
@@ -10227,6 +10319,8 @@ declare class StatisticsData {
10227
10319
  ) {}
10228
10320
  }
10229
10321
 
10322
+ type StatisticsDataView = DescribedBy<typeof StatisticsData.Codec.View>;
10323
+
10230
10324
  /**
10231
10325
  * In addition to the entropy accumulator η_0, we retain
10232
10326
  * three additional historical values of the accumulator at
@@ -10278,7 +10372,7 @@ type State = {
10278
10372
  /**
10279
10373
  * `γₖ gamma_k`: The keys for the validators of the next epoch, equivalent to those keys which constitute γ_z .
10280
10374
  */
10281
- readonly nextValidatorData: SafroleData["nextValidatorData"];
10375
+ readonly nextValidatorData: PerValidator<ValidatorData>;
10282
10376
 
10283
10377
  /**
10284
10378
  * `κ kappa`: Validators, who are the set of economic actors uniquely
@@ -10310,21 +10404,190 @@ type State = {
10310
10404
  *
10311
10405
  * https://graypaper.fluffylabs.dev/#/579bd12/186401186401
10312
10406
  */
10313
- readonly timeslot: TimeSlot;
10407
+ readonly timeslot: TimeSlot;
10408
+
10409
+ /**
10410
+ * `η eta`: An on-chain entropy pool is retained in η.
10411
+ *
10412
+ * https://graypaper.fluffylabs.dev/#/579bd12/080c01080d01
10413
+ */
10414
+ readonly entropy: FixedSizeArray<EntropyHash, ENTROPY_ENTRIES>;
10415
+
10416
+ /**
10417
+ * `α alpha`: Authorizers available for each core (authorizer pool).
10418
+ *
10419
+ * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10420
+ */
10421
+ readonly authPools: PerCore<AuthorizationPool>;
10422
+
10423
+ /**
10424
+ * `φ phi`: A queue of authorizers for each core used to fill up the pool.
10425
+ *
10426
+ * Only updated by `accumulate` calls using `assign` host call.
10427
+ *
10428
+ * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10429
+ */
10430
+ readonly authQueues: PerCore<AuthorizationQueue>;
10431
+
10432
+ /**
10433
+ * `β beta`: State of the blocks from recent history.
10434
+ *
10435
+ * https://graypaper.fluffylabs.dev/#/579bd12/0fb7010fb701
10436
+ */
10437
+ readonly recentBlocks: RecentBlocks;
10438
+
10439
+ /**
10440
+ * `π pi`: Previous and current statistics of each validator,
10441
+ * cores statistics and services statistics.
10442
+ *
10443
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/18f60118f601?v=0.6.4
10444
+ */
10445
+ readonly statistics: StatisticsData;
10446
+
10447
+ /**
10448
+ * `ϑ theta`: We also maintain knowledge of ready (i.e. available
10449
+ * and/or audited) but not-yet-accumulated work-reports in
10450
+ * the state item ϑ.
10451
+ *
10452
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165300165500
10453
+ */
10454
+ readonly accumulationQueue: AccumulationQueue;
10455
+
10456
+ /**
10457
+ * `ξ xi`: In order to know which work-packages have been
10458
+ * accumulated already, we maintain a history of what has
10459
+ * been accumulated. This history, ξ, is sufficiently large
10460
+ * for an epoch worth of work-reports.
10461
+ *
10462
+ * https://graypaper.fluffylabs.dev/#/5f542d7/161a00161d00
10463
+ */
10464
+ readonly recentlyAccumulated: RecentlyAccumulated;
10465
+
10466
+ /*
10467
+ * `γₐ gamma_a`: The ticket accumulator - a series of highest-scoring ticket identifiers to be
10468
+ * used for the next epoch.
10469
+ *
10470
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0dc3000dc500
10471
+ */
10472
+ readonly ticketsAccumulator: SafroleData["ticketsAccumulator"];
10473
+
10474
+ /**
10475
+ * `γₛ gamma_s`: γs is the current epoch’s slot-sealer series, which is either a full complement
10476
+ * of `E` tickets or, in the case of a fallback mode, a series of `E` Bandersnatch
10477
+ * keys.
10478
+ *
10479
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0dc6000dc800
10480
+ */
10481
+ readonly sealingKeySeries: SafroleData["sealingKeySeries"];
10482
+
10483
+ /**
10484
+ * `γ_z gamma_z`: The epoch’s root, a Bandersnatch ring root composed with the one Bandersnatch
10485
+ * key of each of the next epoch’s validators, defined in γ_k.
10486
+ *
10487
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0da8000db800
10488
+ */
10489
+ readonly epochRoot: SafroleData["epochRoot"];
10490
+
10491
+ /**
10492
+ * `χ chi`: Up to three services may be recognized as privileged. The portion of state in which
10493
+ * this is held is denoted χ and has three service index components together with
10494
+ * a gas limit.
10495
+ *
10496
+ * https://graypaper.fluffylabs.dev/#/85129da/116f01117201?v=0.6.3
10497
+ */
10498
+ readonly privilegedServices: PrivilegedServices;
10499
+
10500
+ /**
10501
+ * `θ theta`: Sequence of merkle mountain belts from recent accumulations
10502
+ * with service that accumulated them.
10503
+ *
10504
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/3bad023bad02?v=0.6.7
10505
+ *
10506
+ * NOTE Maximum size of this array is unspecified in GP
10507
+ */
10508
+ readonly accumulationOutputLog: SortedArray<AccumulationOutput>;
10509
+
10510
+ /**
10511
+ * Retrieve details about single service.
10512
+ */
10513
+ getService(id: ServiceId): Service | null;
10514
+ };
10515
+
10516
+ /** Service details. */
10517
+ interface Service {
10518
+ /** Service id. */
10519
+ readonly serviceId: ServiceId;
10520
+
10521
+ /** Retrieve service account info. */
10522
+ getInfo(): ServiceAccountInfo;
10523
+
10524
+ /** Read one particular storage item. */
10525
+ getStorage(storage: StorageKey): BytesBlob | null;
10526
+
10527
+ /** Check if preimage is present without retrieving the blob. */
10528
+ hasPreimage(hash: PreimageHash): boolean;
10529
+
10530
+ /** Retrieve a preimage. */
10531
+ getPreimage(hash: PreimageHash): BytesBlob | null;
10532
+
10533
+ /** Retrieve lookup history of a preimage. */
10534
+ getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null;
10535
+ }
10536
+
10537
+ /** Additional marker interface, when state view is supported/required. */
10538
+ type WithStateView<V = StateView> = {
10539
+ /** Get view of the state. */
10540
+ view(): V;
10541
+ };
10542
+
10543
+ /**
10544
+ * A non-decoding version of the `State`.
10545
+ *
10546
+ * Note we don't require all fields to have view accessors, since
10547
+ * it's only beneficial for large collections to be read via views.
10548
+ *
10549
+ * https://graypaper.fluffylabs.dev/#/579bd12/08f10008f100
10550
+ */
10551
+ type StateView = {
10552
+ /**
10553
+
10554
+ * `ρ rho`: work-reports which have been reported but are not yet known to be
10555
+ * available to a super-majority of validators, together with the time
10556
+ * at which each was reported.
10557
+ *
10558
+ * https://graypaper.fluffylabs.dev/#/579bd12/135800135800
10559
+ */
10560
+ availabilityAssignmentView(): AvailabilityAssignmentsView;
10561
+
10562
+ /**
10563
+ * `ι iota`: The validator keys and metadata to be drawn from next.
10564
+ */
10565
+ designatedValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
10566
+
10567
+ /**
10568
+ * `κ kappa`: Validators, who are the set of economic actors uniquely
10569
+ * privileged to help build and maintain the Jam chain, are
10570
+ * identified within κ, archived in λ and enqueued from ι.
10571
+ *
10572
+ * https://graypaper.fluffylabs.dev/#/579bd12/080201080601
10573
+ */
10574
+ currentValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
10314
10575
 
10315
10576
  /**
10316
- * eta`: An on-chain entropy pool is retained in η.
10577
+ * `λ lambda`: Validators, who are the set of economic actors uniquely
10578
+ * privileged to help build and maintain the Jam chain, are
10579
+ * identified within κ, archived in λ and enqueued from ι.
10317
10580
  *
10318
- * https://graypaper.fluffylabs.dev/#/579bd12/080c01080d01
10581
+ * https://graypaper.fluffylabs.dev/#/579bd12/080201080601
10319
10582
  */
10320
- readonly entropy: FixedSizeArray<EntropyHash, ENTROPY_ENTRIES>;
10583
+ previousValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
10321
10584
 
10322
10585
  /**
10323
10586
  * `α alpha`: Authorizers available for each core (authorizer pool).
10324
10587
  *
10325
10588
  * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10326
10589
  */
10327
- readonly authPools: PerCore<KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>>;
10590
+ authPoolsView(): SequenceView<AuthorizationPool, SequenceView<AuthorizerHash>>;
10328
10591
 
10329
10592
  /**
10330
10593
  * `φ phi`: A queue of authorizers for each core used to fill up the pool.
@@ -10333,14 +10596,14 @@ type State = {
10333
10596
  *
10334
10597
  * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
10335
10598
  */
10336
- readonly authQueues: PerCore<FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>>;
10599
+ authQueuesView(): SequenceView<AuthorizationQueue, SequenceView<AuthorizerHash>>;
10337
10600
 
10338
10601
  /**
10339
10602
  * `β beta`: State of the blocks from recent history.
10340
10603
  *
10341
10604
  * https://graypaper.fluffylabs.dev/#/579bd12/0fb7010fb701
10342
10605
  */
10343
- readonly recentBlocks: RecentBlocksHistory;
10606
+ recentBlocksView(): RecentBlocksView;
10344
10607
 
10345
10608
  /**
10346
10609
  * `π pi`: Previous and current statistics of each validator,
@@ -10348,7 +10611,7 @@ type State = {
10348
10611
  *
10349
10612
  * https://graypaper.fluffylabs.dev/#/68eaa1f/18f60118f601?v=0.6.4
10350
10613
  */
10351
- readonly statistics: StatisticsData;
10614
+ statisticsView(): StatisticsDataView;
10352
10615
 
10353
10616
  /**
10354
10617
  * `ϑ theta`: We also maintain knowledge of ready (i.e. available
@@ -10357,7 +10620,7 @@ type State = {
10357
10620
  *
10358
10621
  * https://graypaper.fluffylabs.dev/#/5f542d7/165300165500
10359
10622
  */
10360
- readonly accumulationQueue: PerEpochBlock<readonly NotYetAccumulatedReport[]>;
10623
+ accumulationQueueView(): AccumulationQueueView;
10361
10624
 
10362
10625
  /**
10363
10626
  * `ξ xi`: In order to know which work-packages have been
@@ -10367,79 +10630,17 @@ type State = {
10367
10630
  *
10368
10631
  * https://graypaper.fluffylabs.dev/#/5f542d7/161a00161d00
10369
10632
  */
10370
- readonly recentlyAccumulated: PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
10633
+ recentlyAccumulatedView(): RecentlyAccumulatedView;
10371
10634
 
10372
10635
  /*
10373
- * `γₐ gamma_a`: The ticket accumulator - a series of highest-scoring ticket identifiers to be
10374
- * used for the next epoch.
10375
- *
10376
- * https://graypaper.fluffylabs.dev/#/5f542d7/0dc3000dc500
10377
- */
10378
- readonly ticketsAccumulator: SafroleData["ticketsAccumulator"];
10379
-
10380
- /**
10381
- * `γₛ gamma_s`: γs is the current epoch’s slot-sealer series, which is either a full complement
10382
- * of `E` tickets or, in the case of a fallback mode, a series of `E` Bandersnatch
10383
- * keys.
10384
- *
10385
- * https://graypaper.fluffylabs.dev/#/5f542d7/0dc6000dc800
10386
- */
10387
- readonly sealingKeySeries: SafroleData["sealingKeySeries"];
10388
-
10389
- /**
10390
- * `γ_z gamma_z`: The epoch’s root, a Bandersnatch ring root composed with the one Bandersnatch
10391
- * key of each of the next epoch’s validators, defined in γ_k.
10392
- *
10393
- * https://graypaper.fluffylabs.dev/#/5f542d7/0da8000db800
10394
- */
10395
- readonly epochRoot: SafroleData["epochRoot"];
10396
-
10397
- /**
10398
- * `χ chi`: Up to three services may be recognized as privileged. The portion of state in which
10399
- * this is held is denoted χ and has three service index components together with
10400
- * a gas limit.
10401
- *
10402
- * https://graypaper.fluffylabs.dev/#/85129da/116f01117201?v=0.6.3
10403
- */
10404
- readonly privilegedServices: PrivilegedServices;
10405
-
10406
- /**
10407
- * `θ theta`: Sequence of merkle mountain belts from recent accumulations
10408
- * with service that accumulated them.
10409
- *
10410
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/3bad023bad02?v=0.6.7
10411
- *
10412
- * NOTE Maximum size of this array is unspecified in GP
10636
+ * `γ gamma`: Safrole data.
10413
10637
  */
10414
- readonly accumulationOutputLog: SortedArray<AccumulationOutput>;
10638
+ safroleDataView(): SafroleDataView;
10415
10639
 
10416
- /**
10417
- * Retrieve details about single service.
10418
- */
10419
- getService(id: ServiceId): Service | null;
10640
+ /** Retrieve details about single service. */
10641
+ getServiceInfoView(id: ServiceId): ServiceAccountInfoView | null;
10420
10642
  };
10421
10643
 
10422
- /** Service details. */
10423
- interface Service {
10424
- /** Service id. */
10425
- readonly serviceId: ServiceId;
10426
-
10427
- /** Retrieve service account info. */
10428
- getInfo(): ServiceAccountInfo;
10429
-
10430
- /** Read one particular storage item. */
10431
- getStorage(storage: StorageKey): BytesBlob | null;
10432
-
10433
- /** Check if preimage is present without retrieving the blob. */
10434
- hasPreimage(hash: PreimageHash): boolean;
10435
-
10436
- /** Retrieve a preimage. */
10437
- getPreimage(hash: PreimageHash): BytesBlob | null;
10438
-
10439
- /** Retrieve lookup history of a preimage. */
10440
- getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null;
10441
- }
10442
-
10443
10644
  declare enum UpdatePreimageKind {
10444
10645
  /** Insert new preimage and optionally update its lookup history. */
10445
10646
  Provide = 0,
@@ -10762,10 +10963,10 @@ declare class InMemoryService extends WithDebug implements Service {
10762
10963
  /**
10763
10964
  * A special version of state, stored fully in-memory.
10764
10965
  */
10765
- declare class InMemoryState extends WithDebug implements State, EnumerableState {
10966
+ declare class InMemoryState extends WithDebug implements State, WithStateView, EnumerableState {
10766
10967
  /** Create a new `InMemoryState` by providing all required fields. */
10767
- static create(state: InMemoryStateFields) {
10768
- return new InMemoryState(state);
10968
+ static new(chainSpec: ChainSpec, state: InMemoryStateFields) {
10969
+ return new InMemoryState(chainSpec, state);
10769
10970
  }
10770
10971
 
10771
10972
  /**
@@ -10783,7 +10984,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
10783
10984
  /**
10784
10985
  * Create a new `InMemoryState` from some other state object.
10785
10986
  */
10786
- static copyFrom(other: State, servicesData: Map<ServiceId, ServiceEntries>) {
10987
+ static copyFrom(chainSpec: ChainSpec, other: State, servicesData: Map<ServiceId, ServiceEntries>) {
10787
10988
  const services = new Map<ServiceId, InMemoryService>();
10788
10989
  for (const [id, entries] of servicesData.entries()) {
10789
10990
  const service = other.getService(id);
@@ -10794,7 +10995,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
10794
10995
  services.set(id, inMemService);
10795
10996
  }
10796
10997
 
10797
- return InMemoryState.create({
10998
+ return InMemoryState.new(chainSpec, {
10798
10999
  availabilityAssignment: other.availabilityAssignment,
10799
11000
  accumulationQueue: other.accumulationQueue,
10800
11001
  designatedValidatorData: other.designatedValidatorData,
@@ -10989,12 +11190,12 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
10989
11190
  disputesRecords: DisputesRecords;
10990
11191
  timeslot: TimeSlot;
10991
11192
  entropy: FixedSizeArray<EntropyHash, ENTROPY_ENTRIES>;
10992
- authPools: PerCore<KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>>;
10993
- authQueues: PerCore<FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>>;
10994
- recentBlocks: RecentBlocksHistory;
11193
+ authPools: PerCore<AuthorizationPool>;
11194
+ authQueues: PerCore<AuthorizationQueue>;
11195
+ recentBlocks: RecentBlocks;
10995
11196
  statistics: StatisticsData;
10996
- accumulationQueue: PerEpochBlock<readonly NotYetAccumulatedReport[]>;
10997
- recentlyAccumulated: PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
11197
+ accumulationQueue: AccumulationQueue;
11198
+ recentlyAccumulated: RecentlyAccumulated;
10998
11199
  ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">;
10999
11200
  sealingKeySeries: SafroleSealingKeys;
11000
11201
  epochRoot: BandersnatchRingRoot;
@@ -11010,7 +11211,10 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
11010
11211
  return this.services.get(id) ?? null;
11011
11212
  }
11012
11213
 
11013
- private constructor(s: InMemoryStateFields) {
11214
+ protected constructor(
11215
+ private readonly chainSpec: ChainSpec,
11216
+ s: InMemoryStateFields,
11217
+ ) {
11014
11218
  super();
11015
11219
  this.availabilityAssignment = s.availabilityAssignment;
11016
11220
  this.designatedValidatorData = s.designatedValidatorData;
@@ -11034,11 +11238,15 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
11034
11238
  this.services = s.services;
11035
11239
  }
11036
11240
 
11241
+ view(): StateView {
11242
+ return new InMemoryStateView(this.chainSpec, this);
11243
+ }
11244
+
11037
11245
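A sketch of the new view path (`spec` is an assumed `ChainSpec`); the returned `StateView` hands out lazily-decoded views instead of fully materialized objects:

    const state = InMemoryState.empty(spec);
    const view = state.view();            // StateView backed by this state
    const stats = view.statisticsView();  // StatisticsDataView, decoded lazily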
  /**
11038
11246
  * Create an empty and possibly incoherent `InMemoryState`.
11039
11247
  */
11040
11248
  static empty(spec: ChainSpec) {
11041
- return new InMemoryState({
11249
+ return new InMemoryState(spec, {
11042
11250
  availabilityAssignment: tryAsPerCore(
11043
11251
  Array.from({ length: spec.coresCount }, () => null),
11044
11252
  spec,
@@ -11105,7 +11313,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
11105
11313
  ),
11106
11314
  spec,
11107
11315
  ),
11108
- recentBlocks: RecentBlocksHistory.empty(),
11316
+ recentBlocks: RecentBlocks.empty(),
11109
11317
  statistics: StatisticsData.create({
11110
11318
  current: tryAsPerValidator(
11111
11319
  Array.from({ length: spec.validatorsCount }, () => ValidatorStatistics.empty()),
@@ -11139,8 +11347,9 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
11139
11347
  epochRoot: Bytes.zero(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
11140
11348
  privilegedServices: PrivilegedServices.create({
11141
11349
  manager: tryAsServiceId(0),
11142
- authManager: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
11143
- validatorsManager: tryAsServiceId(0),
11350
+ assigners: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
11351
+ delegator: tryAsServiceId(0),
11352
+ registrar: tryAsServiceId(MAX_VALUE),
11144
11353
  autoAccumulateServices: [],
11145
11354
  }),
11146
11355
  accumulationOutputLog: SortedArray.fromArray(accumulationOutputComparator, []),
@@ -11190,12 +11399,18 @@ type FieldNames<T> = {
11190
11399
  [K in keyof T]: T[K] extends Function ? never : K;
11191
11400
  }[keyof T];
11192
11401
 
11402
+ type index$e_AUTHORIZATION_QUEUE_SIZE = AUTHORIZATION_QUEUE_SIZE;
11193
11403
  type index$e_AccumulationOutput = AccumulationOutput;
11194
11404
  declare const index$e_AccumulationOutput: typeof AccumulationOutput;
11405
+ type index$e_AccumulationQueue = AccumulationQueue;
11406
+ type index$e_AccumulationQueueView = AccumulationQueueView;
11407
+ type index$e_AuthorizationPool = AuthorizationPool;
11408
+ type index$e_AuthorizationQueue = AuthorizationQueue;
11195
11409
  type index$e_AutoAccumulate = AutoAccumulate;
11196
11410
  declare const index$e_AutoAccumulate: typeof AutoAccumulate;
11197
11411
  type index$e_AvailabilityAssignment = AvailabilityAssignment;
11198
11412
  declare const index$e_AvailabilityAssignment: typeof AvailabilityAssignment;
11413
+ type index$e_AvailabilityAssignmentsView = AvailabilityAssignmentsView;
11199
11414
  declare const index$e_BASE_SERVICE_BALANCE: typeof BASE_SERVICE_BALANCE;
11200
11415
  type index$e_BlockState = BlockState;
11201
11416
  declare const index$e_BlockState: typeof BlockState;
@@ -11217,8 +11432,11 @@ type index$e_InMemoryStateFields = InMemoryStateFields;
11217
11432
  type index$e_LookupHistoryItem = LookupHistoryItem;
11218
11433
  declare const index$e_LookupHistoryItem: typeof LookupHistoryItem;
11219
11434
  type index$e_LookupHistorySlots = LookupHistorySlots;
11435
+ type index$e_MAX_AUTH_POOL_SIZE = MAX_AUTH_POOL_SIZE;
11220
11436
  declare const index$e_MAX_LOOKUP_HISTORY_SLOTS: typeof MAX_LOOKUP_HISTORY_SLOTS;
11221
11437
  type index$e_MAX_RECENT_HISTORY = MAX_RECENT_HISTORY;
11438
+ type index$e_NotYetAccumulatedReport = NotYetAccumulatedReport;
11439
+ declare const index$e_NotYetAccumulatedReport: typeof NotYetAccumulatedReport;
11222
11440
  type index$e_PerCore<T> = PerCore<T>;
11223
11441
  type index$e_PreimageItem = PreimageItem;
11224
11442
  declare const index$e_PreimageItem: typeof PreimageItem;
@@ -11226,10 +11444,12 @@ type index$e_PrivilegedServices = PrivilegedServices;
11226
11444
  declare const index$e_PrivilegedServices: typeof PrivilegedServices;
11227
11445
  type index$e_RecentBlocks = RecentBlocks;
11228
11446
  declare const index$e_RecentBlocks: typeof RecentBlocks;
11229
- type index$e_RecentBlocksHistory = RecentBlocksHistory;
11230
- declare const index$e_RecentBlocksHistory: typeof RecentBlocksHistory;
11447
+ type index$e_RecentBlocksView = RecentBlocksView;
11448
+ type index$e_RecentlyAccumulated = RecentlyAccumulated;
11449
+ type index$e_RecentlyAccumulatedView = RecentlyAccumulatedView;
11231
11450
  type index$e_SafroleData = SafroleData;
11232
11451
  declare const index$e_SafroleData: typeof SafroleData;
11452
+ type index$e_SafroleDataView = SafroleDataView;
11233
11453
  type index$e_SafroleSealingKeys = SafroleSealingKeys;
11234
11454
  type index$e_SafroleSealingKeysData = SafroleSealingKeysData;
11235
11455
  declare const index$e_SafroleSealingKeysData: typeof SafroleSealingKeysData;
@@ -11238,14 +11458,17 @@ declare const index$e_SafroleSealingKeysKind: typeof SafroleSealingKeysKind;
11238
11458
  type index$e_Service = Service;
11239
11459
  type index$e_ServiceAccountInfo = ServiceAccountInfo;
11240
11460
  declare const index$e_ServiceAccountInfo: typeof ServiceAccountInfo;
11461
+ type index$e_ServiceAccountInfoView = ServiceAccountInfoView;
11241
11462
  type index$e_ServiceData = ServiceData;
11242
11463
  type index$e_ServiceEntries = ServiceEntries;
11243
11464
  type index$e_ServiceStatistics = ServiceStatistics;
11244
11465
  declare const index$e_ServiceStatistics: typeof ServiceStatistics;
11245
11466
  type index$e_ServicesUpdate = ServicesUpdate;
11246
11467
  type index$e_State = State;
11468
+ type index$e_StateView = StateView;
11247
11469
  type index$e_StatisticsData = StatisticsData;
11248
11470
  declare const index$e_StatisticsData: typeof StatisticsData;
11471
+ type index$e_StatisticsDataView = StatisticsDataView;
11249
11472
  type index$e_StorageItem = StorageItem;
11250
11473
  declare const index$e_StorageItem: typeof StorageItem;
11251
11474
  type index$e_StorageKey = StorageKey;
@@ -11266,27 +11489,35 @@ declare const index$e_UpdateStorageKind: typeof UpdateStorageKind;
11266
11489
  type index$e_VALIDATOR_META_BYTES = VALIDATOR_META_BYTES;
11267
11490
  type index$e_ValidatorData = ValidatorData;
11268
11491
  declare const index$e_ValidatorData: typeof ValidatorData;
11492
+ type index$e_ValidatorDataView = ValidatorDataView;
11269
11493
  type index$e_ValidatorStatistics = ValidatorStatistics;
11270
11494
  declare const index$e_ValidatorStatistics: typeof ValidatorStatistics;
11495
+ type index$e_WithStateView<V = StateView> = WithStateView<V>;
11271
11496
  declare const index$e_accumulationOutputComparator: typeof accumulationOutputComparator;
11497
+ declare const index$e_accumulationQueueCodec: typeof accumulationQueueCodec;
11498
+ declare const index$e_authPoolsCodec: typeof authPoolsCodec;
11499
+ declare const index$e_authQueuesCodec: typeof authQueuesCodec;
11500
+ declare const index$e_availabilityAssignmentsCodec: typeof availabilityAssignmentsCodec;
11272
11501
  declare const index$e_codecBandersnatchKey: typeof codecBandersnatchKey;
11273
11502
  declare const index$e_codecPerCore: typeof codecPerCore;
11274
11503
  declare const index$e_codecServiceId: typeof codecServiceId;
11275
11504
  declare const index$e_codecVarGas: typeof codecVarGas;
11276
11505
  declare const index$e_codecVarU16: typeof codecVarU16;
11277
- declare const index$e_codecWithHash: typeof codecWithHash;
11506
+ declare const index$e_codecWithVersion: typeof codecWithVersion;
11278
11507
  declare const index$e_hashComparator: typeof hashComparator;
11279
11508
  declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
11509
+ declare const index$e_recentlyAccumulatedCodec: typeof recentlyAccumulatedCodec;
11280
11510
  declare const index$e_serviceDataCodec: typeof serviceDataCodec;
11281
11511
  declare const index$e_serviceEntriesCodec: typeof serviceEntriesCodec;
11282
11512
  declare const index$e_sortedSetCodec: typeof sortedSetCodec;
11283
11513
  declare const index$e_tryAsLookupHistorySlots: typeof tryAsLookupHistorySlots;
11284
11514
  declare const index$e_tryAsPerCore: typeof tryAsPerCore;
11515
+ declare const index$e_validatorsDataCodec: typeof validatorsDataCodec;
11285
11516
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
11286
11517
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
11287
11518
  declare namespace index$e {
11288
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11289
- export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
11519
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_NotYetAccumulatedReport as NotYetAccumulatedReport, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_accumulationQueueCodec as accumulationQueueCodec, index$e_authPoolsCodec as authPoolsCodec, index$e_authQueuesCodec as authQueuesCodec, index$e_availabilityAssignmentsCodec as availabilityAssignmentsCodec, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_recentlyAccumulatedCodec as recentlyAccumulatedCodec, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_validatorsDataCodec as validatorsDataCodec, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11520
+ export type { index$e_AUTHORIZATION_QUEUE_SIZE as AUTHORIZATION_QUEUE_SIZE, index$e_AccumulationQueue as AccumulationQueue, index$e_AccumulationQueueView as AccumulationQueueView, index$e_AuthorizationPool as AuthorizationPool, index$e_AuthorizationQueue as AuthorizationQueue, index$e_AvailabilityAssignmentsView as AvailabilityAssignmentsView, index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_AUTH_POOL_SIZE as MAX_AUTH_POOL_SIZE, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_RecentBlocksView as RecentBlocksView, index$e_RecentlyAccumulated as RecentlyAccumulated, index$e_RecentlyAccumulatedView as RecentlyAccumulatedView, index$e_SafroleDataView as SafroleDataView, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceAccountInfoView as ServiceAccountInfoView, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StateView as StateView, index$e_StatisticsDataView as StatisticsDataView, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES, index$e_ValidatorDataView as ValidatorDataView, index$e_WithStateView as WithStateView };
11290
11521
  }
11291
11522
 
11292
11523
  type StateKey = Opaque<OpaqueHash, "stateKey">;
@@ -11353,7 +11584,7 @@ declare namespace stateKeys {
11353
11584
  }
11354
11585
 
11355
11586
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
11356
- export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
11587
+ export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
11357
11588
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11358
11589
  const out = Bytes.zero(HASH_SIZE);
11359
11590
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11361,11 +11592,11 @@ declare namespace stateKeys {
11361
11592
  return legacyServiceNested(serviceId, out);
11362
11593
  }
11363
11594
 
11364
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
11595
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
11365
11596
  }
11366
11597
 
11367
11598
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
11368
- export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
11599
+ export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
11369
11600
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11370
11601
  const out = Bytes.zero(HASH_SIZE);
11371
11602
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11373,11 +11604,16 @@ declare namespace stateKeys {
11373
11604
  return legacyServiceNested(serviceId, out);
11374
11605
  }
11375
11606
 
11376
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
11607
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
11377
11608
  }
11378
11609
 
11379
11610
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
11380
- export function serviceLookupHistory(serviceId: ServiceId, hash: PreimageHash, preimageLength: U32): StateKey {
11611
+ export function serviceLookupHistory(
11612
+ blake2b: Blake2b,
11613
+ serviceId: ServiceId,
11614
+ hash: PreimageHash,
11615
+ preimageLength: U32,
11616
+ ): StateKey {
11381
11617
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11382
11618
  const doubleHash = blake2b.hashBytes(hash);
11383
11619
  const out = Bytes.zero(HASH_SIZE);
@@ -11386,11 +11622,11 @@ declare namespace stateKeys {
11386
11622
  return legacyServiceNested(serviceId, out);
11387
11623
  }
11388
11624
 
11389
- return serviceNested(serviceId, preimageLength, hash);
11625
+ return serviceNested(blake2b, serviceId, preimageLength, hash);
11390
11626
  }
11391
11627
 
11392
11628
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
11393
- export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
11629
+ export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
11394
11630
  const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
11395
11631
  const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
11396
11632
  const key = Bytes.zero(HASH_SIZE);
@@ -11422,33 +11658,25 @@ declare function legacyServiceNested(serviceId: ServiceId, hash: OpaqueHash): St
11422
11658
  return key.asOpaque();
11423
11659
  }
11424
11660
 
11425
- type StateCodec<T> = {
11661
+ type StateCodec<T, V = T> = {
11426
11662
  key: StateKey;
11427
- Codec: Descriptor<T>;
11663
+ Codec: Descriptor<T, V>;
11428
11664
  extract: (s: State) => T;
11429
11665
  };
11430
11666
 
11431
11667
  /** Serialization for particular state entries. */
11432
11668
  declare namespace serialize {
11433
11669
  /** C(1): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b15013b1501?v=0.6.7 */
11434
- export const authPools: StateCodec<State["authPools"]> = {
11670
+ export const authPools: StateCodec<State["authPools"], ReturnType<StateView["authPoolsView"]>> = {
11435
11671
  key: stateKeys.index(StateKeyIdx.Alpha),
11436
- Codec: codecPerCore(
11437
- codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), {
11438
- minLength: 0,
11439
- maxLength: MAX_AUTH_POOL_SIZE,
11440
- typicalLength: MAX_AUTH_POOL_SIZE,
11441
- }),
11442
- ),
11672
+ Codec: authPoolsCodec,
11443
11673
  extract: (s) => s.authPools,
11444
11674
  };
11445
11675
 
11446
11676
  /** C(2): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b31013b3101?v=0.6.7 */
11447
- export const authQueues: StateCodec<State["authQueues"]> = {
11677
+ export const authQueues: StateCodec<State["authQueues"], ReturnType<StateView["authQueuesView"]>> = {
11448
11678
  key: stateKeys.index(StateKeyIdx.Phi),
11449
- Codec: codecPerCore(
11450
- codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), AUTHORIZATION_QUEUE_SIZE),
11451
- ),
11679
+ Codec: authQueuesCodec,
11452
11680
  extract: (s) => s.authQueues,
11453
11681
  };
11454
11682
 
@@ -11456,14 +11684,14 @@ declare namespace serialize {
11456
11684
  * C(3): Recent blocks with compatibility
11457
11685
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e013b3e01?v=0.6.7
11458
11686
  */
11459
- export const recentBlocks: StateCodec<State["recentBlocks"]> = {
11687
+ export const recentBlocks: StateCodec<RecentBlocks, RecentBlocksView> = {
11460
11688
  key: stateKeys.index(StateKeyIdx.Beta),
11461
- Codec: RecentBlocksHistory.Codec,
11689
+ Codec: RecentBlocks.Codec,
11462
11690
  extract: (s) => s.recentBlocks,
11463
11691
  };
11464
11692
 
11465
11693
  /** C(4): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b63013b6301?v=0.6.7 */
11466
- export const safrole: StateCodec<SafroleData> = {
11694
+ export const safrole: StateCodec<SafroleData, SafroleDataView> = {
11467
11695
  key: stateKeys.index(StateKeyIdx.Gamma),
11468
11696
  Codec: SafroleData.Codec,
11469
11697
  extract: (s) =>
@@ -11476,7 +11704,7 @@ declare namespace serialize {
11476
11704
  };
11477
11705
 
11478
11706
  /** C(5): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bba013bba01?v=0.6.7 */
11479
- export const disputesRecords: StateCodec<State["disputesRecords"]> = {
11707
+ export const disputesRecords: StateCodec<DisputesRecords> = {
11480
11708
  key: stateKeys.index(StateKeyIdx.Psi),
11481
11709
  Codec: DisputesRecords.Codec,
11482
11710
  extract: (s) => s.disputesRecords,
@@ -11490,30 +11718,42 @@ declare namespace serialize {
11490
11718
  };
11491
11719
 
11492
11720
  /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
11493
- export const designatedValidators: StateCodec<State["designatedValidatorData"]> = {
11721
+ export const designatedValidators: StateCodec<
11722
+ State["designatedValidatorData"],
11723
+ ReturnType<StateView["designatedValidatorDataView"]>
11724
+ > = {
11494
11725
  key: stateKeys.index(StateKeyIdx.Iota),
11495
- Codec: codecPerValidator(ValidatorData.Codec),
11726
+ Codec: validatorsDataCodec,
11496
11727
  extract: (s) => s.designatedValidatorData,
11497
11728
  };
11498
11729
 
11499
11730
  /** C(8): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b0d023b0d02?v=0.6.7 */
11500
- export const currentValidators: StateCodec<State["currentValidatorData"]> = {
11731
+ export const currentValidators: StateCodec<
11732
+ State["currentValidatorData"],
11733
+ ReturnType<StateView["currentValidatorDataView"]>
11734
+ > = {
11501
11735
  key: stateKeys.index(StateKeyIdx.Kappa),
11502
- Codec: codecPerValidator(ValidatorData.Codec),
11736
+ Codec: validatorsDataCodec,
11503
11737
  extract: (s) => s.currentValidatorData,
11504
11738
  };
11505
11739
 
11506
11740
  /** C(9): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b1a023b1a02?v=0.6.7 */
11507
- export const previousValidators: StateCodec<State["previousValidatorData"]> = {
11741
+ export const previousValidators: StateCodec<
11742
+ State["previousValidatorData"],
11743
+ ReturnType<StateView["previousValidatorDataView"]>
11744
+ > = {
11508
11745
  key: stateKeys.index(StateKeyIdx.Lambda),
11509
- Codec: codecPerValidator(ValidatorData.Codec),
11746
+ Codec: validatorsDataCodec,
11510
11747
  extract: (s) => s.previousValidatorData,
11511
11748
  };
11512
11749
 
11513
11750
  /** C(10): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b27023b2702?v=0.6.7 */
11514
- export const availabilityAssignment: StateCodec<State["availabilityAssignment"]> = {
11751
+ export const availabilityAssignment: StateCodec<
11752
+ State["availabilityAssignment"],
11753
+ ReturnType<StateView["availabilityAssignmentView"]>
11754
+ > = {
11515
11755
  key: stateKeys.index(StateKeyIdx.Rho),
11516
- Codec: codecPerCore(codec.optional(AvailabilityAssignment.Codec)),
11756
+ Codec: availabilityAssignmentsCodec,
11517
11757
  extract: (s) => s.availabilityAssignment,
11518
11758
  };
11519
11759
 
@@ -11532,28 +11772,29 @@ declare namespace serialize {
11532
11772
  };
11533
11773
 
11534
11774
  /** C(13): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b5e023b5e02?v=0.6.7 */
11535
- export const statistics: StateCodec<State["statistics"]> = {
11775
+ export const statistics: StateCodec<StatisticsData, StatisticsDataView> = {
11536
11776
  key: stateKeys.index(StateKeyIdx.Pi),
11537
11777
  Codec: StatisticsData.Codec,
11538
11778
  extract: (s) => s.statistics,
11539
11779
  };
11540
11780
 
11541
11781
  /** C(14): https://graypaper.fluffylabs.dev/#/1c979cb/3bf0023bf002?v=0.7.1 */
11542
- export const accumulationQueue: StateCodec<State["accumulationQueue"]> = {
11782
+ export const accumulationQueue: StateCodec<
11783
+ State["accumulationQueue"],
11784
+ ReturnType<StateView["accumulationQueueView"]>
11785
+ > = {
11543
11786
  key: stateKeys.index(StateKeyIdx.Omega),
11544
- Codec: codecPerEpochBlock(readonlyArray(codec.sequenceVarLen(NotYetAccumulatedReport.Codec))),
11787
+ Codec: accumulationQueueCodec,
11545
11788
  extract: (s) => s.accumulationQueue,
11546
11789
  };
11547
11790
 
11548
11791
  /** C(15): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b96023b9602?v=0.6.7 */
11549
- export const recentlyAccumulated: StateCodec<State["recentlyAccumulated"]> = {
11792
+ export const recentlyAccumulated: StateCodec<
11793
+ State["recentlyAccumulated"],
11794
+ ReturnType<StateView["recentlyAccumulatedView"]>
11795
+ > = {
11550
11796
  key: stateKeys.index(StateKeyIdx.Xi),
11551
- Codec: codecPerEpochBlock(
11552
- codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>()).convert(
11553
- (x) => Array.from(x),
11554
- (x) => HashSet.from(x),
11555
- ),
11556
- ),
11797
+ Codec: recentlyAccumulatedCodec,
11557
11798
  extract: (s) => s.recentlyAccumulated,
11558
11799
  };
11559
11800
 
@@ -11570,24 +11811,26 @@ declare namespace serialize {
11570
11811
  /** C(255, s): https://graypaper.fluffylabs.dev/#/85129da/383103383103?v=0.6.3 */
11571
11812
  export const serviceData = (serviceId: ServiceId) => ({
11572
11813
  key: stateKeys.serviceInfo(serviceId),
11573
- Codec: ServiceAccountInfo.Codec,
11814
+ Codec: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
11815
+ ? codecWithVersion(ServiceAccountInfo.Codec)
11816
+ : ServiceAccountInfo.Codec,
11574
11817
  });
11575
11818
 
11576
11819
  /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
11577
- export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
11578
- key: stateKeys.serviceStorage(serviceId, key),
11820
+ export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
11821
+ key: stateKeys.serviceStorage(blake2b, serviceId, key),
11579
11822
  Codec: dumpCodec,
11580
11823
  });
11581
11824
 
11582
11825
  /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
11583
- export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
11584
- key: stateKeys.servicePreimage(serviceId, hash),
11826
+ export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
11827
+ key: stateKeys.servicePreimage(blake2b, serviceId, hash),
11585
11828
  Codec: dumpCodec,
11586
11829
  });
11587
11830
 
11588
11831
  /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
11589
- export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
11590
- key: stateKeys.serviceLookupHistory(serviceId, hash, len),
11832
+ export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
11833
+ key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
11591
11834
  Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
11592
11835
  });
11593
11836
  }
@@ -11607,6 +11850,107 @@ declare const dumpCodec = Descriptor.new<BytesBlob>(
11607
11850
  (s) => s.bytes(s.decoder.source.length - s.decoder.bytesRead()),
11608
11851
  );
11609
11852
 
11853
+ /**
11854
+ * Abstraction over some backend containing serialized state entries.
11855
+ *
11856
+ * This may or may not be backed by some on-disk database or can be just stored in memory.
11857
+ */
11858
+ interface SerializedStateBackend {
11859
+ /** Retrieve the value stored under the given state key. */
11860
+ get(key: StateKey): BytesBlob | null;
11861
+ }
11862
+
11863
+ declare class SerializedStateView<T extends SerializedStateBackend> implements StateView {
11864
+ constructor(
11865
+ private readonly spec: ChainSpec,
11866
+ public backend: T,
11867
+ /** Best-effort list of recently active services. */
11868
+ private readonly recentlyUsedServices: ServiceId[],
11869
+ private readonly viewCache: HashDictionary<StateKey, unknown>,
11870
+ ) {}
11871
+
11872
+ private retrieveView<A, B>({ key, Codec }: KeyAndCodecWithView<A, B>, description: string): B {
11873
+ const cached = this.viewCache.get(key);
11874
+ if (cached !== undefined) {
11875
+ return cached as B;
11876
+ }
11877
+ const bytes = this.backend.get(key);
11878
+ if (bytes === null) {
11879
+ throw new Error(`Required state entry for ${description} is missing! Accessing view of key: ${key}`);
11880
+ }
11881
+ // NOTE [ToDr] we are not using `Decoder.decodeObject` here because
11882
+ // it needs to skip to the end of the data, and that is expensive.
11883
+ // We assume that the state data is correct and coherent anyway, so
11884
+ // for performance reasons we simply create the view here.
11885
+ const d = Decoder.fromBytesBlob(bytes);
11886
+ d.attachContext(this.spec);
11887
+ const view = Codec.View.decode(d);
11888
+ this.viewCache.set(key, view);
11889
+ return view;
11890
+ }
11891
+
11892
+ availabilityAssignmentView(): AvailabilityAssignmentsView {
11893
+ return this.retrieveView(serialize.availabilityAssignment, "availabilityAssignmentView");
11894
+ }
11895
+
11896
+ designatedValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
11897
+ return this.retrieveView(serialize.designatedValidators, "designatedValidatorsView");
11898
+ }
11899
+
11900
+ currentValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
11901
+ return this.retrieveView(serialize.currentValidators, "currentValidatorsView");
11902
+ }
11903
+
11904
+ previousValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
11905
+ return this.retrieveView(serialize.previousValidators, "previousValidatorsView");
11906
+ }
11907
+
11908
+ authPoolsView(): SequenceView<AuthorizationPool, SequenceView<AuthorizerHash>> {
11909
+ return this.retrieveView(serialize.authPools, "authPoolsView");
11910
+ }
11911
+
11912
+ authQueuesView(): SequenceView<AuthorizationQueue, SequenceView<AuthorizerHash>> {
11913
+ return this.retrieveView(serialize.authQueues, "authQueuesView");
11914
+ }
11915
+
11916
+ recentBlocksView(): RecentBlocksView {
11917
+ return this.retrieveView(serialize.recentBlocks, "recentBlocksView");
11918
+ }
11919
+
11920
+ statisticsView(): StatisticsDataView {
11921
+ return this.retrieveView(serialize.statistics, "statisticsView");
11922
+ }
11923
+
11924
+ accumulationQueueView(): AccumulationQueueView {
11925
+ return this.retrieveView(serialize.accumulationQueue, "accumulationQueueView");
11926
+ }
11927
+
11928
+ recentlyAccumulatedView(): RecentlyAccumulatedView {
11929
+ return this.retrieveView(serialize.recentlyAccumulated, "recentlyAccumulatedView");
11930
+ }
11931
+
11932
+ safroleDataView(): SafroleDataView {
11933
+ return this.retrieveView(serialize.safrole, "safroleDataView");
11934
+ }
11935
+
11936
+ getServiceInfoView(id: ServiceId): ServiceAccountInfoView | null {
11937
+ const serviceData = serialize.serviceData(id);
11938
+ const bytes = this.backend.get(serviceData.key);
11939
+ if (bytes === null) {
11940
+ return null;
11941
+ }
11942
+ if (!this.recentlyUsedServices.includes(id)) {
11943
+ this.recentlyUsedServices.push(id);
11944
+ }
11945
+ return Decoder.decodeObject(serviceData.Codec.View, bytes, this.spec);
11946
+ }
11947
+ }
11948
+
11949
+ type KeyAndCodecWithView<T, V> = {
11950
+ key: StateKey;
11951
+ Codec: CodecWithView<T, V>;
11952
+ };
11953
+
11610
11954
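A wiring sketch for the class above: the backend contract is a single `get`, so any key-value source will do (`db` and the `HashDictionary.new()` constructor are assumptions):

    const backend: SerializedStateBackend = {
      get: (key) => db.get(key) ?? null, // `db`: hypothetical key-value store
    };
    const stateView = new SerializedStateView(spec, backend, [], HashDictionary.new());
    const blocks = stateView.recentBlocksView(); // decoded once, then served from viewCache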
  /** What should be done with that key? */
11611
11955
  declare enum StateEntryUpdateAction {
11612
11956
  /** Insert an entry. */
@@ -11622,6 +11966,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
11622
11966
  /** Serialize given state update into a series of key-value pairs. */
11623
11967
  declare function* serializeStateUpdate(
11624
11968
  spec: ChainSpec,
11969
+ blake2b: Blake2b,
11625
11970
  update: Partial<State & ServicesUpdate>,
11626
11971
  ): Generator<StateEntryUpdate> {
11627
11972
  // first let's serialize all of the simple entries (if present!)
@@ -11630,9 +11975,9 @@ declare function* serializeStateUpdate(
11630
11975
  const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
11631
11976
 
11632
11977
  // then let's proceed with service updates
11633
- yield* serializeServiceUpdates(update.servicesUpdates, encode);
11634
- yield* serializePreimages(update.preimages, encode);
11635
- yield* serializeStorage(update.storage);
11978
+ yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
11979
+ yield* serializePreimages(update.preimages, encode, blake2b);
11980
+ yield* serializeStorage(update.storage, blake2b);
11636
11981
  yield* serializeRemovedServices(update.servicesRemoved);
11637
11982
  }
11638
11983
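Consuming the generator is a matter of handling each yielded `[action, key, value]` tuple, as produced by the helpers below (`db` is a hypothetical mutable backend):

    for (const [action, key, value] of serializeStateUpdate(spec, blake2b, update)) {
      if (action === StateEntryUpdateAction.Insert) {
        db.set(key, value);
      } else {
        db.delete(key); // StateEntryUpdateAction.Remove
      }
    }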
 
@@ -11644,18 +11989,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
11644
11989
  }
11645
11990
  }
11646
11991
 
11647
- declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
11992
+ declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
11648
11993
  for (const { action, serviceId } of storage ?? []) {
11649
11994
  switch (action.kind) {
11650
11995
  case UpdateStorageKind.Set: {
11651
11996
  const key = action.storage.key;
11652
- const codec = serialize.serviceStorage(serviceId, key);
11997
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
11653
11998
  yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
11654
11999
  break;
11655
12000
  }
11656
12001
  case UpdateStorageKind.Remove: {
11657
12002
  const key = action.key;
11658
- const codec = serialize.serviceStorage(serviceId, key);
12003
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
11659
12004
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11660
12005
  break;
11661
12006
  }
@@ -11665,16 +12010,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
11665
12010
  }
11666
12011
  }
11667
12012
 
11668
- declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, encode: EncodeFun): Generator<StateEntryUpdate> {
12013
+ declare function* serializePreimages(
12014
+ preimages: UpdatePreimage[] | undefined,
12015
+ encode: EncodeFun,
12016
+ blake2b: Blake2b,
12017
+ ): Generator<StateEntryUpdate> {
11669
12018
  for (const { action, serviceId } of preimages ?? []) {
11670
12019
  switch (action.kind) {
11671
12020
  case UpdatePreimageKind.Provide: {
11672
12021
  const { hash, blob } = action.preimage;
11673
- const codec = serialize.servicePreimages(serviceId, hash);
12022
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11674
12023
  yield [StateEntryUpdateAction.Insert, codec.key, blob];
11675
12024
 
11676
12025
  if (action.slot !== null) {
11677
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
12026
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
11678
12027
  yield [
11679
12028
  StateEntryUpdateAction.Insert,
11680
12029
  codec2.key,
@@ -11685,16 +12034,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
11685
12034
  }
11686
12035
  case UpdatePreimageKind.UpdateOrAdd: {
11687
12036
  const { hash, length, slots } = action.item;
11688
- const codec = serialize.serviceLookupHistory(serviceId, hash, length);
12037
+ const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11689
12038
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
11690
12039
  break;
11691
12040
  }
11692
12041
  case UpdatePreimageKind.Remove: {
11693
12042
  const { hash, length } = action;
11694
- const codec = serialize.servicePreimages(serviceId, hash);
12043
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11695
12044
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11696
12045
 
11697
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
12046
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11698
12047
  yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
11699
12048
  break;
11700
12049
  }
@@ -11706,6 +12055,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
11706
12055
  declare function* serializeServiceUpdates(
11707
12056
  servicesUpdates: UpdateService[] | undefined,
11708
12057
  encode: EncodeFun,
12058
+ blake2b: Blake2b,
11709
12059
  ): Generator<StateEntryUpdate> {
11710
12060
  for (const { action, serviceId } of servicesUpdates ?? []) {
11711
12061
  // new service being created or updated
@@ -11715,7 +12065,7 @@ declare function* serializeServiceUpdates(
11715
12065
  // additional lookup history update
11716
12066
  if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
11717
12067
  const { lookupHistory } = action;
11718
- const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
12068
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
11719
12069
  yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
11720
12070
  }
11721
12071
  }
@@ -11849,8 +12199,8 @@ declare class StateEntries {
11849
12199
  );
11850
12200
 
11851
12201
  /** Turn in-memory state into its serialized form. */
11852
- static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
11853
- return new StateEntries(convertInMemoryStateToDictionary(spec, state));
12202
+ static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
12203
+ return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
11854
12204
  }
11855
12205
 
11856
12206
  /**
@@ -11905,7 +12255,8 @@ declare class StateEntries {
11905
12255
  }
11906
12256
 
11907
12257
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
11908
- getRootHash(): StateRootHash {
12258
+ getRootHash(blake2b: Blake2b): StateRootHash {
12259
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
11909
12260
  const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
11910
12261
  for (const [key, value] of this) {
11911
12262
  leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
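End to end, the hasher is now threaded through both serialization and root computation; a sketch assuming an async context:

    const blake2b = await Blake2b.createHasher();
    const entries = StateEntries.serializeInMemory(spec, blake2b, state);
    const root = entries.getRootHash(blake2b); // StateRootHash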
@@ -11918,6 +12269,7 @@ declare class StateEntries {
11918
12269
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
11919
12270
  declare function convertInMemoryStateToDictionary(
11920
12271
  spec: ChainSpec,
12272
+ blake2b: Blake2b,
11921
12273
  state: InMemoryState,
11922
12274
  ): TruncatedHashDictionary<StateKey, BytesBlob> {
11923
12275
  const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11950,20 +12302,25 @@ declare function convertInMemoryStateToDictionary(
11950
12302
 
11951
12303
  // preimages
11952
12304
  for (const preimage of service.data.preimages.values()) {
11953
- const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
12305
+ const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
11954
12306
  serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
11955
12307
  }
11956
12308
 
11957
12309
  // storage
11958
12310
  for (const storage of service.data.storage.values()) {
11959
- const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
12311
+ const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
11960
12312
  serialized.set(key, Encoder.encodeObject(Codec, storage.value));
11961
12313
  }
11962
12314
 
11963
12315
  // lookup history
11964
12316
  for (const lookupHistoryList of service.data.lookupHistory.values()) {
11965
12317
  for (const lookupHistory of lookupHistoryList) {
11966
- const { key, Codec } = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
12318
+ const { key, Codec } = serialize.serviceLookupHistory(
12319
+ blake2b,
12320
+ serviceId,
12321
+ lookupHistory.hash,
12322
+ lookupHistory.length,
12323
+ );
11967
12324
  serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
11968
12325
  }
11969
12326
  }
@@ -11972,16 +12329,6 @@ declare function convertInMemoryStateToDictionary(
11972
12329
  return serialized;
11973
12330
  }
11974
12331
 
11975
- /**
11976
- * Abstraction over some backend containing serialized state entries.
11977
- *
11978
- * This may or may not be backed by some on-disk database or can be just stored in memory.
11979
- */
11980
- interface SerializedStateBackend {
11981
- /** Retrieve given state key. */
11982
- get(key: StateKey): BytesBlob | null;
11983
- }
11984
-
11985
12332
  /**
11986
12333
  * State object which reads its entries from some backend.
11987
12334
  *
@@ -11991,27 +12338,32 @@ interface SerializedStateBackend {
11991
12338
  * in the backend layer, so it MAY fail during runtime.
11992
12339
  */
11993
12340
  declare class SerializedState<T extends SerializedStateBackend = SerializedStateBackend>
11994
- implements State, EnumerableState
12341
+ implements State, WithStateView, EnumerableState
11995
12342
  {
11996
12343
  /** Create a state-like object from collection of serialized entries. */
11997
- static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
11998
- return new SerializedState(spec, state, recentServices);
12344
+ static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
12345
+ return new SerializedState(spec, blake2b, state, recentServices);
11999
12346
  }
12000
12347
 
12001
12348
  /** Create a state-like object backed by some DB. */
12002
12349
  static new<T extends SerializedStateBackend>(
12003
12350
  spec: ChainSpec,
12351
+ blake2b: Blake2b,
12004
12352
  db: T,
12005
12353
  recentServices: ServiceId[] = [],
12006
12354
  ): SerializedState<T> {
12007
- return new SerializedState(spec, db, recentServices);
12355
+ return new SerializedState(spec, blake2b, db, recentServices);
12008
12356
  }
12009
12357
 
12358
+ private dataCache: HashDictionary<StateKey, unknown> = HashDictionary.new();
12359
+ private viewCache: HashDictionary<StateKey, unknown> = HashDictionary.new();
12360
+
12010
12361
  private constructor(
12011
12362
  private readonly spec: ChainSpec,
12363
+ private readonly blake2b: Blake2b,
12012
12364
  public backend: T,
12013
12365
  /** Best-effort list of recently active services. */
12014
- private readonly _recentServiceIds: ServiceId[],
12366
+ private readonly recentlyUsedServices: ServiceId[],
12015
12367
  ) {}
12016
12368
 
12017
12369
  /** Comparing the serialized states, just means comparing their backends. */
@@ -12019,14 +12371,21 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12019
12371
  return this.backend;
12020
12372
  }
12021
12373
 
12374
+ /** Return a non-decoding version of the state. */
12375
+ view(): StateView {
12376
+ return new SerializedStateView(this.spec, this.backend, this.recentlyUsedServices, this.viewCache);
12377
+ }
12378
+
12022
12379
  // TODO [ToDr] Temporary method to update the state,
12023
12380
  // without changing references.
12024
12381
  public updateBackend(newBackend: T) {
12025
12382
  this.backend = newBackend;
12383
+ this.dataCache = HashDictionary.new();
12384
+ this.viewCache = HashDictionary.new();
12026
12385
  }
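Because decoded values are memoized, swapping the backend also drops both caches; a usage sketch (`dbV1`/`dbV2` are hypothetical backends):

    const state = SerializedState.new(spec, blake2b, dbV1);
    const a = state.availabilityAssignment; // decoded once, then served from dataCache
    state.updateBackend(dbV2);              // backend swapped, caches reset
    const b = state.availabilityAssignment; // re-read and re-decoded from dbV2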
12027
12386
 
12028
12387
  recentServiceIds(): readonly ServiceId[] {
12029
- return this._recentServiceIds;
12388
+ return this.recentlyUsedServices;
12030
12389
  }
12031
12390
 
12032
12391
  getService(id: ServiceId): SerializedService | null {
@@ -12035,27 +12394,33 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12035
12394
  return null;
12036
12395
  }
12037
12396
 
12038
- if (!this._recentServiceIds.includes(id)) {
12039
- this._recentServiceIds.push(id);
12397
+ if (!this.recentlyUsedServices.includes(id)) {
12398
+ this.recentlyUsedServices.push(id);
12040
12399
  }
12041
12400
 
12042
- return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
12401
+ return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
12043
12402
  }
12044
12403
 
12045
- private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
12046
- const bytes = this.backend.get(key);
12047
- if (bytes === null) {
12048
- throw new Error(`Required state entry for ${description} is missing!. Accessing key: ${key}`);
12404
+ private retrieve<T>(k: KeyAndCodec<T>, description: string): T {
12405
+ const data = this.retrieveOptional(k);
12406
+ if (data === undefined) {
12407
+ throw new Error(`Required state entry for ${description} is missing! Accessing key: ${k.key}`);
12049
12408
  }
12050
- return Decoder.decodeObject(Codec, bytes, this.spec);
12409
+ return data;
12051
12410
  }
12052
12411
 
12053
12412
  private retrieveOptional<T>({ key, Codec }: KeyAndCodec<T>): T | undefined {
12413
+ const cached = this.dataCache.get(key);
12414
+ if (cached !== undefined) {
12415
+ return cached as T;
12416
+ }
12054
12417
  const bytes = this.backend.get(key);
12055
12418
  if (bytes === null) {
12056
12419
  return undefined;
12057
12420
  }
12058
- return Decoder.decodeObject(Codec, bytes, this.spec);
12421
+ const data = Decoder.decodeObject(Codec, bytes, this.spec);
12422
+ this.dataCache.set(key, data);
12423
+ return data;
12059
12424
  }
12060
12425
 
12061
12426
  get availabilityAssignment(): State["availabilityAssignment"] {
@@ -12138,6 +12503,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12138
12503
  /** Service data representation on a serialized state. */
12139
12504
  declare class SerializedService implements Service {
12140
12505
  constructor(
12506
+ public readonly blake2b: Blake2b,
12141
12507
  /** Service id */
12142
12508
  public readonly serviceId: ServiceId,
12143
12509
  private readonly accountInfo: ServiceAccountInfo,
@@ -12153,14 +12519,14 @@ declare class SerializedService implements Service {
12153
12519
  getStorage(rawKey: StorageKey): BytesBlob | null {
12154
12520
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
12155
12521
  const SERVICE_ID_BYTES = 4;
12156
- const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
12522
+ const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
12157
12523
  serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
12158
12524
  serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
12159
- const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
12160
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
12525
+ const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
12526
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
12161
12527
  }
12162
12528
 
12163
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
12529
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
12164
12530
  }
12165
12531
 
12166
12532
  /**
@@ -12170,17 +12536,17 @@ declare class SerializedService implements Service {
12170
12536
  */
12171
12537
  hasPreimage(hash: PreimageHash): boolean {
12172
12538
  // TODO [ToDr] consider optimizing to avoid fetching the whole data.
12173
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
12539
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
12174
12540
  }
12175
12541
 
12176
12542
  /** Retrieve preimage from the DB. */
12177
12543
  getPreimage(hash: PreimageHash): BytesBlob | null {
12178
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
12544
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
12179
12545
  }
12180
12546
 
12181
12547
  /** Retrieve preimage lookup history. */
12182
12548
  getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
12183
- const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
12549
+ const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
12184
12550
  if (rawSlots === undefined) {
12185
12551
  return null;
12186
12552
  }
@@ -12193,9 +12559,9 @@ type KeyAndCodec<T> = {
12193
12559
  Codec: Decode<T>;
12194
12560
  };
12195
12561
 
12196
- declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
12562
+ declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
12197
12563
  const stateEntries = StateEntries.fromEntriesUnsafe(entries);
12198
- return SerializedState.fromStateEntries(spec, stateEntries);
12564
+ return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
12199
12565
  }
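A sketch of hydrating a state from raw entries, e.g. a state snapshot (`entries` is any iterable matching the signature above):

    const blake2b = await Blake2b.createHasher();
    const state = loadState(spec, blake2b, entries);
    const service = state.getService(serviceId); // entries decode lazily and are cached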
12200
12566
 
12201
12567
  /**
@@ -12227,12 +12593,15 @@ declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | Trunca
12227
12593
  declare const index$d_EMPTY_BLOB: typeof EMPTY_BLOB;
12228
12594
  type index$d_EncodeFun = EncodeFun;
12229
12595
  type index$d_KeyAndCodec<T> = KeyAndCodec<T>;
12596
+ type index$d_KeyAndCodecWithView<T, V> = KeyAndCodecWithView<T, V>;
12230
12597
  type index$d_SerializedService = SerializedService;
12231
12598
  declare const index$d_SerializedService: typeof SerializedService;
12232
12599
  type index$d_SerializedState<T extends SerializedStateBackend = SerializedStateBackend> = SerializedState<T>;
12233
12600
  declare const index$d_SerializedState: typeof SerializedState;
12234
12601
  type index$d_SerializedStateBackend = SerializedStateBackend;
12235
- type index$d_StateCodec<T> = StateCodec<T>;
12602
+ type index$d_SerializedStateView<T extends SerializedStateBackend> = SerializedStateView<T>;
12603
+ declare const index$d_SerializedStateView: typeof SerializedStateView;
12604
+ type index$d_StateCodec<T, V = T> = StateCodec<T, V>;
12236
12605
  type index$d_StateEntries = StateEntries;
12237
12606
  declare const index$d_StateEntries: typeof StateEntries;
12238
12607
  type index$d_StateEntryUpdate = StateEntryUpdate;
@@ -12260,8 +12629,8 @@ declare const index$d_serializeStorage: typeof serializeStorage;
12260
12629
  declare const index$d_stateEntriesSequenceCodec: typeof stateEntriesSequenceCodec;
12261
12630
  import index$d_stateKeys = stateKeys;
12262
12631
  declare namespace index$d {
12263
- export { index$d_EMPTY_BLOB as EMPTY_BLOB, index$d_SerializedService as SerializedService, index$d_SerializedState as SerializedState, index$d_StateEntries as StateEntries, index$d_StateEntryUpdateAction as StateEntryUpdateAction, index$d_StateKeyIdx as StateKeyIdx, index$d_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$d_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$d_U32_BYTES as U32_BYTES, index$d_binaryMerkleization as binaryMerkleization, index$d_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$d_dumpCodec as dumpCodec, index$d_getSafroleData as getSafroleData, index$d_legacyServiceNested as legacyServiceNested, index$d_loadState as loadState, index$d_serialize as serialize, index$d_serializeBasicKeys as serializeBasicKeys, index$d_serializePreimages as serializePreimages, index$d_serializeRemovedServices as serializeRemovedServices, index$d_serializeServiceUpdates as serializeServiceUpdates, index$d_serializeStateUpdate as serializeStateUpdate, index$d_serializeStorage as serializeStorage, index$d_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$d_stateKeys as stateKeys };
12264
- export type { index$d_EncodeFun as EncodeFun, index$d_KeyAndCodec as KeyAndCodec, index$d_SerializedStateBackend as SerializedStateBackend, index$d_StateCodec as StateCodec, index$d_StateEntryUpdate as StateEntryUpdate, index$d_StateKey as StateKey };
12632
+ export { index$d_EMPTY_BLOB as EMPTY_BLOB, index$d_SerializedService as SerializedService, index$d_SerializedState as SerializedState, index$d_SerializedStateView as SerializedStateView, index$d_StateEntries as StateEntries, index$d_StateEntryUpdateAction as StateEntryUpdateAction, index$d_StateKeyIdx as StateKeyIdx, index$d_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$d_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$d_U32_BYTES as U32_BYTES, index$d_binaryMerkleization as binaryMerkleization, index$d_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$d_dumpCodec as dumpCodec, index$d_getSafroleData as getSafroleData, index$d_legacyServiceNested as legacyServiceNested, index$d_loadState as loadState, index$d_serialize as serialize, index$d_serializeBasicKeys as serializeBasicKeys, index$d_serializePreimages as serializePreimages, index$d_serializeRemovedServices as serializeRemovedServices, index$d_serializeServiceUpdates as serializeServiceUpdates, index$d_serializeStateUpdate as serializeStateUpdate, index$d_serializeStorage as serializeStorage, index$d_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$d_stateKeys as stateKeys };
12633
+ export type { index$d_EncodeFun as EncodeFun, index$d_KeyAndCodec as KeyAndCodec, index$d_KeyAndCodecWithView as KeyAndCodecWithView, index$d_SerializedStateBackend as SerializedStateBackend, index$d_StateCodec as StateCodec, index$d_StateEntryUpdate as StateEntryUpdate, index$d_StateKey as StateKey };
12265
12634
  }
12266
12635
 
12267
12636
  /** Error during `LeafDb` creation. */
@@ -12351,7 +12720,8 @@ declare class LeafDb implements SerializedStateBackend {
12351
12720
  assertNever(val);
12352
12721
  }
12353
12722
 
12354
- getStateRoot(): StateRootHash {
12723
+ getStateRoot(blake2b: Blake2b): StateRootHash {
12724
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
12355
12725
  return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
12356
12726
  }
12357
12727
 
@@ -12449,12 +12819,13 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
12449
12819
  }
12450
12820
 
12451
12821
  async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
12452
- return StateEntries.serializeInMemory(this.spec, state).getRootHash();
12822
+ const blake2b = await Blake2b.createHasher();
12823
+ return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
12453
12824
  }
12454
12825
 
12455
12826
  /** Insert a full state into the database. */
12456
12827
  async insertState(headerHash: HeaderHash, state: InMemoryState): Promise<Result$2<OK, StateUpdateError>> {
12457
- const encoded = Encoder.encodeObject(inMemoryStateCodec, state, this.spec);
12828
+ const encoded = Encoder.encodeObject(inMemoryStateCodec(this.spec), state, this.spec);
12458
12829
  this.db.set(headerHash, encoded);
12459
12830
  return Result.ok(OK);
12460
12831
  }
@@ -12465,7 +12836,7 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
12465
12836
  return null;
12466
12837
  }
12467
12838
 
12468
- return Decoder.decodeObject(inMemoryStateCodec, encodedState, this.spec);
12839
+ return Decoder.decodeObject(inMemoryStateCodec(this.spec), encodedState, this.spec);
12469
12840
  }
12470
12841
  }
12471
12842
 
@@ -12554,7 +12925,7 @@ declare function padAndEncodeData(input: BytesBlob) {
12554
12925
  const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
12555
12926
  let padded = input;
12556
12927
  if (input.length !== paddedLength) {
12557
- padded = BytesBlob.blobFrom(new Uint8Array(paddedLength));
12928
+ padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
12558
12929
  padded.raw.set(input.raw, 0);
12559
12930
  }
12560
12931
  return chunkingFunction(padded);
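The padding rounds the input up to a whole number of pieces. For illustration only, assuming a piece size of 4104 bytes (the actual `PIECE_SIZE` is defined elsewhere in the package):

    // 10_000 bytes round up to Math.ceil(10000 / 4104) * 4104 = 3 * 4104 = 12312 bytes;
    // the 2312-byte tail stays zeroed, as safeAllocUint8Array returns a zero-filled buffer.
    const paddedLength = Math.ceil(10_000 / 4_104) * 4_104; // 12312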
@@ -12610,7 +12981,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
12610
12981
  */
12611
12982
  declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
12612
12983
  const result: Bytes<POINT_LENGTH>[] = [];
12613
- const data = new Uint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
12984
+ const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
12614
12985
 
12615
12986
  // add original shards to the result
12616
12987
  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12630,7 +13001,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
12630
13001
  for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
12631
13002
  const pointIndex = i * POINT_ALIGNMENT;
12632
13003
 
12633
- const redundancyPoint = new Uint8Array(POINT_LENGTH);
13004
+ const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
12634
13005
  for (let j = 0; j < POINT_LENGTH; j++) {
12635
13006
  redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
12636
13007
  }
@@ -12650,7 +13021,7 @@ declare function decodePiece(
12650
13021
  ): Bytes<PIECE_SIZE> {
12651
13022
  const result = Bytes.zero(PIECE_SIZE);
12652
13023
 
12653
- const data = new Uint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
13024
+ const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
12654
13025
  const indices = new Uint16Array(input.length);
12655
13026
 
12656
13027
  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12777,7 +13148,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
12777
13148
  return BytesBlob.empty();
12778
13149
  }
12779
13150
  const n = input[0].length;
12780
- const result = BytesBlob.blobFrom(new Uint8Array(k * n));
13151
+ const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
12781
13152
  for (let i = 0; i < k; i++) {
12782
13153
  const entry = input[i].raw;
12783
13154
  for (let j = 0; j < n; j++) {
@@ -13620,6 +13991,8 @@ declare enum NewServiceError {
13620
13991
  InsufficientFunds = 0,
13621
13992
  /** Service is not privileged to set gratis storage. */
13622
13993
  UnprivilegedService = 1,
13994
+ /** Registrar attempted to create a service with an already existing id. */
13995
+ RegistrarServiceIdAlreadyTaken = 2,
13623
13996
  }
13624
13997
 
13625
13998
  declare enum UpdatePrivilegesError {
@@ -13675,25 +14048,28 @@ interface PartialState {
13675
14048
 
13676
14049
  /**
13677
14050
  * Transfer given `amount` of funds to the `destination`,
13678
- * passing `suppliedGas` to invoke `OnTransfer` entry point
13679
- * and given `memo`.
14051
+ * passing the `gas` fee for the transfer and the given `memo`.
13680
14052
  */
13681
14053
  transfer(
13682
14054
  destination: ServiceId | null,
13683
14055
  amount: U64,
13684
- suppliedGas: ServiceGas,
14056
+ gas: ServiceGas,
13685
14057
  memo: Bytes<TRANSFER_MEMO_BYTES>,
13686
14058
  ): Result$2<OK, TransferError>;
13687
14059
 
13688
14060
  /**
13689
- * Create a new service with given codeHash, length, gas, allowance and gratisStorage.
14061
+ * Create a new service with given codeHash, length, gas, allowance, gratisStorage and wantedServiceId.
13690
14062
  *
13691
- * Returns a newly assigned id of that service.
13692
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/2f4c022f4c02?v=0.6.7
14063
+ * Returns the newly assigned id,
14064
+ * or `wantedServiceId` if it is lower than `S`
14065
+ * and the creating (parent) service is the `Registrar`.
14066
+ *
14067
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/2fa9042fc304?v=0.7.2
13693
14068
  *
13694
14069
  * An error can be returned in case the account does not
13695
14070
  * have the required balance
13696
- * or tries to set gratis storage without being privileged.
14071
+ * or tries to set gratis storage without being the `Manager`,
14072
+ * or the `Registrar` tries to use a service id that's already taken.
13697
14073
  */
13698
14074
  newService(
13699
14075
  codeHash: CodeHash,
@@ -13701,6 +14077,7 @@ interface PartialState {
13701
14077
  gas: ServiceGas,
13702
14078
  allowance: ServiceGas,
13703
14079
  gratisStorage: U64,
14080
+ wantedServiceId: U64,
13704
14081
  ): Result$2<ServiceId, NewServiceError>;
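A sketch of registrar-driven creation (names hypothetical; `partialState` implements the `PartialState` interface above):

    const result = partialState.newService(codeHash, codeLength, gas, allowance, gratisStorage, wantedServiceId);
    if (result.isError && result.error === NewServiceError.RegistrarServiceIdAlreadyTaken) {
      // the requested low id is taken: retry with another id or fall back to auto-assignment
    }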
13705
14082
 
13706
14083
  /** Upgrade code of currently running service. */
@@ -13721,8 +14098,8 @@ interface PartialState {
13721
14098
  /** Update authorization queue for given core and authorize a service for this core. */
13722
14099
  updateAuthorizationQueue(
13723
14100
  coreIndex: CoreIndex,
13724
- authQueue: FixedSizeArray<Blake2bHash, AUTHORIZATION_QUEUE_SIZE>,
13725
- authManager: ServiceId | null,
14101
+ authQueue: AuthorizationQueue,
14102
+ assigner: ServiceId | null,
13726
14103
  ): Result$2<OK, UpdatePrivilegesError>;
13727
14104
 
13728
14105
  /**
@@ -13731,14 +14108,16 @@ interface PartialState {
13731
14108
  * `m`: manager service (can change privileged services)
13732
14109
  * `a`: manages authorization queue
13733
14110
  * `v`: manages validator keys
13734
- * `g`: collection of serviceId -> gas that auto-accumulate every block
14111
+ * `r`: manages creation of new services in the protected id range.
14112
+ * `z`: collection of serviceId -> gas that auto-accumulate every block
13735
14113
  *
13736
14114
  */
13737
14115
  updatePrivilegedServices(
13738
14116
  m: ServiceId | null,
13739
14117
  a: PerCore<ServiceId>,
13740
14118
  v: ServiceId | null,
13741
- g: [ServiceId, ServiceGas][],
14119
+ r: ServiceId | null,
14120
+ z: [ServiceId, ServiceGas][],
13742
14121
  ): Result$2<OK, UpdatePrivilegesError>;
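An illustrative call matching the doc comment above (all ids hypothetical):

    partialState.updatePrivilegedServices(
      manager,            // m: may change the privileged services
      assigners,          // a: one assigner service per core
      delegator,          // v: manages validator keys
      registrar,          // r: creates services within the protected id range
      [[serviceId, gas]], // z: auto-accumulated every block with the given gas
    );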
13743
14122
 
13744
14123
  /** Yield accumulation trie result hash. */
@@ -13850,7 +14229,7 @@ declare class Mask {
13850
14229
  }
13851
14230
 
13852
14231
  private buildLookupTableForward(mask: BitVec) {
13853
- const table = new Uint8Array(mask.bitLength);
14232
+ const table = safeAllocUint8Array(mask.bitLength);
13854
14233
  let lastInstructionOffset = 0;
13855
14234
  for (let i = mask.bitLength - 1; i >= 0; i--) {
13856
14235
  if (mask.isSet(i)) {
@@ -13994,7 +14373,7 @@ declare class Registers {
13994
14373
  private asSigned: BigInt64Array;
13995
14374
  private asUnsigned: BigUint64Array;
13996
14375
 
13997
- constructor(private readonly bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
14376
+ constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
13998
14377
  check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
13999
14378
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
14000
14379
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
@@ -17446,6 +17825,29 @@ declare class Interpreter {
17446
17825
  getMemoryPage(pageNumber: number): null | Uint8Array {
17447
17826
  return this.memory.getPageDump(tryAsPageNumber(pageNumber));
17448
17827
  }
17828
+
17829
+ calculateBlockGasCost(): Map<string, number> {
17830
+ const codeLength = this.code.length;
17831
+ const blocks: Map<string, number> = new Map();
17832
+ let currentBlock = "0";
17833
+ let gasCost = 0;
17834
+ const getNextInstructionIndex = (index: number) => index + 1 + this.mask.getNoOfBytesToNextInstruction(index + 1);
17835
+
17836
+ for (let index = 0; index < codeLength; index = getNextInstructionIndex(index)) {
17837
+ const instruction = this.code[index];
17838
+ if (this.basicBlocks.isBeginningOfBasicBlock(index)) {
17839
+ blocks.set(currentBlock, gasCost);
17840
+ currentBlock = index.toString();
17841
+ gasCost = 0;
17842
+ }
17843
+
17844
+ gasCost += instructionGasMap[instruction];
17845
+ }
17846
+
17847
+ blocks.set(currentBlock, gasCost);
17848
+
17849
+ return blocks;
17850
+ }
17449
17851
  }
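The returned map is keyed by the offset (as a string) of the instruction that begins each basic block; a usage sketch summing the static cost of a program:

    const costs = interpreter.calculateBlockGasCost();
    let totalGas = 0;
    for (const [blockStart, blockGas] of costs) {
      totalGas += blockGas; // blockStart is the offset of the block's first instruction
    }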
17450
17852
 
17451
17853
  type index$8_BigGas = BigGas;
@@ -17656,7 +18058,7 @@ declare class AccumulationStateUpdate {
17656
18058
  /** Services state updates. */
17657
18059
  public readonly services: ServicesUpdate,
17658
18060
  /** Pending transfers. */
17659
- public readonly transfers: PendingTransfer[],
18061
+ public transfers: PendingTransfer[],
17660
18062
  /** Yielded accumulation root. */
17661
18063
  public readonly yieldedRoots: Map<ServiceId, OpaqueHash> = new Map(),
17662
18064
  ) {}
@@ -17707,11 +18109,18 @@ declare class AccumulationStateUpdate {
17707
18109
  if (from.privilegedServices !== null) {
17708
18110
  update.privilegedServices = PrivilegedServices.create({
17709
18111
  ...from.privilegedServices,
17710
- authManager: asKnownSize([...from.privilegedServices.authManager]),
18112
+ assigners: asKnownSize([...from.privilegedServices.assigners]),
17711
18113
  });
17712
18114
  }
17713
18115
  return update;
17714
18116
  }
18117
+
18118
+ /** Retrieve and clear pending transfers. */
18119
+ takeTransfers() {
18120
+ const transfers = this.transfers;
18121
+ this.transfers = [];
18122
+ return transfers;
18123
+ }
17715
18124
  }
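`takeTransfers` is a drain: the first call returns the accumulated transfers and leaves the update empty, so a subsequent call yields `[]`. Sketch:

    const pending = stateUpdate.takeTransfers(); // PendingTransfer[]
    for (const transfer of pending) {
      // dispatch the transfer to the destination service
    }
    // stateUpdate.takeTransfers() now returns []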
17716
18125
 
17717
18126
  type StateSlice = Pick<State, "getService" | "privilegedServices">;
@@ -17978,7 +18387,7 @@ declare const HostCallResult = {
17978
18387
  OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
17979
18388
  /** Index unknown. */
17980
18389
  WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
17981
- /** Storage full. */
18390
+ /** Storage full or resource already allocated. */
17982
18391
  FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
17983
18392
  /** Core index unknown. */
17984
18393
  CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
@@ -17986,7 +18395,7 @@ declare const HostCallResult = {
17986
18395
  CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
17987
18396
  /** Gas limit too low. */
17988
18397
  LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
17989
- /** The item is already solicited or cannot be forgotten. */
18398
+ /** The item is already solicited, cannot be forgotten, or the operation is invalid at the current privilege level. */
17990
18399
  HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
17991
18400
  /** The return value indicating general success. */
17992
18401
  OK: tryAsU64(0n),
@@ -18052,9 +18461,15 @@ type HostCallIndex = Opaque<U32, "HostCallIndex[U32]">;
18052
18461
  /** Attempt to convert a number into `HostCallIndex`. */
18053
18462
  declare const tryAsHostCallIndex = (v: number): HostCallIndex => asOpaqueType(tryAsU32(v));
18054
18463
 
18464
+ /**
18465
+ * Host-call exit reason.
18466
+ *
18467
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/24a30124a501?v=0.7.2
18468
+ */
18055
18469
  declare enum PvmExecution {
18056
18470
  Halt = 0,
18057
18471
  Panic = 1,
18472
+ OOG = 2, // out-of-gas
18058
18473
  }
18059
18474
 
18060
18475
  /** A utility function to easily trace a bunch of registers. */
@@ -18067,8 +18482,12 @@ interface HostCallHandler {
18067
18482
  /** Index of that host call (i.e. what PVM invokes via `ecalli`) */
18068
18483
  readonly index: HostCallIndex;
18069
18484
 
18070
- /** The gas cost of invocation of that host call. */
18071
- readonly gasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
18485
+ /**
18486
+ * The gas cost of invocation of that host call.
18487
+ *
18488
+ * NOTE: the `((reg: IHostCallRegisters) => Gas)` form is kept for compatibility with pre-0.7.2 GP.
18489
+ */
18490
+ readonly basicGasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
18072
18491
 
18073
18492
  /** Currently executing service id. */
18074
18493
  readonly currentServiceId: U32;
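A sketch of the register-dependent form (the pre-0.7.2 escape hatch); `tryAsHostCallIndex` and `tryAsGas` are the helpers used elsewhere in this file, and the register layout is illustrative:

    const handler: Pick<HostCallHandler, "index" | "basicGasCost"> = {
      index: tryAsHostCallIndex(7),
      // charge a base fee plus one gas per byte of the length passed in register 8
      basicGasCost: (regs) => tryAsGas(10n + BigInt(regs.getLowerU32(8))),
    };

Handlers with a fixed cost simply assign a `SmallGas` value to `basicGasCost` instead.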
@@ -18211,7 +18630,7 @@ declare class HostCalls {
18211
18630
  const maybeAddress = regs.getLowerU32(7);
18212
18631
  const maybeLength = regs.getLowerU32(8);
18213
18632
 
18214
- const result = new Uint8Array(maybeLength);
18633
+ const result = safeAllocUint8Array(maybeLength);
18215
18634
  const startAddress = tryAsMemoryIndex(maybeAddress);
18216
18635
  const loadResult = memory.loadInto(result, startAddress);
18217
18636
 
@@ -18244,8 +18663,10 @@ declare class HostCalls {
18244
18663
 
18245
18664
  const hostCall = this.hostCalls.get(index);
18246
18665
  const gasBefore = gas.get();
18247
- const gasCost = typeof hostCall.gasCost === "number" ? hostCall.gasCost : hostCall.gasCost(regs);
18248
- const underflow = gas.sub(gasCost);
18666
+ // NOTE: the `basicGasCost(regs)` function form is kept for compatibility with pre-0.7.2 GP.
18667
+ const basicGasCost =
18668
+ typeof hostCall.basicGasCost === "number" ? hostCall.basicGasCost : hostCall.basicGasCost(regs);
18669
+ const underflow = gas.sub(basicGasCost);
18249
18670
 
18250
18671
  const pcLog = `[PC: ${pvmInstance.getPC()}]`;
18251
18672
  if (underflow) {
@@ -18272,6 +18693,11 @@ declare class HostCalls {
18272
18693
  return this.getReturnValue(status, pvmInstance);
18273
18694
  }
18274
18695
 
18696
+ if (result === PvmExecution.OOG) {
18697
+ status = Status.OOG;
18698
+ return this.getReturnValue(status, pvmInstance);
18699
+ }
18700
+
18275
18701
  if (result === undefined) {
18276
18702
  pvmInstance.runProgram();
18277
18703
  status = pvmInstance.getStatus();
@@ -18643,7 +19069,7 @@ declare class DebuggerAdapter {
18643
19069
 
18644
19070
  if (page === null) {
18645
19071
  // page wasn't allocated so we return an empty page
18646
- return new Uint8Array(PAGE_SIZE);
19072
+ return safeAllocUint8Array(PAGE_SIZE);
18647
19073
  }
18648
19074
 
18649
19075
  if (page.length === PAGE_SIZE) {
@@ -18652,7 +19078,7 @@ declare class DebuggerAdapter {
18652
19078
  }
18653
19079
 
18654
19080
  // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
18655
- const fullPage = new Uint8Array(PAGE_SIZE);
19081
+ const fullPage = safeAllocUint8Array(PAGE_SIZE);
18656
19082
  fullPage.set(page);
18657
19083
  return fullPage;
18658
19084
  }
@@ -18845,10 +19271,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
18845
19271
  *
18846
19272
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
18847
19273
  */
18848
- declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
19274
+ declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
18849
19275
  check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
18850
19276
  const n = arr.length;
18851
- const randomNumbers = hashToNumberSequence(entropy, arr.length);
19277
+ const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
18852
19278
  const result: T[] = new Array<T>(n);
18853
19279
 
18854
19280
  let itemsLeft = n;
@@ -18874,6 +19300,7 @@ declare namespace index$2 {
18874
19300
  declare class JsonServiceInfo {
18875
19301
  static fromJson = json.object<JsonServiceInfo, ServiceAccountInfo>(
18876
19302
  {
19303
+ ...(Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) ? { version: "number" } : {}),
18877
19304
  code_hash: fromJson.bytes32(),
18878
19305
  balance: json.fromNumber((x) => tryAsU64(x)),
18879
19306
  min_item_gas: json.fromNumber((x) => tryAsServiceGas(x)),
@@ -18912,6 +19339,7 @@ declare class JsonServiceInfo {
18912
19339
  },
18913
19340
  );
18914
19341
 
19342
+ version?: number;
18915
19343
  code_hash!: CodeHash;
18916
19344
  balance!: U64;
18917
19345
  min_item_gas!: ServiceGas;
@@ -18958,6 +19386,19 @@ declare const lookupMetaFromJson = json.object<JsonLookupMeta, LookupHistoryItem
18958
19386
  ({ key, value }) => new LookupHistoryItem(key.hash, key.length, value),
18959
19387
  );
18960
19388
 
19389
+ declare const preimageStatusFromJson = json.object<JsonPreimageStatus, LookupHistoryItem>(
19390
+ {
19391
+ hash: fromJson.bytes32(),
19392
+ status: json.array("number"),
19393
+ },
19394
+ ({ hash, status }) => new LookupHistoryItem(hash, tryAsU32(0), status),
19395
+ );
19396
+
19397
+ type JsonPreimageStatus = {
19398
+ hash: PreimageHash;
19399
+ status: LookupHistorySlots;
19400
+ };
19401
+
18961
19402
  type JsonLookupMeta = {
18962
19403
  key: {
18963
19404
  hash: PreimageHash;
@@ -18970,21 +19411,34 @@ declare class JsonService {
18970
19411
  static fromJson = json.object<JsonService, InMemoryService>(
18971
19412
  {
18972
19413
  id: "number",
18973
- data: {
18974
- service: JsonServiceInfo.fromJson,
18975
- preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
18976
- storage: json.optional(json.array(JsonStorageItem.fromJson)),
18977
- lookup_meta: json.optional(json.array(lookupMetaFromJson)),
18978
- },
19414
+ data: Compatibility.isLessThan(GpVersion.V0_7_1)
19415
+ ? {
19416
+ service: JsonServiceInfo.fromJson,
19417
+ preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
19418
+ storage: json.optional(json.array(JsonStorageItem.fromJson)),
19419
+ lookup_meta: json.optional(json.array(lookupMetaFromJson)),
19420
+ }
19421
+ : {
19422
+ service: JsonServiceInfo.fromJson,
19423
+ storage: json.optional(json.array(JsonStorageItem.fromJson)),
19424
+ preimages_blob: json.optional(json.array(JsonPreimageItem.fromJson)),
19425
+ preimages_status: json.optional(json.array(preimageStatusFromJson)),
19426
+ },
18979
19427
  },
18980
19428
  ({ id, data }) => {
19429
+ const preimages = HashDictionary.fromEntries(
19430
+ (data.preimages ?? data.preimages_blob ?? []).map((x) => [x.hash, x]),
19431
+ );
19432
+
18981
19433
  const lookupHistory = HashDictionary.new<PreimageHash, LookupHistoryItem[]>();
18982
- for (const item of data.lookup_meta ?? []) {
19434
+
19435
+ for (const item of data.lookup_meta ?? data.preimages_status ?? []) {
18983
19436
  const data = lookupHistory.get(item.hash) ?? [];
18984
- data.push(item);
19437
+ const length = tryAsU32(preimages.get(item.hash)?.blob.length ?? item.length);
19438
+ data.push(new LookupHistoryItem(item.hash, length, item.slots));
18985
19439
  lookupHistory.set(item.hash, data);
18986
19440
  }
18987
- const preimages = HashDictionary.fromEntries((data.preimages ?? []).map((x) => [x.hash, x]));
19441
+
18988
19442
  const storage = new Map<string, StorageItem>();
18989
19443
 
18990
19444
  const entries = (data.storage ?? []).map(({ key, value }) => {
@@ -19011,6 +19465,8 @@ declare class JsonService {
19011
19465
  preimages?: JsonPreimageItem[];
19012
19466
  storage?: JsonStorageItem[];
19013
19467
  lookup_meta?: LookupHistoryItem[];
19468
+ preimages_blob?: JsonPreimageItem[];
19469
+ preimages_status?: LookupHistoryItem[];
19014
19470
  };
19015
19471
  }
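For reference, the two JSON shapes this maps between (hypothetical, abbreviated values):

    // pre-0.7.1 test vectors:
    const legacy = {
      id: 1,
      data: {
        service: { /* JsonServiceInfo fields */ },
        preimages: [{ hash: "0x…", blob: "0x…" }],
        lookup_meta: [{ key: { hash: "0x…", length: 4 }, value: [42] }],
      },
    };
    // 0.7.1+ test vectors: blob and status are split, and the status entry carries
    // no length; it is recovered from the matching preimages_blob entry:
    const modern = {
      id: 1,
      data: {
        service: { version: 0 /* plus the remaining JsonServiceInfo fields */ },
        preimages_blob: [{ hash: "0x…", blob: "0x…" }],
        preimages_status: [{ hash: "0x…", status: [42] }],
      },
    };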
19016
19472
 
@@ -19020,8 +19476,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
19020
19476
  timeout: "number",
19021
19477
  },
19022
19478
  ({ report, timeout }) => {
19023
- const workReportHash = blake2b.hashBytes(Encoder.encodeObject(WorkReport.Codec, report)).asOpaque();
19024
- return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
19479
+ return AvailabilityAssignment.create({ workReport: report, timeout });
19025
19480
  },
19026
19481
  );
19027
19482
 
@@ -19109,7 +19564,7 @@ type JsonRecentBlockState = {
19109
19564
  reported: WorkPackageInfo[];
19110
19565
  };
19111
19566
 
19112
- declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
19567
+ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocks>(
19113
19568
  {
19114
19569
  history: json.array(recentBlockStateFromJson),
19115
19570
  mmr: {
@@ -19117,12 +19572,10 @@ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, Recent
19117
19572
  },
19118
19573
  },
19119
19574
  ({ history, mmr }) => {
19120
- return RecentBlocksHistory.create(
19121
- RecentBlocks.create({
19122
- blocks: history,
19123
- accumulationLog: mmr,
19124
- }),
19125
- );
19575
+ return RecentBlocks.create({
19576
+ blocks: history,
19577
+ accumulationLog: mmr,
19578
+ });
19126
19579
  },
19127
19580
  );
19128
19581
 
@@ -19242,8 +19695,12 @@ declare class JsonServiceStatistics {
19242
19695
  extrinsic_count: "number",
19243
19696
  accumulate_count: "number",
19244
19697
  accumulate_gas_used: json.fromNumber(tryAsServiceGas),
19245
- on_transfers_count: "number",
19246
- on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
19698
+ ...(Compatibility.isLessThan(GpVersion.V0_7_1)
19699
+ ? {
19700
+ on_transfers_count: "number",
19701
+ on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
19702
+ }
19703
+ : {}),
19247
19704
  },
19248
19705
  ({
19249
19706
  provided_count,
@@ -19270,8 +19727,8 @@ declare class JsonServiceStatistics {
19270
19727
  extrinsicCount: extrinsic_count,
19271
19728
  accumulateCount: accumulate_count,
19272
19729
  accumulateGasUsed: accumulate_gas_used,
19273
- onTransfersCount: on_transfers_count,
19274
- onTransfersGasUsed: on_transfers_gas_used,
19730
+ onTransfersCount: on_transfers_count ?? tryAsU32(0),
19731
+ onTransfersGasUsed: on_transfers_gas_used ?? tryAsServiceGas(0),
19275
19732
  });
19276
19733
  },
19277
19734
  );
@@ -19286,8 +19743,8 @@ declare class JsonServiceStatistics {
19286
19743
  extrinsic_count!: U16;
19287
19744
  accumulate_count!: U32;
19288
19745
  accumulate_gas_used!: ServiceGas;
19289
- on_transfers_count!: U32;
19290
- on_transfers_gas_used!: ServiceGas;
19746
+ on_transfers_count?: U32;
19747
+ on_transfers_gas_used?: ServiceGas;
19291
19748
  }
19292
19749
 
19293
19750
  type ServiceStatisticsEntry = {
@@ -19359,8 +19816,9 @@ type JsonStateDump = {
19359
19816
  tau: State["timeslot"];
19360
19817
  chi: {
19361
19818
  chi_m: PrivilegedServices["manager"];
19362
- chi_a: PrivilegedServices["authManager"];
19363
- chi_v: PrivilegedServices["validatorsManager"];
19819
+ chi_a: PrivilegedServices["assigners"];
19820
+ chi_v: PrivilegedServices["delegator"];
19821
+ chi_r?: PrivilegedServices["registrar"];
19364
19822
  chi_g: PrivilegedServices["autoAccumulateServices"] | null;
19365
19823
  };
19366
19824
  pi: JsonStatisticsData;
@@ -19393,6 +19851,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19393
19851
  chi_m: "number",
19394
19852
  chi_a: json.array("number"),
19395
19853
  chi_v: "number",
19854
+ chi_r: json.optional("number"),
19396
19855
  chi_g: json.nullable(
19397
19856
  json.array({
19398
19857
  service: "number",
@@ -19425,7 +19884,10 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19425
19884
  theta,
19426
19885
  accounts,
19427
19886
  }): InMemoryState => {
19428
- return InMemoryState.create({
19887
+ if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) && chi.chi_r === undefined) {
19888
+ throw new Error("Registrar is required in Privileges GP ^0.7.1");
19889
+ }
19890
+ return InMemoryState.new(spec, {
19429
19891
  authPools: tryAsPerCore(
19430
19892
  alpha.map((perCore) => {
19431
19893
  if (perCore.length > MAX_AUTH_POOL_SIZE) {
@@ -19444,7 +19906,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19444
19906
  }),
19445
19907
  spec,
19446
19908
  ),
19447
- recentBlocks: beta ?? RecentBlocksHistory.empty(),
19909
+ recentBlocks: beta ?? RecentBlocks.empty(),
19448
19910
  nextValidatorData: gamma.gamma_k,
19449
19911
  epochRoot: gamma.gamma_z,
19450
19912
  sealingKeySeries: TicketsOrKeys.toSafroleSealingKeys(gamma.gamma_s, spec),
@@ -19458,8 +19920,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19458
19920
  timeslot: tau,
19459
19921
  privilegedServices: PrivilegedServices.create({
19460
19922
  manager: chi.chi_m,
19461
- authManager: chi.chi_a,
19462
- validatorsManager: chi.chi_v,
19923
+ assigners: chi.chi_a,
19924
+ delegator: chi.chi_v,
19925
+ registrar: chi.chi_r ?? tryAsServiceId(2 ** 32 - 1),
19463
19926
  autoAccumulateServices: chi.chi_g ?? [],
19464
19927
  }),
19465
19928
  statistics: JsonStatisticsData.toStatisticsData(spec, pi),
@@ -19482,6 +19945,7 @@ declare const index$1_JsonDisputesRecords: typeof JsonDisputesRecords;
19482
19945
  type index$1_JsonLookupMeta = JsonLookupMeta;
19483
19946
  type index$1_JsonPreimageItem = JsonPreimageItem;
19484
19947
  declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
19948
+ type index$1_JsonPreimageStatus = JsonPreimageStatus;
19485
19949
  type index$1_JsonRecentBlockState = JsonRecentBlockState;
19486
19950
  type index$1_JsonRecentBlocks = JsonRecentBlocks;
19487
19951
  type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
@@ -19506,6 +19970,7 @@ declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
19506
19970
  declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
19507
19971
  declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
19508
19972
  declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
19973
+ declare const index$1_preimageStatusFromJson: typeof preimageStatusFromJson;
19509
19974
  declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
19510
19975
  declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
19511
19976
  declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
@@ -19513,8 +19978,8 @@ declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEn
19513
19978
  declare const index$1_ticketFromJson: typeof ticketFromJson;
19514
19979
  declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
19515
19980
  declare namespace index$1 {
19516
- export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
19517
- export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
19981
+ export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_preimageStatusFromJson as preimageStatusFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
19982
+ export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonPreimageStatus as JsonPreimageStatus, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
19518
19983
  }
19519
19984
 
19520
19985
  /** Helper function to create most used hashes in the block */
@@ -19522,7 +19987,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19522
19987
  constructor(
19523
19988
  private readonly context: ChainSpec,
19524
19989
  private readonly keccakHasher: KeccakHasher,
19525
- private readonly allocator: HashAllocator,
19990
+ public readonly blake2b: Blake2b,
19526
19991
  ) {}
19527
19992
 
19528
19993
  /** Concatenates two hashes and hash this concatenation */
@@ -19536,7 +20001,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19536
20001
 
19537
20002
  /** Creates hash from the block header view */
19538
20003
  header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
19539
- return new WithHash(blake2b.hashBytes(header.encoded(), this.allocator).asOpaque(), header);
20004
+ return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
19540
20005
  }
19541
20006
 
19542
20007
  /**
@@ -19546,29 +20011,31 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19546
20011
  */
19547
20012
  extrinsic(extrinsicView: ExtrinsicView): WithHashAndBytes<ExtrinsicHash, ExtrinsicView> {
19548
20013
  // https://graypaper.fluffylabs.dev/#/cc517d7/0cfb000cfb00?v=0.6.5
19549
- const guarantees = extrinsicView.guarantees
20014
+ const guaranteesCount = tryAsU32(extrinsicView.guarantees.view().length);
20015
+ const countEncoded = Encoder.encodeObject(codec.varU32, guaranteesCount);
20016
+ const guaranteesBlobs = extrinsicView.guarantees
19550
20017
  .view()
19551
20018
  .map((g) => g.view())
19552
- .map((guarantee) => {
19553
- const reportHash = blake2b.hashBytes(guarantee.report.encoded(), this.allocator).asOpaque<WorkReportHash>();
19554
- return BytesBlob.blobFromParts([
19555
- reportHash.raw,
19556
- guarantee.slot.encoded().raw,
19557
- guarantee.credentials.encoded().raw,
19558
- ]);
19559
- });
19560
-
19561
- const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);
20019
+ .reduce(
20020
+ (aggregated, guarantee) => {
20021
+ const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
20022
+ aggregated.push(reportHash.raw);
20023
+ aggregated.push(guarantee.slot.encoded().raw);
20024
+ aggregated.push(guarantee.credentials.encoded().raw);
20025
+ return aggregated;
20026
+ },
20027
+ [countEncoded.raw],
20028
+ );
19562
20029
 
19563
- const et = blake2b.hashBytes(extrinsicView.tickets.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19564
- const ep = blake2b.hashBytes(extrinsicView.preimages.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19565
- const eg = blake2b.hashBytes(guaranteeBlob, this.allocator).asOpaque<ExtrinsicHash>();
19566
- const ea = blake2b.hashBytes(extrinsicView.assurances.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19567
- const ed = blake2b.hashBytes(extrinsicView.disputes.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
20030
+ const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
20031
+ const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
20032
+ const eg = this.blake2b.hashBlobs(guaranteesBlobs).asOpaque<ExtrinsicHash>();
20033
+ const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
20034
+ const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
19568
20035
 
19569
20036
  const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);
19570
20037
 
19571
- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), extrinsicView, encoded);
20038
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
19572
20039
  }
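The refactor streams the guarantee parts into `hashBlobs` instead of materializing one concatenated blob first. A sketch of the equivalence this relies on (assuming `hashBlobs` hashes the concatenation of its inputs, which is what the substitution implies; `isEqualTo` as used elsewhere in this file):

    const parts = [countEncoded.raw, reportHash.raw, slot.raw, credentials.raw];
    const streamed = blake2b.hashBlobs(parts);
    const concatenated = blake2b.hashBytes(BytesBlob.blobFromParts(parts));
    // streamed.isEqualTo(concatenated): same digest, one fewer full copy of the data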
19573
20040
 
19574
20041
  /** Creates hash for given WorkPackage */
@@ -19579,7 +20046,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19579
20046
  private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
19580
20047
  // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
19581
20048
  const encoded = Encoder.encodeObject(codec, data, this.context);
19582
- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), data, encoded);
20049
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
19583
20050
  }
19584
20051
  }
19585
20052
 
@@ -19600,7 +20067,10 @@ declare enum PreimagesErrorCode {
19600
20067
 
19601
20068
  // TODO [SeKo] consider whether this module is the right place to remove expired preimages
19602
20069
  declare class Preimages {
19603
- constructor(public readonly state: PreimagesState) {}
20070
+ constructor(
20071
+ public readonly state: PreimagesState,
20072
+ public readonly blake2b: Blake2b,
20073
+ ) {}
19604
20074
 
19605
20075
  integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
19606
20076
  // make sure lookup extrinsics are sorted and unique
@@ -19629,7 +20099,7 @@ declare class Preimages {
19629
20099
  // select preimages for integration
19630
20100
  for (const preimage of preimages) {
19631
20101
  const { requester, blob } = preimage;
19632
- const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
20102
+ const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();
19633
20103
 
19634
20104
  const service = this.state.getService(requester);
19635
20105
  if (service === null) {
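Within `integrate`, each submitted blob is re-hashed and the resulting `PreimageHash` drives the service-state lookup. A minimal sketch of that guard, with the brand type and the `wasRequested` query as hypothetical stand-ins for the state machinery:

    // Sketch: recompute the blob's hash and confirm the service actually
    // requested it before accepting the preimage.
    type PreimageHash = Uint8Array & { readonly __brand: "PreimageHash" };

    function shouldIntegrate(
      blake2b: { hashBytes(bytes: Uint8Array): { asOpaque<T>(): T } },
      blob: Uint8Array,
      wasRequested: (hash: PreimageHash) => boolean, // hypothetical lookup
    ): boolean {
      const hash = blake2b.hashBytes(blob).asOpaque<PreimageHash>();
      return wasRequested(hash);
    }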
@@ -19660,156 +20130,6 @@ declare class Preimages {
19660
20130
  }
19661
20131
  }
19662
20132
 
19663
- declare enum ServiceExecutorError {
19664
- NoLookup = 0,
19665
- NoState = 1,
19666
- NoServiceCode = 2,
19667
- ServiceCodeMismatch = 3,
19668
- }
19669
-
19670
- declare class WorkPackageExecutor {
19671
- constructor(
19672
- private readonly blocks: BlocksDb,
19673
- private readonly state: StatesDb,
19674
- private readonly hasher: TransitionHasher,
19675
- ) {}
19676
-
19677
- // TODO [ToDr] this whole thing should be triple-checked with the GP.
19678
- // I'm currently implementing some dirty version for the demo.
19679
- async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
19680
- const headerHash = pack.context.lookupAnchor;
19681
- // execute authorization first, or is it already executed and we just need to check it?
19682
- const authExec = this.getServiceExecutor(
19683
- // TODO [ToDr] should this be anchor or lookupAnchor?
19684
- headerHash,
19685
- pack.authCodeHost,
19686
- pack.authCodeHash,
19687
- );
19688
-
19689
- if (authExec.isError) {
19690
- // TODO [ToDr] most likely shouldn't throw.
19691
- throw new Error(`Could not get authorization executor: ${authExec.error}`);
19692
- }
19693
-
19694
- const pvm = authExec.ok;
19695
- const authGas = tryAsGas(15_000n);
19696
- const result = await pvm.run(pack.parametrization, authGas);
19697
-
19698
- if (!result.isEqualTo(pack.authorization)) {
19699
- throw new Error("Authorization is invalid.");
19700
- }
19701
-
19702
- const results: WorkResult[] = [];
19703
- for (const item of pack.items) {
19704
- const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
19705
- if (exec.isError) {
19706
- throw new Error(`Could not get item executor: ${exec.error}`);
19707
- }
19708
- const pvm = exec.ok;
19709
-
19710
- const gasRatio = tryAsServiceGas(3_000n);
19711
- const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
19712
- results.push(
19713
- WorkResult.create({
19714
- serviceId: item.service,
19715
- codeHash: item.codeHash,
19716
- payloadHash: blake2b.hashBytes(item.payload),
19717
- gas: gasRatio,
19718
- result: new WorkExecResult(WorkExecResultKind.ok, ret),
19719
- load: WorkRefineLoad.create({
19720
- gasUsed: tryAsServiceGas(5),
19721
- importedSegments: tryAsU32(0),
19722
- exportedSegments: tryAsU32(0),
19723
- extrinsicSize: tryAsU32(0),
19724
- extrinsicCount: tryAsU32(0),
19725
- }),
19726
- }),
19727
- );
19728
- }
19729
-
19730
- const workPackage = this.hasher.workPackage(pack);
19731
- const workPackageSpec = WorkPackageSpec.create({
19732
- hash: workPackage.hash,
19733
- length: tryAsU32(workPackage.encoded.length),
19734
- erasureRoot: Bytes.zero(HASH_SIZE),
19735
- exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
19736
- exportsCount: tryAsU16(0),
19737
- });
19738
- const coreIndex = tryAsCoreIndex(0);
19739
- const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
19740
-
19741
- const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
19742
-
19743
- return Promise.resolve(
19744
- WorkReport.create({
19745
- workPackageSpec,
19746
- context: pack.context,
19747
- coreIndex,
19748
- authorizerHash,
19749
- authorizationOutput: pack.authorization,
19750
- segmentRootLookup: [],
19751
- results: workResults,
19752
- authorizationGasUsed: tryAsServiceGas(0),
19753
- }),
19754
- );
19755
- }
19756
-
19757
- getServiceExecutor(
19758
- lookupAnchor: HeaderHash,
19759
- serviceId: ServiceId,
19760
- expectedCodeHash: CodeHash,
19761
- ): Result$2<PvmExecutor, ServiceExecutorError> {
19762
- const header = this.blocks.getHeader(lookupAnchor);
19763
- if (header === null) {
19764
- return Result.error(ServiceExecutorError.NoLookup);
19765
- }
19766
-
19767
- const state = this.state.getState(lookupAnchor);
19768
- if (state === null) {
19769
- return Result.error(ServiceExecutorError.NoState);
19770
- }
19771
-
19772
- const service = state.getService(serviceId);
19773
- const serviceCodeHash = service?.getInfo().codeHash ?? null;
19774
- if (serviceCodeHash === null) {
19775
- return Result.error(ServiceExecutorError.NoServiceCode);
19776
- }
19777
-
19778
- if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
19779
- return Result.error(ServiceExecutorError.ServiceCodeMismatch);
19780
- }
19781
-
19782
- const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
19783
- if (serviceCode === null) {
19784
- return Result.error(ServiceExecutorError.NoServiceCode);
19785
- }
19786
-
19787
- return Result.ok(new PvmExecutor(serviceCode));
19788
- }
19789
- }
19790
-
19791
- declare class PvmExecutor {
19792
- private readonly pvm: HostCalls;
19793
- private hostCalls = new HostCallsManager({ missing: new Missing() });
19794
- private pvmInstanceManager = new PvmInstanceManager(4);
19795
-
19796
- constructor(private serviceCode: BytesBlob) {
19797
- this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
19798
- }
19799
-
19800
- async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
19801
- const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
19802
-
19803
- const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
19804
-
19805
- if (result.hasMemorySlice()) {
19806
- return BytesBlob.blobFrom(result.memorySlice);
19807
- }
19808
-
19809
- return BytesBlob.empty();
19810
- }
19811
- }
19812
-
19813
20133
  type index_Preimages = Preimages;
19814
20134
  declare const index_Preimages: typeof Preimages;
19815
20135
  type index_PreimagesErrorCode = PreimagesErrorCode;
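The removed `getServiceExecutor` illustrates the `Result`-based lookup style used throughout: each missing dependency maps to a distinct error code instead of a thrown exception. A condensed sketch of that shape, with every type reduced to a hypothetical stand-in:

    // Sketch: a chain of nullable lookups, each failure mapped to a code.
    enum LookupError {
      NoHeader = 0,
      NoState = 1,
      NoServiceCode = 2,
      ServiceCodeMismatch = 3,
    }

    type Result<T, E> = { isError: false; ok: T } | { isError: true; error: E };
    const ok = <T, E>(value: T): Result<T, E> => ({ isError: false, ok: value });
    const err = <T, E>(error: E): Result<T, E> => ({ isError: true, error });

    function findServiceCode(
      header: object | null,
      state: { codeHash: string; preimage: Uint8Array | null } | null,
      expectedCodeHash: string,
    ): Result<Uint8Array, LookupError> {
      if (header === null) return err(LookupError.NoHeader);
      if (state === null) return err(LookupError.NoState);
      if (state.codeHash !== expectedCodeHash) return err(LookupError.ServiceCodeMismatch);
      if (state.preimage === null) return err(LookupError.NoServiceCode);
      return ok(state.preimage);
    }

Callers can then branch on `isError` and surface the specific code, as the removed `executeWorkPackage` did before converting the error into a thrown `Error`.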
@@ -19819,10 +20139,8 @@ type index_PreimagesState = PreimagesState;
19819
20139
  type index_PreimagesStateUpdate = PreimagesStateUpdate;
19820
20140
  type index_TransitionHasher = TransitionHasher;
19821
20141
  declare const index_TransitionHasher: typeof TransitionHasher;
19822
- type index_WorkPackageExecutor = WorkPackageExecutor;
19823
- declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
19824
20142
  declare namespace index {
19825
- export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher, index_WorkPackageExecutor as WorkPackageExecutor };
20143
+ export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
19826
20144
  export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
19827
20145
  }
19828
20146
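After this final hunk the `index` namespace exports only `Preimages`, `PreimagesErrorCode`, and `TransitionHasher` as values, plus the three `Preimages*` types; `WorkPackageExecutor` and `PvmExecutor` are gone from the public surface. A sketch of the remaining consumer-facing imports, assuming the namespace is re-exported from the package root (the exact import path is a guess):

    // Sketch: the surviving public surface of this module.
    import { Preimages, PreimagesErrorCode, TransitionHasher } from "@typeberry/lib";
    import type { PreimagesInput, PreimagesState, PreimagesStateUpdate } from "@typeberry/lib";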