@typeberry/lib 0.1.3-462ca77 → 0.1.3-47d06ae

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.cjs +671 -1354
  2. package/index.d.ts +777 -815
  3. package/index.js +670 -1353
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -1,7 +1,7 @@
  declare enum GpVersion {
  V0_6_7 = "0.6.7",
  V0_7_0 = "0.7.0",
- V0_7_1 = "0.7.1-preview",
+ V0_7_1 = "0.7.1",
  V0_7_2 = "0.7.2-preview",
  }
 
@@ -11,12 +11,12 @@ declare enum TestSuite {
  }
 
  declare const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
-
- declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
- declare const DEFAULT_VERSION = GpVersion.V0_7_0;
+ declare const DEFAULT_VERSION = GpVersion.V0_7_1;
  declare let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
  declare let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
 
+ declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
+
  declare function parseCurrentVersion(env?: string): GpVersion | undefined {
  if (env === undefined) {
  return undefined;
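For orientation, the default Gray Paper version is now 0.7.1 and CURRENT_VERSION is resolved from the GP_VERSION environment variable, falling back to DEFAULT_VERSION. A minimal sketch, assuming `env` above maps to `process.env`; the value strings are the GpVersion enum values shown in the first hunk:

// Pin the Gray Paper version before the library reads env.GP_VERSION.
process.env.GP_VERSION = "0.7.2-preview"; // any GpVersion value: "0.6.7", "0.7.0", "0.7.1", "0.7.2-preview"
// With no GP_VERSION set, CURRENT_VERSION === GpVersion.V0_7_1 (the new default).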
@@ -35,7 +35,9 @@ declare function parseCurrentVersion(env?: string): GpVersion | undefined {
  }
 
  declare function parseCurrentSuite(env?: string): TestSuite | undefined {
- if (env === undefined) return undefined;
+ if (env === undefined) {
+ return undefined;
+ }
  switch (env) {
  case TestSuite.W3F_DAVXY:
  case TestSuite.JAMDUNA:
@@ -587,8 +589,12 @@ declare function deepEqual<T>(
  const aKey = `${a.key}`;
  const bKey = `${b.key}`;
 
- if (aKey < bKey) return -1;
- if (bKey < aKey) return 1;
+ if (aKey < bKey) {
+ return -1;
+ }
+ if (bKey < aKey) {
+ return 1;
+ }
  return 0;
  });
  };
@@ -3476,6 +3482,99 @@ declare namespace index$q {
  export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
  }
 
+ /**
+ * A utility class providing a readonly view over a portion of an array without copying it.
+ */
+ declare class ArrayView<T> implements Iterable<T> {
+ private readonly source: T[];
+ public readonly length: number;
+
+ private constructor(
+ source: T[],
+ private readonly start: number,
+ private readonly end: number,
+ ) {
+ this.source = source;
+ this.length = end - start;
+ }
+
+ static from<T>(source: T[], start = 0, end = source.length): ArrayView<T> {
+ check`
+ ${start >= 0 && end <= source.length && start <= end}
+ Invalid start (${start})/end (${end}) for ArrayView
+ `;
+ return new ArrayView(source, start, end);
+ }
+
+ get(i: number): T {
+ check`
+ ${i >= 0 && i < this.length}
+ Index out of bounds: ${i} < ${this.length}
+ `;
+ return this.source[this.start + i];
+ }
+
+ subview(from: number, to: number = this.length): ArrayView<T> {
+ return ArrayView.from(this.source, this.start + from, this.start + to);
+ }
+
+ toArray(): T[] {
+ return this.source.slice(this.start, this.end);
+ }
+
+ *[Symbol.iterator](): Iterator<T> {
+ for (let i = this.start; i < this.end; i++) {
+ yield this.source[i];
+ }
+ }
+ }
+
+ type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
+ type IDataType = string | Buffer | ITypedArray;
+
+ type IHasher = {
+ /**
+ * Initializes hash state to default value
+ */
+ init: () => IHasher;
+ /**
+ * Updates the hash content with the given data
+ */
+ update: (data: IDataType) => IHasher;
+ /**
+ * Calculates the hash of all of the data passed to be hashed with hash.update().
+ * Defaults to hexadecimal string
+ * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
+ * returns hexadecimal string
+ */
+ digest: {
+ (outputType: "binary"): Uint8Array;
+ (outputType?: "hex"): string;
+ };
+ /**
+ * Save the current internal state of the hasher for later resumption with load().
+ * Cannot be called before .init() or after .digest()
+ *
+ * Note that this state can include arbitrary information about the value being hashed (e.g.
+ * could include N plaintext bytes from the value), so needs to be treated as being as
+ * sensitive as the input value itself.
+ */
+ save: () => Uint8Array;
+ /**
+ * Resume a state that was created by save(). If this state was not created by a
+ * compatible build of hash-wasm, an exception will be thrown.
+ */
+ load: (state: Uint8Array) => IHasher;
+ /**
+ * Block size in bytes
+ */
+ blockSize: number;
+ /**
+ * Digest size in bytes
+ */
+ digestSize: number;
+ };
+
  /**
  * Size of the output of the hash functions.
  *
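A minimal usage sketch of the ArrayView API added above (the sample values are illustrative):

const data = [10, 20, 30, 40, 50];
const view = ArrayView.from(data, 1, 4); // read-only window over [20, 30, 40], no copy
view.length;                  // 3
view.get(0);                  // 20
const tail = view.subview(1); // window over [30, 40]
[...tail];                    // [30, 40] — iteration walks the underlying array
tail.toArray();               // copies only when explicitly requested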
@@ -3531,144 +3630,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
  }
  }
 
- /** Allocator interface - returns an empty bytes vector that can be filled with the hash. */
- interface HashAllocator {
- /** Return a new hash destination. */
- emptyHash(): OpaqueHash;
- }
-
- /** The simplest allocator returning just a fresh copy of bytes each time. */
- declare class SimpleAllocator implements HashAllocator {
- emptyHash(): OpaqueHash {
- return Bytes.zero(HASH_SIZE);
- }
- }
-
- /** An allocator that works by allocating larger (continuous) pages of memory. */
- declare class PageAllocator implements HashAllocator {
- private page: Uint8Array = safeAllocUint8Array(0);
- private currentHash = 0;
+ declare const zero$1 = Bytes.zero(HASH_SIZE);
 
- // TODO [ToDr] Benchmark the performance!
- constructor(private readonly hashesPerPage: number) {
- check`${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
- this.resetPage();
+ declare class Blake2b {
+ static async createHasher() {
+ return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
  }
 
- private resetPage() {
- const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
- this.currentHash = 0;
- this.page = safeAllocUint8Array(pageSizeBytes);
- }
-
- emptyHash(): OpaqueHash {
- const startIdx = this.currentHash * HASH_SIZE;
- const endIdx = startIdx + HASH_SIZE;
+ private constructor(private readonly hasher: IHasher) {}
 
- this.currentHash += 1;
- if (this.currentHash >= this.hashesPerPage) {
- this.resetPage();
+ /**
+ * Hash given collection of blobs.
+ *
+ * If empty array is given a zero-hash is returned.
+ */
+ hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
+ if (r.length === 0) {
+ return zero.asOpaque();
  }
 
- return Bytes.fromBlob(this.page.subarray(startIdx, endIdx), HASH_SIZE);
+ const hasher = this.hasher.init();
+ for (const v of r) {
+ hasher.update(v instanceof BytesBlob ? v.raw : v);
+ }
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
  }
- }
-
- declare const defaultAllocator = new SimpleAllocator();
 
- /**
- * Hash given collection of blobs.
- *
- * If empty array is given a zero-hash is returned.
- */
- declare function hashBlobs$1<H extends Blake2bHash>(
- r: (BytesBlob | Uint8Array)[],
- allocator: HashAllocator = defaultAllocator,
- ): H {
- const out = allocator.emptyHash();
- if (r.length === 0) {
- return out.asOpaque();
+ /** Hash given blob of bytes. */
+ hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
+ const hasher = this.hasher.init();
+ const bytes = blob instanceof BytesBlob ? blob.raw : blob;
+ hasher.update(bytes);
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
  }
 
- const hasher = blake2b(HASH_SIZE);
- for (const v of r) {
- hasher?.update(v instanceof BytesBlob ? v.raw : v);
+ /** Convert given string into bytes and hash it. */
+ hashString(str: string) {
+ return this.hashBytes(BytesBlob.blobFromString(str));
  }
- hasher?.digest(out.raw);
- return out.asOpaque();
- }
-
- /** Hash given blob of bytes. */
- declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
- const hasher = blake2b(HASH_SIZE);
- const bytes = blob instanceof BytesBlob ? blob.raw : blob;
- hasher?.update(bytes);
- const out = allocator.emptyHash();
- hasher?.digest(out.raw);
- return out;
- }
-
- /** Convert given string into bytes and hash it. */
- declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
- return hashBytes(BytesBlob.blobFromString(str), allocator);
- }
-
- declare const blake2b_hashBytes: typeof hashBytes;
- declare const blake2b_hashString: typeof hashString;
- declare namespace blake2b {
- export {
- hashBlobs$1 as hashBlobs,
- blake2b_hashBytes as hashBytes,
- blake2b_hashString as hashString,
- };
  }
 
- type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
- type IDataType = string | Buffer | ITypedArray;
-
- type IHasher = {
- /**
- * Initializes hash state to default value
- */
- init: () => IHasher;
- /**
- * Updates the hash content with the given data
- */
- update: (data: IDataType) => IHasher;
- /**
- * Calculates the hash of all of the data passed to be hashed with hash.update().
- * Defaults to hexadecimal string
- * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
- * returns hexadecimal string
- */
- digest: {
- (outputType: "binary"): Uint8Array;
- (outputType?: "hex"): string;
- };
- /**
- * Save the current internal state of the hasher for later resumption with load().
- * Cannot be called before .init() or after .digest()
- *
- * Note that this state can include arbitrary information about the value being hashed (e.g.
- * could include N plaintext bytes from the value), so needs to be treated as being as
- * sensitive as the input value itself.
- */
- save: () => Uint8Array;
- /**
- * Resume a state that was created by save(). If this state was not created by a
- * compatible build of hash-wasm, an exception will be thrown.
- */
- load: (state: Uint8Array) => IHasher;
- /**
- * Block size in bytes
- */
- blockSize: number;
- /**
- * Digest size in bytes
- */
- digestSize: number;
- };
-
  declare class KeccakHasher {
  static async create(): Promise<KeccakHasher> {
  return new KeccakHasher(await createKeccak(256));
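The free-standing blake2b helpers and allocators removed above are replaced by the Blake2b wrapper class. A rough usage sketch based only on the declarations in this hunk (values are illustrative):

// Before: blake2b.hashBytes(blob, allocator). After: create one hasher and reuse it.
const blake2b = await Blake2b.createHasher();
const h1 = blake2b.hashBytes(BytesBlob.blobFromString("hello"));
const h2 = blake2b.hashString("hello"); // same digest as h1
const none = blake2b.hashBlobs([]);     // empty input yields the exported zero hash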
@@ -3696,15 +3697,15 @@ declare namespace keccak {
  };
  }
 
+ // TODO [ToDr] (#213) this should most likely be moved to a separate
+ // package to avoid pulling in unnecessary deps.
+
+ type index$p_Blake2b = Blake2b;
+ declare const index$p_Blake2b: typeof Blake2b;
  type index$p_Blake2bHash = Blake2bHash;
  type index$p_HASH_SIZE = HASH_SIZE;
- type index$p_HashAllocator = HashAllocator;
  type index$p_KeccakHash = KeccakHash;
  type index$p_OpaqueHash = OpaqueHash;
- type index$p_PageAllocator = PageAllocator;
- declare const index$p_PageAllocator: typeof PageAllocator;
- type index$p_SimpleAllocator = SimpleAllocator;
- declare const index$p_SimpleAllocator: typeof SimpleAllocator;
  type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
  type index$p_TruncatedHash = TruncatedHash;
  type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3712,12 +3713,10 @@ declare const index$p_WithHash: typeof WithHash;
  type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
  declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
  declare const index$p_ZERO_HASH: typeof ZERO_HASH;
- declare const index$p_blake2b: typeof blake2b;
- declare const index$p_defaultAllocator: typeof defaultAllocator;
  declare const index$p_keccak: typeof keccak;
  declare namespace index$p {
- export { index$p_PageAllocator as PageAllocator, index$p_SimpleAllocator as SimpleAllocator, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_blake2b as blake2b, index$p_defaultAllocator as defaultAllocator, index$p_keccak as keccak };
- export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_HashAllocator as HashAllocator, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
+ export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
+ export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
  }
 
  /** Immutable view of the `HashDictionary`. */
@@ -4494,6 +4493,8 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
  }
  }
 
+ type index$o_ArrayView<T> = ArrayView<T>;
+ declare const index$o_ArrayView: typeof ArrayView;
  type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
  declare const index$o_FixedSizeArray: typeof FixedSizeArray;
  type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
@@ -4521,7 +4522,7 @@ type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDic
  declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
  declare const index$o_asKnownSize: typeof asKnownSize;
  declare namespace index$o {
- export { index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
+ export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
  export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
  }
 
@@ -4840,22 +4841,16 @@ declare function trivialSeed(s: U32): KeySeed {
  * Derives a Ed25519 secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
- declare function deriveEd25519SecretKey(
- seed: KeySeed,
- allocator: SimpleAllocator = new SimpleAllocator(),
- ): Ed25519SecretSeed {
- return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+ declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
+ return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
  }
 
  /**
  * Derives a Bandersnatch secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
- declare function deriveBandersnatchSecretKey(
- seed: KeySeed,
- allocator: SimpleAllocator = new SimpleAllocator(),
- ): BandersnatchSecretSeed {
- return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+ declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
+ return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
  }
 
  /**
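Both derivation helpers now take the Blake2b instance explicitly instead of an allocator. A minimal sketch, assuming the `trivialSeed` and `tryAsU32` helpers declared elsewhere in this file:

const blake2b = await Blake2b.createHasher();
const seed = trivialSeed(tryAsU32(0));
const ed25519Secret = deriveEd25519SecretKey(seed, blake2b);
const bandersnatchSecret = deriveBandersnatchSecretKey(seed, blake2b);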
@@ -9126,21 +9121,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
  return Ordering.Equal;
  }
 
- declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
- Descriptor.withView(
- val.name,
- val.sizeHint,
- (e, elem) => val.encode(e, elem.data),
- (d): WithHash<H, T> => {
- const decoder2 = d.clone();
- const encoded = val.skipEncoded(decoder2);
- const hash = blake2b.hashBytes(encoded);
- return new WithHash(hash.asOpaque(), val.decode(d));
- },
- val.skip,
- val.View,
- );
-
  /**
  * Assignment of particular work report to a core.
  *
@@ -9151,7 +9131,7 @@ declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>
  */
  declare class AvailabilityAssignment extends WithDebug {
  static Codec = codec.Class(AvailabilityAssignment, {
- workReport: codecWithHash(WorkReport.Codec),
+ workReport: WorkReport.Codec,
  timeout: codec.u32.asOpaque<TimeSlot>(),
  });
 
@@ -9161,7 +9141,7 @@ declare class AvailabilityAssignment extends WithDebug {
 
  private constructor(
  /** Work report assigned to a core. */
- public readonly workReport: WithHash<WorkReportHash, WorkReport>,
+ public readonly workReport: WorkReport,
  /** Time slot at which the report becomes obsolete. */
  public readonly timeout: TimeSlot,
  ) {
@@ -9265,8 +9245,6 @@ declare function hashComparator<V extends OpaqueHash>(a: V, b: V) {
  return a.compare(b);
  }
 
- // TODO [ToDr] Not sure where these should live yet :(
-
  /**
  * `J`: The maximum sum of dependency items in a work-report.
  *
@@ -9320,87 +9298,305 @@ declare class NotYetAccumulatedReport extends WithDebug {
  }
  }
 
- /** Dictionary entry of services that auto-accumulate every block. */
- declare class AutoAccumulate {
- static Codec = codec.Class(AutoAccumulate, {
- service: codec.u32.asOpaque<ServiceId>(),
- gasLimit: codec.u64.asOpaque<ServiceGas>(),
- });
-
- static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
- return new AutoAccumulate(service, gasLimit);
- }
-
- private constructor(
- /** Service id that auto-accumulates. */
- readonly service: ServiceId,
- /** Gas limit for auto-accumulation. */
- readonly gasLimit: ServiceGas,
- ) {}
- }
-
  /**
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/11da0111da01?v=0.6.7
+ * `B_S`: The basic minimum balance which all services require.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
  */
- declare class PrivilegedServices {
- static Codec = codec.Class(PrivilegedServices, {
- manager: codec.u32.asOpaque<ServiceId>(),
- authManager: codecPerCore(codec.u32.asOpaque<ServiceId>()),
- validatorsManager: codec.u32.asOpaque<ServiceId>(),
- autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
- });
-
- static create({ manager, authManager, validatorsManager, autoAccumulateServices }: CodecRecord<PrivilegedServices>) {
- return new PrivilegedServices(manager, authManager, validatorsManager, autoAccumulateServices);
- }
-
- private constructor(
- /**
- * `chi_m`: The first, χm, is the index of the manager service which is
- * the service able to effect an alteration of χ from block to block,
- * as well as bestow services with storage deposit credits.
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/11a40111a801?v=0.6.7
- */
- readonly manager: ServiceId,
- /** `chi_a`: Manages authorization queue one for each core. */
- readonly authManager: PerCore<ServiceId>,
- /** `chi_v`: Managers validator keys. */
- readonly validatorsManager: ServiceId,
- /** `chi_g`: Dictionary of services that auto-accumulate every block with their gas limit. */
- readonly autoAccumulateServices: readonly AutoAccumulate[],
- ) {}
- }
+ declare const BASE_SERVICE_BALANCE = 100n;
+ /**
+ * `B_I`: The additional minimum balance required per item of elective service state.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
+ */
+ declare const ELECTIVE_ITEM_BALANCE = 10n;
+ /**
+ * `B_L`: The additional minimum balance required per octet of elective service state.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
+ */
+ declare const ELECTIVE_BYTE_BALANCE = 1n;
 
- declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
+ declare const zeroSizeHint: SizeHint = {
+ bytes: 0,
+ isExact: true,
+ };
 
- /** Merkle Mountain Range peaks. */
- interface MmrPeaks<H extends OpaqueHash> {
- /**
- * Peaks at particular positions.
- *
- * In case there is no merkle trie at given index, `null` is placed.
- */
- peaks: readonly (H | null)[];
- }
+ /** 0-byte read, return given default value */
+ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
+ Descriptor.new<T>(
+ "ignoreValue",
+ zeroSizeHint,
+ (_e, _v) => {},
+ (_d) => defaultValue,
+ (_s) => {},
+ );
 
- /** Hasher interface for MMR. */
- interface MmrHasher<H extends OpaqueHash> {
- /** Hash two items together. */
- hashConcat(a: H, b: H): H;
- /** Hash two items together with extra bytes blob prepended. */
- hashConcatPrepend(id: BytesBlob, a: H, b: H): H;
- }
+ /** Encode and decode object with leading version number. */
+ declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
+ Descriptor.new<T>(
+ "withVersion",
+ {
+ bytes: val.sizeHint.bytes + 8,
+ isExact: false,
+ },
+ (e, v) => {
+ e.varU64(0n);
+ val.encode(e, v);
+ },
+ (d) => {
+ const version = d.varU64();
+ if (version !== 0n) {
+ throw new Error("Non-zero version is not supported!");
+ }
+ return val.decode(d);
+ },
+ (s) => {
+ s.varU64();
+ val.skip(s);
+ },
+ );
 
  /**
- * Merkle Mountain Range.
+ * Service account details.
  *
- * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
  */
- declare class MerkleMountainRange<H extends OpaqueHash> {
- /** Construct an empty MMR. */
- static empty<H extends OpaqueHash>(hasher: MmrHasher<H>) {
- return new MerkleMountainRange(hasher);
- }
+ declare class ServiceAccountInfo extends WithDebug {
+ static Codec = codec.Class(ServiceAccountInfo, {
+ codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
+ balance: codec.u64,
+ accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+ onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+ storageUtilisationBytes: codec.u64,
+ gratisStorage: codec.u64,
+ storageUtilisationCount: codec.u32,
+ created: codec.u32.convert((x) => x, tryAsTimeSlot),
+ lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
+ parentService: codec.u32.convert((x) => x, tryAsServiceId),
+ });
+
+ static create(a: CodecRecord<ServiceAccountInfo>) {
+ return new ServiceAccountInfo(
+ a.codeHash,
+ a.balance,
+ a.accumulateMinGas,
+ a.onTransferMinGas,
+ a.storageUtilisationBytes,
+ a.gratisStorage,
+ a.storageUtilisationCount,
+ a.created,
+ a.lastAccumulation,
+ a.parentService,
+ );
+ }
+
+ /**
+ * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
+ */
+ static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
+ const storageCost =
+ BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
+
+ if (storageCost < 0n) {
+ return tryAsU64(0);
+ }
+
+ if (storageCost >= 2n ** 64n) {
+ return tryAsU64(2n ** 64n - 1n);
+ }
+
+ return tryAsU64(storageCost);
+ }
+
+ private constructor(
+ /** `a_c`: Hash of the service code. */
+ public readonly codeHash: CodeHash,
+ /** `a_b`: Current account balance. */
+ public readonly balance: U64,
+ /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
+ public readonly accumulateMinGas: ServiceGas,
+ /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
+ public readonly onTransferMinGas: ServiceGas,
+ /** `a_o`: Total number of octets in storage. */
+ public readonly storageUtilisationBytes: U64,
+ /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
+ public readonly gratisStorage: U64,
+ /** `a_i`: Number of items in storage. */
+ public readonly storageUtilisationCount: U32,
+ /** `a_r`: Creation account time slot. */
+ public readonly created: TimeSlot,
+ /** `a_a`: Most recent accumulation time slot. */
+ public readonly lastAccumulation: TimeSlot,
+ /** `a_p`: Parent service ID. */
+ public readonly parentService: ServiceId,
+ ) {
+ super();
+ }
+ }
+
+ declare class PreimageItem extends WithDebug {
+ static Codec = codec.Class(PreimageItem, {
+ hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
+ blob: codec.blob,
+ });
+
+ static create({ hash, blob }: CodecRecord<PreimageItem>) {
+ return new PreimageItem(hash, blob);
+ }
+
+ private constructor(
+ readonly hash: PreimageHash,
+ readonly blob: BytesBlob,
+ ) {
+ super();
+ }
+ }
+
+ type StorageKey = Opaque<BytesBlob, "storage key">;
+
+ declare class StorageItem extends WithDebug {
+ static Codec = codec.Class(StorageItem, {
+ key: codec.blob.convert(
+ (i) => i,
+ (o) => asOpaqueType(o),
+ ),
+ value: codec.blob,
+ });
+
+ static create({ key, value }: CodecRecord<StorageItem>) {
+ return new StorageItem(key, value);
+ }
+
+ private constructor(
+ readonly key: StorageKey,
+ readonly value: BytesBlob,
+ ) {
+ super();
+ }
+ }
+
+ declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
+ type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
+ declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
+ const knownSize = asKnownSize(items) as LookupHistorySlots;
+ if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
+ throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
+ }
+ return knownSize;
+ }
+
+ /** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
+ declare class LookupHistoryItem {
+ constructor(
+ public readonly hash: PreimageHash,
+ public readonly length: U32,
+ /**
+ * Preimage availability history as a sequence of time slots.
+ * See PreimageStatus and the following GP fragment for more details.
+ * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
+ public readonly slots: LookupHistorySlots,
+ ) {}
+
+ static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
+ if ("slots" in item) {
+ return item.slots.length === 0;
+ }
+ return item.length === 0;
+ }
+ }
+
+ /** Dictionary entry of services that auto-accumulate every block. */
+ declare class AutoAccumulate {
+ static Codec = codec.Class(AutoAccumulate, {
+ service: codec.u32.asOpaque<ServiceId>(),
+ gasLimit: codec.u64.asOpaque<ServiceGas>(),
+ });
+
+ static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
+ return new AutoAccumulate(service, gasLimit);
+ }
+
+ private constructor(
+ /** Service id that auto-accumulates. */
+ readonly service: ServiceId,
+ /** Gas limit for auto-accumulation. */
+ readonly gasLimit: ServiceGas,
+ ) {}
+ }
+
+ /**
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
+ */
+ declare class PrivilegedServices {
+ /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
+ static Codec = codec.Class(PrivilegedServices, {
+ manager: codec.u32.asOpaque<ServiceId>(),
+ assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
+ delegator: codec.u32.asOpaque<ServiceId>(),
+ registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
+ ? codec.u32.asOpaque<ServiceId>()
+ : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
+ autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
+ });
+
+ static create(a: CodecRecord<PrivilegedServices>) {
+ return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
+ }
+
+ private constructor(
+ /**
+ * `χ_M`: Manages alteration of χ from block to block,
+ * as well as bestow services with storage deposit credits.
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
+ */
+ readonly manager: ServiceId,
+ /** `χ_V`: Managers validator keys. */
+ readonly delegator: ServiceId,
+ /**
+ * `χ_R`: Manages the creation of services in protected range.
+ *
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
+ */
+ readonly registrar: ServiceId,
+ /** `χ_A`: Manages authorization queue one for each core. */
+ readonly assigners: PerCore<ServiceId>,
+ /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
+ readonly autoAccumulateServices: readonly AutoAccumulate[],
+ ) {}
+ }
+
+ declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
+
+ /** Merkle Mountain Range peaks. */
+ interface MmrPeaks<H extends OpaqueHash> {
+ /**
+ * Peaks at particular positions.
+ *
+ * In case there is no merkle trie at given index, `null` is placed.
+ */
+ peaks: readonly (H | null)[];
+ }
+
+ /** Hasher interface for MMR. */
+ interface MmrHasher<H extends OpaqueHash> {
+ /** Hash two items together. */
+ hashConcat(a: H, b: H): H;
+ /** Hash two items together with extra bytes blob prepended. */
+ hashConcatPrepend(id: BytesBlob, a: H, b: H): H;
+ }
+
+ /**
+ * Merkle Mountain Range.
+ *
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
+ */
+ declare class MerkleMountainRange<H extends OpaqueHash> {
+ /** Construct an empty MMR. */
+ static empty<H extends OpaqueHash>(hasher: MmrHasher<H>) {
+ return new MerkleMountainRange(hasher);
+ }
 
  /** Construct a new MMR from existing peaks. */
  static fromPeaks<H extends OpaqueHash>(hasher: MmrHasher<H>, mmr: MmrPeaks<H>) {
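As a worked example of calculateThresholdBalance declared above, using the constants from this hunk (B_S = 100n, B_I = 10n, B_L = 1n); the input numbers are illustrative:

// a_t = max(0, B_S + B_I * items + B_L * bytes - gratisStorage)
ServiceAccountInfo.calculateThresholdBalance(tryAsU32(2), tryAsU64(300n), tryAsU64(50n));   // 100n + 20n + 300n - 50n = 370n
ServiceAccountInfo.calculateThresholdBalance(tryAsU32(2), tryAsU64(300n), tryAsU64(1000n)); // negative, clamped to 0n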
@@ -9663,340 +9859,156 @@ declare class RecentBlocksHistory extends WithDebug {
  return RecentBlocksHistory.create(
  RecentBlocks.create({
  ...this.current,
- blocks: asOpaqueType(blocks as BlockState[]),
- }),
- );
- }
-
- throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
- }
- }
-
- /**
- * Fixed size of validator metadata.
- *
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
- */
- declare const VALIDATOR_META_BYTES = 128;
- type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
-
- /**
- * Details about validators' identity.
- *
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
- */
- declare class ValidatorData extends WithDebug {
- static Codec = codec.Class(ValidatorData, {
- bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
- ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
- bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
- metadata: codec.bytes(VALIDATOR_META_BYTES),
- });
-
- static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
- return new ValidatorData(bandersnatch, ed25519, bls, metadata);
- }
-
- private constructor(
- /** Bandersnatch public key. */
- public readonly bandersnatch: BandersnatchKey,
- /** ED25519 key data. */
- public readonly ed25519: Ed25519Key,
- /** BLS public key. */
- public readonly bls: BlsKey,
- /** Validator-defined additional metdata. */
- public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
- ) {
- super();
- }
- }
-
- declare enum SafroleSealingKeysKind {
- Tickets = 0,
- Keys = 1,
- }
-
- type SafroleSealingKeys =
- | {
- kind: SafroleSealingKeysKind.Keys;
- keys: PerEpochBlock<BandersnatchKey>;
- }
- | {
- kind: SafroleSealingKeysKind.Tickets;
- tickets: PerEpochBlock<Ticket>;
- };
-
- declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
-
- declare class SafroleSealingKeysData extends WithDebug {
- static Codec = codecWithContext((context) => {
- return codec.custom<SafroleSealingKeys>(
- {
- name: "SafroleSealingKeys",
- sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
- },
- (e, x) => {
- e.varU32(tryAsU32(x.kind));
- if (x.kind === SafroleSealingKeysKind.Keys) {
- e.sequenceFixLen(codecBandersnatchKey, x.keys);
- } else {
- e.sequenceFixLen(Ticket.Codec, x.tickets);
- }
- },
- (d) => {
- const epochLength = context.epochLength;
- const kind = d.varU32();
- if (kind === SafroleSealingKeysKind.Keys) {
- const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
- return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
- }
-
- if (kind === SafroleSealingKeysKind.Tickets) {
- const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
- return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
- }
-
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
- },
- (s) => {
- const kind = s.decoder.varU32();
- if (kind === SafroleSealingKeysKind.Keys) {
- s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
- return;
- }
- if (kind === SafroleSealingKeysKind.Tickets) {
- s.sequenceFixLen(Ticket.Codec, context.epochLength);
- return;
- }
-
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
- },
- );
- });
-
- static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
- }
-
- static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
- }
-
- private constructor(
- readonly kind: SafroleSealingKeysKind,
- readonly keys?: PerEpochBlock<BandersnatchKey>,
- readonly tickets?: PerEpochBlock<Ticket>,
- ) {
- super();
- }
- }
-
- declare class SafroleData {
- static Codec = codec.Class(SafroleData, {
- nextValidatorData: codecPerValidator(ValidatorData.Codec),
- epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
- sealingKeySeries: SafroleSealingKeysData.Codec,
- ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
- });
-
- static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
- return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
- }
-
- private constructor(
- /** gamma_k */
- public readonly nextValidatorData: PerValidator<ValidatorData>,
- /** gamma_z */
- public readonly epochRoot: BandersnatchRingRoot,
- /** gamma_s */
- public readonly sealingKeySeries: SafroleSealingKeys,
- /** gamma_a */
- public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
- ) {}
- }
-
- /**
- * `B_S`: The basic minimum balance which all services require.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
- */
- declare const BASE_SERVICE_BALANCE = 100n;
- /**
- * `B_I`: The additional minimum balance required per item of elective service state.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
- */
- declare const ELECTIVE_ITEM_BALANCE = 10n;
- /**
- * `B_L`: The additional minimum balance required per octet of elective service state.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
- */
- declare const ELECTIVE_BYTE_BALANCE = 1n;
-
- declare const zeroSizeHint: SizeHint = {
- bytes: 0,
- isExact: true,
- };
-
- /** 0-byte read, return given default value */
- declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
- Descriptor.new<T>(
- "ignoreValue",
- zeroSizeHint,
- (_e, _v) => {},
- (_d) => defaultValue,
- (_s) => {},
- );
-
- /**
- * Service account details.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
- */
- declare class ServiceAccountInfo extends WithDebug {
- static Codec = codec.Class(ServiceAccountInfo, {
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
- balance: codec.u64,
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- storageUtilisationBytes: codec.u64,
- gratisStorage: codec.u64,
- storageUtilisationCount: codec.u32,
- created: codec.u32.convert((x) => x, tryAsTimeSlot),
- lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
- parentService: codec.u32.convert((x) => x, tryAsServiceId),
- });
-
- static create(a: CodecRecord<ServiceAccountInfo>) {
- return new ServiceAccountInfo(
- a.codeHash,
- a.balance,
- a.accumulateMinGas,
- a.onTransferMinGas,
- a.storageUtilisationBytes,
- a.gratisStorage,
- a.storageUtilisationCount,
- a.created,
- a.lastAccumulation,
- a.parentService,
- );
- }
-
- /**
- * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
- */
- static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
- const storageCost =
- BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
-
- if (storageCost < 0n) {
- return tryAsU64(0);
- }
-
- if (storageCost >= 2n ** 64n) {
- return tryAsU64(2n ** 64n - 1n);
- }
-
- return tryAsU64(storageCost);
- }
-
- private constructor(
- /** `a_c`: Hash of the service code. */
- public readonly codeHash: CodeHash,
- /** `a_b`: Current account balance. */
- public readonly balance: U64,
- /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
- public readonly accumulateMinGas: ServiceGas,
- /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
- public readonly onTransferMinGas: ServiceGas,
- /** `a_o`: Total number of octets in storage. */
- public readonly storageUtilisationBytes: U64,
- /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
- public readonly gratisStorage: U64,
- /** `a_i`: Number of items in storage. */
- public readonly storageUtilisationCount: U32,
- /** `a_r`: Creation account time slot. */
- public readonly created: TimeSlot,
- /** `a_a`: Most recent accumulation time slot. */
- public readonly lastAccumulation: TimeSlot,
- /** `a_p`: Parent service ID. */
- public readonly parentService: ServiceId,
- ) {
- super();
+ blocks: asOpaqueType(blocks as BlockState[]),
+ }),
+ );
+ }
+
+ throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
  }
  }
 
- declare class PreimageItem extends WithDebug {
- static Codec = codec.Class(PreimageItem, {
- hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
- blob: codec.blob,
+ /**
+ * Fixed size of validator metadata.
+ *
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
+ */
+ declare const VALIDATOR_META_BYTES = 128;
+ type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
+
+ /**
+ * Details about validators' identity.
+ *
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
+ */
+ declare class ValidatorData extends WithDebug {
+ static Codec = codec.Class(ValidatorData, {
+ bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
+ ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
+ bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
+ metadata: codec.bytes(VALIDATOR_META_BYTES),
  });
 
- static create({ hash, blob }: CodecRecord<PreimageItem>) {
- return new PreimageItem(hash, blob);
+ static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
+ return new ValidatorData(bandersnatch, ed25519, bls, metadata);
  }
 
  private constructor(
- readonly hash: PreimageHash,
- readonly blob: BytesBlob,
+ /** Bandersnatch public key. */
+ public readonly bandersnatch: BandersnatchKey,
+ /** ED25519 key data. */
+ public readonly ed25519: Ed25519Key,
+ /** BLS public key. */
+ public readonly bls: BlsKey,
+ /** Validator-defined additional metdata. */
+ public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
  ) {
  super();
  }
  }
 
- type StorageKey = Opaque<BytesBlob, "storage key">;
+ declare enum SafroleSealingKeysKind {
+ Tickets = 0,
+ Keys = 1,
+ }
 
- declare class StorageItem extends WithDebug {
- static Codec = codec.Class(StorageItem, {
- key: codec.blob.convert(
- (i) => i,
- (o) => asOpaqueType(o),
- ),
- value: codec.blob,
+ type SafroleSealingKeys =
+ | {
+ kind: SafroleSealingKeysKind.Keys;
+ keys: PerEpochBlock<BandersnatchKey>;
+ }
+ | {
+ kind: SafroleSealingKeysKind.Tickets;
+ tickets: PerEpochBlock<Ticket>;
+ };
+
+ declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
+
+ declare class SafroleSealingKeysData extends WithDebug {
+ static Codec = codecWithContext((context) => {
+ return codec.custom<SafroleSealingKeys>(
+ {
+ name: "SafroleSealingKeys",
+ sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
+ },
+ (e, x) => {
+ e.varU32(tryAsU32(x.kind));
+ if (x.kind === SafroleSealingKeysKind.Keys) {
+ e.sequenceFixLen(codecBandersnatchKey, x.keys);
+ } else {
+ e.sequenceFixLen(Ticket.Codec, x.tickets);
+ }
+ },
+ (d) => {
+ const epochLength = context.epochLength;
+ const kind = d.varU32();
+ if (kind === SafroleSealingKeysKind.Keys) {
+ const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
+ return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
+ }
+
+ if (kind === SafroleSealingKeysKind.Tickets) {
+ const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
+ return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
+ }
+
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
+ },
+ (s) => {
+ const kind = s.decoder.varU32();
+ if (kind === SafroleSealingKeysKind.Keys) {
+ s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
+ return;
+ }
+ if (kind === SafroleSealingKeysKind.Tickets) {
+ s.sequenceFixLen(Ticket.Codec, context.epochLength);
+ return;
+ }
+
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
+ },
+ );
  });
 
- static create({ key, value }: CodecRecord<StorageItem>) {
- return new StorageItem(key, value);
+ static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
+ return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
+ }
+
+ static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
+ return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
  }
 
  private constructor(
- readonly key: StorageKey,
- readonly value: BytesBlob,
+ readonly kind: SafroleSealingKeysKind,
+ readonly keys?: PerEpochBlock<BandersnatchKey>,
+ readonly tickets?: PerEpochBlock<Ticket>,
  ) {
  super();
  }
  }
 
- declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
- type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
- declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
- const knownSize = asKnownSize(items) as LookupHistorySlots;
- if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
- throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
+ declare class SafroleData {
+ static Codec = codec.Class(SafroleData, {
+ nextValidatorData: codecPerValidator(ValidatorData.Codec),
+ epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
+ sealingKeySeries: SafroleSealingKeysData.Codec,
+ ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
+ });
+
+ static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
+ return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
  }
- return knownSize;
- }
 
- /** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
- declare class LookupHistoryItem {
- constructor(
- public readonly hash: PreimageHash,
- public readonly length: U32,
- /**
- * Preimage availability history as a sequence of time slots.
- * See PreimageStatus and the following GP fragment for more details.
- * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
- public readonly slots: LookupHistorySlots,
+ private constructor(
+ /** gamma_k */
+ public readonly nextValidatorData: PerValidator<ValidatorData>,
+ /** gamma_z */
+ public readonly epochRoot: BandersnatchRingRoot,
+ /** gamma_s */
+ public readonly sealingKeySeries: SafroleSealingKeys,
+ /** gamma_a */
+ public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
  ) {}
-
- static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
- if ("slots" in item) {
- return item.slots.length === 0;
- }
- return item.length === 0;
- }
  }
 
  declare const codecServiceId: Descriptor<ServiceId> =
@@ -10139,12 +10151,26 @@ declare class CoreStatistics {
  * Service statistics.
  * Updated per block, based on available work reports (`W`).
  *
- * https://graypaper.fluffylabs.dev/#/68eaa1f/185104185104?v=0.6.4
- * https://github.com/gavofyork/graypaper/blob/9bffb08f3ea7b67832019176754df4fb36b9557d/text/statistics.tex#L77
+ * https://graypaper.fluffylabs.dev/#/1c979cb/199802199802?v=0.7.1
  */
  declare class ServiceStatistics {
- static Codec = Compatibility.isGreaterOrEqual(GpVersion.V0_7_0)
- ? codec.Class(ServiceStatistics, {
+ static Codec = Compatibility.selectIfGreaterOrEqual({
+ fallback: codec.Class(ServiceStatistics, {
+ providedCount: codecVarU16,
+ providedSize: codec.varU32,
+ refinementCount: codec.varU32,
+ refinementGasUsed: codecVarGas,
+ imports: codecVarU16,
+ exports: codecVarU16,
+ extrinsicSize: codec.varU32,
+ extrinsicCount: codecVarU16,
+ accumulateCount: codec.varU32,
+ accumulateGasUsed: codecVarGas,
+ onTransfersCount: codec.varU32,
+ onTransfersGasUsed: codecVarGas,
+ }),
+ versions: {
+ [GpVersion.V0_7_0]: codec.Class(ServiceStatistics, {
  providedCount: codecVarU16,
  providedSize: codec.varU32,
  refinementCount: codec.varU32,
@@ -10157,21 +10183,23 @@ declare class ServiceStatistics {
  accumulateGasUsed: codecVarGas,
  onTransfersCount: codec.varU32,
  onTransfersGasUsed: codecVarGas,
- })
- : codec.Class(ServiceStatistics, {
+ }),
+ [GpVersion.V0_7_1]: codec.Class(ServiceStatistics, {
  providedCount: codecVarU16,
  providedSize: codec.varU32,
  refinementCount: codec.varU32,
  refinementGasUsed: codecVarGas,
  imports: codecVarU16,
- exports: codecVarU16,
- extrinsicSize: codec.varU32,
  extrinsicCount: codecVarU16,
+ extrinsicSize: codec.varU32,
+ exports: codecVarU16,
  accumulateCount: codec.varU32,
  accumulateGasUsed: codecVarGas,
- onTransfersCount: codec.varU32,
- onTransfersGasUsed: codecVarGas,
- });
+ onTransfersCount: ignoreValueWithDefault(tryAsU32(0)),
+ onTransfersGasUsed: ignoreValueWithDefault(tryAsServiceGas(0)),
+ }),
+ },
+ });
 
  static create(v: CodecRecord<ServiceStatistics>) {
  return new ServiceStatistics(
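The codec is now chosen with Compatibility.selectIfGreaterOrEqual rather than a single isGreaterOrEqual ternary. Reading the declaration above, the likely behavior (inferred from the name, not confirmed here) is that the entry for the highest listed version satisfied by the current GP version wins and `fallback` covers anything older. The codec names below are hypothetical placeholders:

const statsCodec = Compatibility.selectIfGreaterOrEqual({
  fallback: pre070Codec, // hypothetical: used for GP < 0.7.0
  versions: {
    [GpVersion.V0_7_0]: v070Codec, // hypothetical: 0.7.0 field layout
    [GpVersion.V0_7_1]: v071Codec, // hypothetical: 0.7.1 layout, on-transfer fields ignored
  },
});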
@@ -10211,9 +10239,9 @@ declare class ServiceStatistics {
  public accumulateCount: U32,
  /** `a.1` */
  public accumulateGasUsed: ServiceGas,
- /** `t.0` */
+ /** `t.0` @deprecated since 0.7.1 */
  public onTransfersCount: U32,
- /** `t.1` */
+ /** `t.1` @deprecated since 0.7.1 */
  public onTransfersGasUsed: ServiceGas,
  ) {}
 
@@ -11173,8 +11201,9 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
  epochRoot: Bytes.zero(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
  privilegedServices: PrivilegedServices.create({
  manager: tryAsServiceId(0),
- authManager: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
- validatorsManager: tryAsServiceId(0),
+ assigners: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
+ delegator: tryAsServiceId(0),
+ registrar: tryAsServiceId(MAX_VALUE),
  autoAccumulateServices: [],
  }),
  accumulationOutputLog: SortedArray.fromArray(accumulationOutputComparator, []),
@@ -11308,7 +11337,7 @@ declare const index$e_codecPerCore: typeof codecPerCore;
  declare const index$e_codecServiceId: typeof codecServiceId;
  declare const index$e_codecVarGas: typeof codecVarGas;
  declare const index$e_codecVarU16: typeof codecVarU16;
- declare const index$e_codecWithHash: typeof codecWithHash;
+ declare const index$e_codecWithVersion: typeof codecWithVersion;
  declare const index$e_hashComparator: typeof hashComparator;
  declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
  declare const index$e_serviceDataCodec: typeof serviceDataCodec;
@@ -11319,7 +11348,7 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
  declare namespace index$e {
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
  export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
  }
 
@@ -11387,7 +11416,7 @@ declare namespace stateKeys {
  }
 
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
- export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
+ export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
  const out = Bytes.zero(HASH_SIZE);
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11395,11 +11424,11 @@ declare namespace stateKeys {
11395
11424
  return legacyServiceNested(serviceId, out);
11396
11425
  }
11397
11426
 
11398
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
11427
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
11399
11428
  }
11400
11429
 
11401
11430
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
11402
- export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
11431
+ export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
11403
11432
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11404
11433
  const out = Bytes.zero(HASH_SIZE);
11405
11434
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11407,11 +11436,16 @@ declare namespace stateKeys {
11407
11436
  return legacyServiceNested(serviceId, out);
11408
11437
  }
11409
11438
 
11410
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
11439
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
11411
11440
  }
11412
11441
 
11413
11442
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
11414
- export function serviceLookupHistory(serviceId: ServiceId, hash: PreimageHash, preimageLength: U32): StateKey {
11443
+ export function serviceLookupHistory(
11444
+ blake2b: Blake2b,
11445
+ serviceId: ServiceId,
11446
+ hash: PreimageHash,
11447
+ preimageLength: U32,
11448
+ ): StateKey {
11415
11449
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11416
11450
  const doubleHash = blake2b.hashBytes(hash);
11417
11451
  const out = Bytes.zero(HASH_SIZE);
@@ -11420,11 +11454,11 @@ declare namespace stateKeys {
11420
11454
  return legacyServiceNested(serviceId, out);
11421
11455
  }
11422
11456
 
11423
- return serviceNested(serviceId, preimageLength, hash);
11457
+ return serviceNested(blake2b, serviceId, preimageLength, hash);
11424
11458
  }
11425
11459
 
11426
11460
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
11427
- export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
11461
+ export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
11428
11462
  const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
11429
11463
  const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
11430
11464
  const key = Bytes.zero(HASH_SIZE);
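Note: the stateKeys helpers above now take an explicit Blake2b instance instead of relying on a shared module-level hasher. A minimal usage sketch, assuming the identifiers come from this package and that serviceId, key (a StorageKey) and hash (a PreimageHash) already exist in scope:

// Sketch only: deriving service state keys with the new explicit-hasher signatures.
const blake2b = await Blake2b.createHasher();
const storageStateKey = stateKeys.serviceStorage(blake2b, serviceId, key);
const preimageStateKey = stateKeys.servicePreimage(blake2b, serviceId, hash);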
@@ -11604,24 +11638,26 @@ declare namespace serialize {
11604
11638
  /** C(255, s): https://graypaper.fluffylabs.dev/#/85129da/383103383103?v=0.6.3 */
11605
11639
  export const serviceData = (serviceId: ServiceId) => ({
11606
11640
  key: stateKeys.serviceInfo(serviceId),
11607
- Codec: ServiceAccountInfo.Codec,
11641
+ Codec: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
11642
+ ? codecWithVersion(ServiceAccountInfo.Codec)
11643
+ : ServiceAccountInfo.Codec,
11608
11644
  });
11609
11645
 
11610
11646
  /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
11611
- export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
11612
- key: stateKeys.serviceStorage(serviceId, key),
11647
+ export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
11648
+ key: stateKeys.serviceStorage(blake2b, serviceId, key),
11613
11649
  Codec: dumpCodec,
11614
11650
  });
11615
11651
 
11616
11652
  /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
11617
- export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
11618
- key: stateKeys.servicePreimage(serviceId, hash),
11653
+ export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
11654
+ key: stateKeys.servicePreimage(blake2b, serviceId, hash),
11619
11655
  Codec: dumpCodec,
11620
11656
  });
11621
11657
 
11622
11658
  /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
11623
- export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
11624
- key: stateKeys.serviceLookupHistory(serviceId, hash, len),
11659
+ export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
11660
+ key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
11625
11661
  Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
11626
11662
  });
11627
11663
  }
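The serialize helpers mirror that change: each one now takes the Blake2b instance, and serviceData wraps its codec in codecWithVersion from GP 0.7.1 onwards. A hedged sketch of encoding a single service-info entry, assuming spec (ChainSpec), serviceId and accountInfo (ServiceAccountInfo) are in scope:

// Sketch only: encode the C(255, s) service-info entry.
const { key, Codec } = serialize.serviceData(serviceId);
const value = Encoder.encodeObject(Codec, accountInfo, spec);
// On GP >= 0.7.1 the Codec is codecWithVersion(ServiceAccountInfo.Codec), otherwise the plain codec.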
@@ -11656,6 +11692,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
11656
11692
  /** Serialize given state update into a series of key-value pairs. */
11657
11693
  declare function* serializeStateUpdate(
11658
11694
  spec: ChainSpec,
11695
+ blake2b: Blake2b,
11659
11696
  update: Partial<State & ServicesUpdate>,
11660
11697
  ): Generator<StateEntryUpdate> {
11661
11698
  // first let's serialize all of the simple entries (if present!)
@@ -11664,9 +11701,9 @@ declare function* serializeStateUpdate(
11664
11701
  const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
11665
11702
 
11666
11703
  // then let's proceed with service updates
11667
- yield* serializeServiceUpdates(update.servicesUpdates, encode);
11668
- yield* serializePreimages(update.preimages, encode);
11669
- yield* serializeStorage(update.storage);
11704
+ yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
11705
+ yield* serializePreimages(update.preimages, encode, blake2b);
11706
+ yield* serializeStorage(update.storage, blake2b);
11670
11707
  yield* serializeRemovedServices(update.servicesRemoved);
11671
11708
  }
11672
11709
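serializeStateUpdate is a generator, so callers typically drain it into whatever key-value backend they maintain. A sketch of the call pattern, with update (a Partial<State & ServicesUpdate>) as a placeholder:

// Sketch only: apply a serialized state update entry by entry.
const blake2b = await Blake2b.createHasher();
for (const [action, key, value] of serializeStateUpdate(spec, blake2b, update)) {
  // action is a StateEntryUpdateAction (Insert or Remove), key a state key, value a BytesBlob.
}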
 
@@ -11678,18 +11715,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
11678
11715
  }
11679
11716
  }
11680
11717
 
11681
- declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
11718
+ declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
11682
11719
  for (const { action, serviceId } of storage ?? []) {
11683
11720
  switch (action.kind) {
11684
11721
  case UpdateStorageKind.Set: {
11685
11722
  const key = action.storage.key;
11686
- const codec = serialize.serviceStorage(serviceId, key);
11723
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
11687
11724
  yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
11688
11725
  break;
11689
11726
  }
11690
11727
  case UpdateStorageKind.Remove: {
11691
11728
  const key = action.key;
11692
- const codec = serialize.serviceStorage(serviceId, key);
11729
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
11693
11730
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11694
11731
  break;
11695
11732
  }
@@ -11699,16 +11736,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
11699
11736
  }
11700
11737
  }
11701
11738
 
11702
- declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, encode: EncodeFun): Generator<StateEntryUpdate> {
11739
+ declare function* serializePreimages(
11740
+ preimages: UpdatePreimage[] | undefined,
11741
+ encode: EncodeFun,
11742
+ blake2b: Blake2b,
11743
+ ): Generator<StateEntryUpdate> {
11703
11744
  for (const { action, serviceId } of preimages ?? []) {
11704
11745
  switch (action.kind) {
11705
11746
  case UpdatePreimageKind.Provide: {
11706
11747
  const { hash, blob } = action.preimage;
11707
- const codec = serialize.servicePreimages(serviceId, hash);
11748
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11708
11749
  yield [StateEntryUpdateAction.Insert, codec.key, blob];
11709
11750
 
11710
11751
  if (action.slot !== null) {
11711
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
11752
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
11712
11753
  yield [
11713
11754
  StateEntryUpdateAction.Insert,
11714
11755
  codec2.key,
@@ -11719,16 +11760,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
11719
11760
  }
11720
11761
  case UpdatePreimageKind.UpdateOrAdd: {
11721
11762
  const { hash, length, slots } = action.item;
11722
- const codec = serialize.serviceLookupHistory(serviceId, hash, length);
11763
+ const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11723
11764
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
11724
11765
  break;
11725
11766
  }
11726
11767
  case UpdatePreimageKind.Remove: {
11727
11768
  const { hash, length } = action;
11728
- const codec = serialize.servicePreimages(serviceId, hash);
11769
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11729
11770
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11730
11771
 
11731
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
11772
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11732
11773
  yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
11733
11774
  break;
11734
11775
  }
@@ -11740,6 +11781,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
11740
11781
  declare function* serializeServiceUpdates(
11741
11782
  servicesUpdates: UpdateService[] | undefined,
11742
11783
  encode: EncodeFun,
11784
+ blake2b: Blake2b,
11743
11785
  ): Generator<StateEntryUpdate> {
11744
11786
  for (const { action, serviceId } of servicesUpdates ?? []) {
11745
11787
  // new service being created or updated
@@ -11749,7 +11791,7 @@ declare function* serializeServiceUpdates(
11749
11791
  // additional lookup history update
11750
11792
  if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
11751
11793
  const { lookupHistory } = action;
11752
- const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
11794
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
11753
11795
  yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
11754
11796
  }
11755
11797
  }
@@ -11883,8 +11925,8 @@ declare class StateEntries {
11883
11925
  );
11884
11926
 
11885
11927
  /** Turn in-memory state into it's serialized form. */
11886
- static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
11887
- return new StateEntries(convertInMemoryStateToDictionary(spec, state));
11928
+ static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
11929
+ return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
11888
11930
  }
11889
11931
 
11890
11932
  /**
@@ -11939,7 +11981,8 @@ declare class StateEntries {
11939
11981
  }
11940
11982
 
11941
11983
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
11942
- getRootHash(): StateRootHash {
11984
+ getRootHash(blake2b: Blake2b): StateRootHash {
11985
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
11943
11986
  const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
11944
11987
  for (const [key, value] of this) {
11945
11988
  leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
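Computing a state root from an in-memory state now threads the hasher through both steps, mirroring what InMemoryStates.getStateRoot does further below. Sketch, assuming state is an InMemoryState:

// Sketch only: serialize and hash an in-memory state.
const blake2b = await Blake2b.createHasher();
const entries = StateEntries.serializeInMemory(spec, blake2b, state);
const stateRoot = entries.getRootHash(blake2b);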
@@ -11952,6 +11995,7 @@ declare class StateEntries {
11952
11995
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
11953
11996
  declare function convertInMemoryStateToDictionary(
11954
11997
  spec: ChainSpec,
11998
+ blake2b: Blake2b,
11955
11999
  state: InMemoryState,
11956
12000
  ): TruncatedHashDictionary<StateKey, BytesBlob> {
11957
12001
  const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11984,20 +12028,25 @@ declare function convertInMemoryStateToDictionary(
11984
12028
 
11985
12029
  // preimages
11986
12030
  for (const preimage of service.data.preimages.values()) {
11987
- const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
12031
+ const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
11988
12032
  serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
11989
12033
  }
11990
12034
 
11991
12035
  // storage
11992
12036
  for (const storage of service.data.storage.values()) {
11993
- const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
12037
+ const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
11994
12038
  serialized.set(key, Encoder.encodeObject(Codec, storage.value));
11995
12039
  }
11996
12040
 
11997
12041
  // lookup history
11998
12042
  for (const lookupHistoryList of service.data.lookupHistory.values()) {
11999
12043
  for (const lookupHistory of lookupHistoryList) {
12000
- const { key, Codec } = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
12044
+ const { key, Codec } = serialize.serviceLookupHistory(
12045
+ blake2b,
12046
+ serviceId,
12047
+ lookupHistory.hash,
12048
+ lookupHistory.length,
12049
+ );
12001
12050
  serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
12002
12051
  }
12003
12052
  }
@@ -12028,21 +12077,23 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12028
12077
  implements State, EnumerableState
12029
12078
  {
12030
12079
  /** Create a state-like object from collection of serialized entries. */
12031
- static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
12032
- return new SerializedState(spec, state, recentServices);
12080
+ static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
12081
+ return new SerializedState(spec, blake2b, state, recentServices);
12033
12082
  }
12034
12083
 
12035
12084
  /** Create a state-like object backed by some DB. */
12036
12085
  static new<T extends SerializedStateBackend>(
12037
12086
  spec: ChainSpec,
12087
+ blake2b: Blake2b,
12038
12088
  db: T,
12039
12089
  recentServices: ServiceId[] = [],
12040
12090
  ): SerializedState<T> {
12041
- return new SerializedState(spec, db, recentServices);
12091
+ return new SerializedState(spec, blake2b, db, recentServices);
12042
12092
  }
12043
12093
 
12044
12094
  private constructor(
12045
12095
  private readonly spec: ChainSpec,
12096
+ private readonly blake2b: Blake2b,
12046
12097
  public backend: T,
12047
12098
  /** Best-effort list of recently active services. */
12048
12099
  private readonly _recentServiceIds: ServiceId[],
@@ -12073,7 +12124,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12073
12124
  this._recentServiceIds.push(id);
12074
12125
  }
12075
12126
 
12076
- return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
12127
+ return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
12077
12128
  }
12078
12129
 
12079
12130
  private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
@@ -12172,6 +12223,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12172
12223
  /** Service data representation on a serialized state. */
12173
12224
  declare class SerializedService implements Service {
12174
12225
  constructor(
12226
+ public readonly blake2b: Blake2b,
12175
12227
  /** Service id */
12176
12228
  public readonly serviceId: ServiceId,
12177
12229
  private readonly accountInfo: ServiceAccountInfo,
@@ -12190,11 +12242,11 @@ declare class SerializedService implements Service {
12190
12242
  const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
12191
12243
  serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
12192
12244
  serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
12193
- const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
12194
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
12245
+ const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
12246
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
12195
12247
  }
12196
12248
 
12197
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
12249
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
12198
12250
  }
12199
12251
 
12200
12252
  /**
@@ -12204,17 +12256,17 @@ declare class SerializedService implements Service {
12204
12256
  */
12205
12257
  hasPreimage(hash: PreimageHash): boolean {
12206
12258
  // TODO [ToDr] consider optimizing to avoid fetching the whole data.
12207
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
12259
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
12208
12260
  }
12209
12261
 
12210
12262
  /** Retrieve preimage from the DB. */
12211
12263
  getPreimage(hash: PreimageHash): BytesBlob | null {
12212
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
12264
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
12213
12265
  }
12214
12266
 
12215
12267
  /** Retrieve preimage lookup history. */
12216
12268
  getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
12217
- const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
12269
+ const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
12218
12270
  if (rawSlots === undefined) {
12219
12271
  return null;
12220
12272
  }
@@ -12227,9 +12279,9 @@ type KeyAndCodec<T> = {
12227
12279
  Codec: Decode<T>;
12228
12280
  };
12229
12281
 
12230
- declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
12282
+ declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
12231
12283
  const stateEntries = StateEntries.fromEntriesUnsafe(entries);
12232
- return SerializedState.fromStateEntries(spec, stateEntries);
12284
+ return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
12233
12285
  }
12234
12286
 
12235
12287
  /**
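loadState threads the same hasher into the resulting SerializedState so per-service key derivation works without a global instance. A minimal sketch, where entries stands in for a DB dump of [key, value] pairs:

// Sketch only: rebuild a state view from raw entries.
const blake2b = await Blake2b.createHasher();
const state = loadState(spec, blake2b, entries);
const service = state.getService(serviceId); // hypothetical lookup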
@@ -12385,7 +12437,8 @@ declare class LeafDb implements SerializedStateBackend {
12385
12437
  assertNever(val);
12386
12438
  }
12387
12439
 
12388
- getStateRoot(): StateRootHash {
12440
+ getStateRoot(blake2b: Blake2b): StateRootHash {
12441
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
12389
12442
  return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
12390
12443
  }
12391
12444
 
@@ -12483,7 +12536,8 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
12483
12536
  }
12484
12537
 
12485
12538
  async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
12486
- return StateEntries.serializeInMemory(this.spec, state).getRootHash();
12539
+ const blake2b = await Blake2b.createHasher();
12540
+ return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
12487
12541
  }
12488
12542
 
12489
12543
  /** Insert a full state into the database. */
@@ -13654,6 +13708,8 @@ declare enum NewServiceError {
13654
13708
  InsufficientFunds = 0,
13655
13709
  /** Service is not privileged to set gratis storage. */
13656
13710
  UnprivilegedService = 1,
13711
+ /** Registrar attempted to create a service with an already existing id. */
13712
+ RegistrarServiceIdAlreadyTaken = 2,
13657
13713
  }
13658
13714
 
13659
13715
  declare enum UpdatePrivilegesError {
@@ -13719,14 +13775,18 @@ interface PartialState {
13719
13775
  ): Result$2<OK, TransferError>;
13720
13776
 
13721
13777
  /**
13722
- * Create a new service with given codeHash, length, gas, allowance and gratisStorage.
13778
+ * Create a new service with given codeHash, length, gas, allowance, gratisStorage and wantedServiceId.
13723
13779
  *
13724
- * Returns a newly assigned id of that service.
13725
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/2f4c022f4c02?v=0.6.7
13780
+ * Returns a newly assigned id
13781
+ * or `wantedServiceId` if it's lower than `S`
13782
+ * and the parent of that service is the `Registrar`.
13783
+ *
13784
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/2fa9042fc304?v=0.7.2
13726
13785
  *
13727
13786
  * An error can be returned in case the account does not
13728
13787
  * have the required balance
13729
- * or tries to set gratis storage without being privileged.
13788
+ * or tries to set gratis storage without being the `Manager`,
13789
+ * or the `Registrar` tries to set a service id that is already taken.
13730
13790
  */
13731
13791
  newService(
13732
13792
  codeHash: CodeHash,
@@ -13734,6 +13794,7 @@ interface PartialState {
13734
13794
  gas: ServiceGas,
13735
13795
  allowance: ServiceGas,
13736
13796
  gratisStorage: U64,
13797
+ wantedServiceId: U64,
13737
13798
  ): Result$2<ServiceId, NewServiceError>;
13738
13799
 
13739
13800
  /** Upgrade code of currently running service. */
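A hedged sketch of the extended newService call; the values are placeholders, the second argument is the code length per the comment above, and partialState stands for any PartialState implementation:

// Sketch only: the Registrar creating a service under a chosen id.
const created = partialState.newService(
  codeHash,                  // CodeHash of the service code
  tryAsU64(1024n),           // code length (placeholder value)
  tryAsServiceGas(10_000n),  // accumulate gas
  tryAsServiceGas(1_000n),   // gas allowance
  tryAsU64(0n),              // gratis storage (Manager only)
  tryAsU64(7n),              // wantedServiceId, honoured for the Registrar in the protected id range
);
if (created.isError && created.error === NewServiceError.RegistrarServiceIdAlreadyTaken) {
  // the requested id is already in use
}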
@@ -13755,7 +13816,7 @@ interface PartialState {
13755
13816
  updateAuthorizationQueue(
13756
13817
  coreIndex: CoreIndex,
13757
13818
  authQueue: FixedSizeArray<Blake2bHash, AUTHORIZATION_QUEUE_SIZE>,
13758
- authManager: ServiceId | null,
13819
+ assigners: ServiceId | null,
13759
13820
  ): Result$2<OK, UpdatePrivilegesError>;
13760
13821
 
13761
13822
  /**
@@ -13764,14 +13825,16 @@ interface PartialState {
13764
13825
  * `m`: manager service (can change privileged services)
13765
13826
  * `a`: manages authorization queue
13766
13827
  * `v`: manages validator keys
13767
- * `g`: collection of serviceId -> gas that auto-accumulate every block
13828
+ * `r`: manages creating new services in the protected id range.
13829
+ * `z`: collection of serviceId -> gas that auto-accumulate every block
13768
13830
  *
13769
13831
  */
13770
13832
  updatePrivilegedServices(
13771
13833
  m: ServiceId | null,
13772
13834
  a: PerCore<ServiceId>,
13773
13835
  v: ServiceId | null,
13774
- g: [ServiceId, ServiceGas][],
13836
+ r: ServiceId | null,
13837
+ z: [ServiceId, ServiceGas][],
13775
13838
  ): Result$2<OK, UpdatePrivilegesError>;
13776
13839
 
13777
13840
  /** Yield accumulation trie result hash. */
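updatePrivilegedServices now carries the registrar as its own argument. A sketch with placeholder service ids, building the per-core assigners array the same way this file does elsewhere:

// Sketch only: set manager, per-core assigners, delegator, registrar and no auto-accumulate entries.
const result = partialState.updatePrivilegedServices(
  tryAsServiceId(0),                                                       // m: manager
  tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),  // a: assigners
  tryAsServiceId(0),                                                       // v: delegator
  tryAsServiceId(1),                                                       // r: registrar
  [],                                                                      // z: auto-accumulate services
);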
@@ -17689,7 +17752,7 @@ declare class AccumulationStateUpdate {
17689
17752
  /** Services state updates. */
17690
17753
  public readonly services: ServicesUpdate,
17691
17754
  /** Pending transfers. */
17692
- public readonly transfers: PendingTransfer[],
17755
+ public transfers: PendingTransfer[],
17693
17756
  /** Yielded accumulation root. */
17694
17757
  public readonly yieldedRoots: Map<ServiceId, OpaqueHash> = new Map(),
17695
17758
  ) {}
@@ -17740,11 +17803,18 @@ declare class AccumulationStateUpdate {
17740
17803
  if (from.privilegedServices !== null) {
17741
17804
  update.privilegedServices = PrivilegedServices.create({
17742
17805
  ...from.privilegedServices,
17743
- authManager: asKnownSize([...from.privilegedServices.authManager]),
17806
+ assigners: asKnownSize([...from.privilegedServices.assigners]),
17744
17807
  });
17745
17808
  }
17746
17809
  return update;
17747
17810
  }
17811
+
17812
+ /** Retrieve and clear pending transfers. */
17813
+ takeTransfers() {
17814
+ const transfers = this.transfers;
17815
+ this.transfers = [];
17816
+ return transfers;
17817
+ }
17748
17818
  }
17749
17819
 
17750
17820
  type StateSlice = Pick<State, "getService" | "privilegedServices">;
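takeTransfers exists so the pending-transfer queue is consumed exactly once. A short sketch, with stateUpdate standing for an AccumulationStateUpdate:

// Sketch only: drain pending transfers before processing them.
const transfers = stateUpdate.takeTransfers();
// stateUpdate.transfers is now empty; transfers holds the PendingTransfer[] to process.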
@@ -18011,7 +18081,7 @@ declare const HostCallResult = {
18011
18081
  OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
18012
18082
  /** Index unknown. */
18013
18083
  WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
18014
- /** Storage full. */
18084
+ /** Storage full or resource already allocated. */
18015
18085
  FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
18016
18086
  /** Core index unknown. */
18017
18087
  CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
@@ -18019,7 +18089,7 @@ declare const HostCallResult = {
18019
18089
  CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
18020
18090
  /** Gas limit too low. */
18021
18091
  LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
18022
- /** The item is already solicited or cannot be forgotten. */
18092
+ /** The item is already solicited, cannot be forgotten, or the operation is invalid due to the privilege level. */
18023
18093
  HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
18024
18094
  /** The return value indicating general success. */
18025
18095
  OK: tryAsU64(0n),
@@ -18895,10 +18965,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
18895
18965
  *
18896
18966
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
18897
18967
  */
18898
- declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
18968
+ declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
18899
18969
  check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
18900
18970
  const n = arr.length;
18901
- const randomNumbers = hashToNumberSequence(entropy, arr.length);
18971
+ const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
18902
18972
  const result: T[] = new Array<T>(n);
18903
18973
 
18904
18974
  let itemsLeft = n;
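fisherYatesShuffle now receives the hasher explicitly as well. Sketch, assuming entropy is a Bytes<ENTROPY_BYTES> value and items is any array to be permuted:

// Sketch only: deterministic shuffle driven by the provided entropy.
const blake2b = await Blake2b.createHasher();
const shuffled = fisherYatesShuffle(blake2b, items, entropy);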
@@ -18924,6 +18994,7 @@ declare namespace index$2 {
18924
18994
  declare class JsonServiceInfo {
18925
18995
  static fromJson = json.object<JsonServiceInfo, ServiceAccountInfo>(
18926
18996
  {
18997
+ ...(Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) ? { version: "number" } : {}),
18927
18998
  code_hash: fromJson.bytes32(),
18928
18999
  balance: json.fromNumber((x) => tryAsU64(x)),
18929
19000
  min_item_gas: json.fromNumber((x) => tryAsServiceGas(x)),
@@ -18962,6 +19033,7 @@ declare class JsonServiceInfo {
18962
19033
  },
18963
19034
  );
18964
19035
 
19036
+ version?: number;
18965
19037
  code_hash!: CodeHash;
18966
19038
  balance!: U64;
18967
19039
  min_item_gas!: ServiceGas;
@@ -19008,6 +19080,19 @@ declare const lookupMetaFromJson = json.object<JsonLookupMeta, LookupHistoryItem
19008
19080
  ({ key, value }) => new LookupHistoryItem(key.hash, key.length, value),
19009
19081
  );
19010
19082
 
19083
+ declare const preimageStatusFromJson = json.object<JsonPreimageStatus, LookupHistoryItem>(
19084
+ {
19085
+ hash: fromJson.bytes32(),
19086
+ status: json.array("number"),
19087
+ },
19088
+ ({ hash, status }) => new LookupHistoryItem(hash, tryAsU32(0), status),
19089
+ );
19090
+
19091
+ type JsonPreimageStatus = {
19092
+ hash: PreimageHash;
19093
+ status: LookupHistorySlots;
19094
+ };
19095
+
19011
19096
  type JsonLookupMeta = {
19012
19097
  key: {
19013
19098
  hash: PreimageHash;
@@ -19020,21 +19105,34 @@ declare class JsonService {
19020
19105
  static fromJson = json.object<JsonService, InMemoryService>(
19021
19106
  {
19022
19107
  id: "number",
19023
- data: {
19024
- service: JsonServiceInfo.fromJson,
19025
- preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
19026
- storage: json.optional(json.array(JsonStorageItem.fromJson)),
19027
- lookup_meta: json.optional(json.array(lookupMetaFromJson)),
19028
- },
19108
+ data: Compatibility.isLessThan(GpVersion.V0_7_1)
19109
+ ? {
19110
+ service: JsonServiceInfo.fromJson,
19111
+ preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
19112
+ storage: json.optional(json.array(JsonStorageItem.fromJson)),
19113
+ lookup_meta: json.optional(json.array(lookupMetaFromJson)),
19114
+ }
19115
+ : {
19116
+ service: JsonServiceInfo.fromJson,
19117
+ storage: json.optional(json.array(JsonStorageItem.fromJson)),
19118
+ preimages_blob: json.optional(json.array(JsonPreimageItem.fromJson)),
19119
+ preimages_status: json.optional(json.array(preimageStatusFromJson)),
19120
+ },
19029
19121
  },
19030
19122
  ({ id, data }) => {
19123
+ const preimages = HashDictionary.fromEntries(
19124
+ (data.preimages ?? data.preimages_blob ?? []).map((x) => [x.hash, x]),
19125
+ );
19126
+
19031
19127
  const lookupHistory = HashDictionary.new<PreimageHash, LookupHistoryItem[]>();
19032
- for (const item of data.lookup_meta ?? []) {
19128
+
19129
+ for (const item of data.lookup_meta ?? data.preimages_status ?? []) {
19033
19130
  const data = lookupHistory.get(item.hash) ?? [];
19034
- data.push(item);
19131
+ const length = tryAsU32(preimages.get(item.hash)?.blob.length ?? item.length);
19132
+ data.push(new LookupHistoryItem(item.hash, length, item.slots));
19035
19133
  lookupHistory.set(item.hash, data);
19036
19134
  }
19037
- const preimages = HashDictionary.fromEntries((data.preimages ?? []).map((x) => [x.hash, x]));
19135
+
19038
19136
  const storage = new Map<string, StorageItem>();
19039
19137
 
19040
19138
  const entries = (data.storage ?? []).map(({ key, value }) => {
@@ -19061,6 +19159,8 @@ declare class JsonService {
19061
19159
  preimages?: JsonPreimageItem[];
19062
19160
  storage?: JsonStorageItem[];
19063
19161
  lookup_meta?: LookupHistoryItem[];
19162
+ preimages_blob?: JsonPreimageItem[];
19163
+ preimages_status?: LookupHistoryItem[];
19064
19164
  };
19065
19165
  }
19066
19166
 
@@ -19070,8 +19170,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
19070
19170
  timeout: "number",
19071
19171
  },
19072
19172
  ({ report, timeout }) => {
19073
- const workReportHash = blake2b.hashBytes(Encoder.encodeObject(WorkReport.Codec, report)).asOpaque();
19074
- return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
19173
+ return AvailabilityAssignment.create({ workReport: report, timeout });
19075
19174
  },
19076
19175
  );
19077
19176
 
@@ -19292,8 +19391,12 @@ declare class JsonServiceStatistics {
19292
19391
  extrinsic_count: "number",
19293
19392
  accumulate_count: "number",
19294
19393
  accumulate_gas_used: json.fromNumber(tryAsServiceGas),
19295
- on_transfers_count: "number",
19296
- on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
19394
+ ...(Compatibility.isLessThan(GpVersion.V0_7_1)
19395
+ ? {
19396
+ on_transfers_count: "number",
19397
+ on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
19398
+ }
19399
+ : {}),
19297
19400
  },
19298
19401
  ({
19299
19402
  provided_count,
@@ -19320,8 +19423,8 @@ declare class JsonServiceStatistics {
19320
19423
  extrinsicCount: extrinsic_count,
19321
19424
  accumulateCount: accumulate_count,
19322
19425
  accumulateGasUsed: accumulate_gas_used,
19323
- onTransfersCount: on_transfers_count,
19324
- onTransfersGasUsed: on_transfers_gas_used,
19426
+ onTransfersCount: on_transfers_count ?? tryAsU32(0),
19427
+ onTransfersGasUsed: on_transfers_gas_used ?? tryAsServiceGas(0),
19325
19428
  });
19326
19429
  },
19327
19430
  );
@@ -19336,8 +19439,8 @@ declare class JsonServiceStatistics {
19336
19439
  extrinsic_count!: U16;
19337
19440
  accumulate_count!: U32;
19338
19441
  accumulate_gas_used!: ServiceGas;
19339
- on_transfers_count!: U32;
19340
- on_transfers_gas_used!: ServiceGas;
19442
+ on_transfers_count?: U32;
19443
+ on_transfers_gas_used?: ServiceGas;
19341
19444
  }
19342
19445
 
19343
19446
  type ServiceStatisticsEntry = {
@@ -19409,8 +19512,9 @@ type JsonStateDump = {
19409
19512
  tau: State["timeslot"];
19410
19513
  chi: {
19411
19514
  chi_m: PrivilegedServices["manager"];
19412
- chi_a: PrivilegedServices["authManager"];
19413
- chi_v: PrivilegedServices["validatorsManager"];
19515
+ chi_a: PrivilegedServices["assigners"];
19516
+ chi_v: PrivilegedServices["delegator"];
19517
+ chi_r?: PrivilegedServices["registrar"];
19414
19518
  chi_g: PrivilegedServices["autoAccumulateServices"] | null;
19415
19519
  };
19416
19520
  pi: JsonStatisticsData;
@@ -19443,6 +19547,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19443
19547
  chi_m: "number",
19444
19548
  chi_a: json.array("number"),
19445
19549
  chi_v: "number",
19550
+ chi_r: json.optional("number"),
19446
19551
  chi_g: json.nullable(
19447
19552
  json.array({
19448
19553
  service: "number",
@@ -19475,6 +19580,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19475
19580
  theta,
19476
19581
  accounts,
19477
19582
  }): InMemoryState => {
19583
+ if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) && chi.chi_r === undefined) {
19584
+ throw new Error("Registrar is required in Privileges GP ^0.7.1");
19585
+ }
19478
19586
  return InMemoryState.create({
19479
19587
  authPools: tryAsPerCore(
19480
19588
  alpha.map((perCore) => {
@@ -19508,8 +19616,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19508
19616
  timeslot: tau,
19509
19617
  privilegedServices: PrivilegedServices.create({
19510
19618
  manager: chi.chi_m,
19511
- authManager: chi.chi_a,
19512
- validatorsManager: chi.chi_v,
19619
+ assigners: chi.chi_a,
19620
+ delegator: chi.chi_v,
19621
+ registrar: chi.chi_r ?? tryAsServiceId(2 ** 32 - 1),
19513
19622
  autoAccumulateServices: chi.chi_g ?? [],
19514
19623
  }),
19515
19624
  statistics: JsonStatisticsData.toStatisticsData(spec, pi),
@@ -19532,6 +19641,7 @@ declare const index$1_JsonDisputesRecords: typeof JsonDisputesRecords;
19532
19641
  type index$1_JsonLookupMeta = JsonLookupMeta;
19533
19642
  type index$1_JsonPreimageItem = JsonPreimageItem;
19534
19643
  declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
19644
+ type index$1_JsonPreimageStatus = JsonPreimageStatus;
19535
19645
  type index$1_JsonRecentBlockState = JsonRecentBlockState;
19536
19646
  type index$1_JsonRecentBlocks = JsonRecentBlocks;
19537
19647
  type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
@@ -19556,6 +19666,7 @@ declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
19556
19666
  declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
19557
19667
  declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
19558
19668
  declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
19669
+ declare const index$1_preimageStatusFromJson: typeof preimageStatusFromJson;
19559
19670
  declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
19560
19671
  declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
19561
19672
  declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
@@ -19563,8 +19674,8 @@ declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEn
19563
19674
  declare const index$1_ticketFromJson: typeof ticketFromJson;
19564
19675
  declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
19565
19676
  declare namespace index$1 {
19566
- export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
19567
- export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
19677
+ export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_preimageStatusFromJson as preimageStatusFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
19678
+ export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonPreimageStatus as JsonPreimageStatus, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
19568
19679
  }
19569
19680
 
19570
19681
  /** Helper function to create most used hashes in the block */
@@ -19572,7 +19683,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19572
19683
  constructor(
19573
19684
  private readonly context: ChainSpec,
19574
19685
  private readonly keccakHasher: KeccakHasher,
19575
- private readonly allocator: HashAllocator,
19686
+ public readonly blake2b: Blake2b,
19576
19687
  ) {}
19577
19688
 
19578
19689
  /** Concatenates two hashes and hash this concatenation */
@@ -19586,7 +19697,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19586
19697
 
19587
19698
  /** Creates hash from the block header view */
19588
19699
  header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
19589
- return new WithHash(blake2b.hashBytes(header.encoded(), this.allocator).asOpaque(), header);
19700
+ return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
19590
19701
  }
19591
19702
 
19592
19703
  /**
@@ -19600,7 +19711,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19600
19711
  .view()
19601
19712
  .map((g) => g.view())
19602
19713
  .map((guarantee) => {
19603
- const reportHash = blake2b.hashBytes(guarantee.report.encoded(), this.allocator).asOpaque<WorkReportHash>();
19714
+ const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
19604
19715
  return BytesBlob.blobFromParts([
19605
19716
  reportHash.raw,
19606
19717
  guarantee.slot.encoded().raw,
@@ -19610,15 +19721,15 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19610
19721
 
19611
19722
  const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);
19612
19723
 
19613
- const et = blake2b.hashBytes(extrinsicView.tickets.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19614
- const ep = blake2b.hashBytes(extrinsicView.preimages.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19615
- const eg = blake2b.hashBytes(guaranteeBlob, this.allocator).asOpaque<ExtrinsicHash>();
19616
- const ea = blake2b.hashBytes(extrinsicView.assurances.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19617
- const ed = blake2b.hashBytes(extrinsicView.disputes.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19724
+ const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
19725
+ const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
19726
+ const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
19727
+ const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
19728
+ const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
19618
19729
 
19619
19730
  const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);
19620
19731
 
19621
- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), extrinsicView, encoded);
19732
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
19622
19733
  }
19623
19734
 
19624
19735
  /** Creates hash for given WorkPackage */
@@ -19629,7 +19740,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19629
19740
  private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
19630
19741
  // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
19631
19742
  const encoded = Encoder.encodeObject(codec, data, this.context);
19632
- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), data, encoded);
19743
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
19633
19744
  }
19634
19745
  }
19635
19746
 
@@ -19650,7 +19761,10 @@ declare enum PreimagesErrorCode {
19650
19761
 
19651
19762
  // TODO [SeKo] consider whether this module is the right place to remove expired preimages
19652
19763
  declare class Preimages {
19653
- constructor(public readonly state: PreimagesState) {}
19764
+ constructor(
19765
+ public readonly state: PreimagesState,
19766
+ public readonly blake2b: Blake2b,
19767
+ ) {}
19654
19768
 
19655
19769
  integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
19656
19770
  // make sure lookup extrinsics are sorted and unique
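Preimages receives the hasher in its constructor and uses it to derive preimage hashes during integration. Sketch, with state (PreimagesState) and input (PreimagesInput) as placeholders:

// Sketch only: integrate preimage extrinsics into the preimages state.
const blake2b = await Blake2b.createHasher();
const preimages = new Preimages(state, blake2b);
const result = preimages.integrate(input);
if (result.isError) {
  // result.error is a PreimagesErrorCode
}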
@@ -19679,7 +19793,7 @@ declare class Preimages {
19679
19793
  // select preimages for integration
19680
19794
  for (const preimage of preimages) {
19681
19795
  const { requester, blob } = preimage;
19682
- const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
19796
+ const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();
19683
19797
 
19684
19798
  const service = this.state.getService(requester);
19685
19799
  if (service === null) {
@@ -19710,156 +19824,6 @@ declare class Preimages {
19710
19824
  }
19711
19825
  }
19712
19826
 
19713
- declare enum ServiceExecutorError {
19714
- NoLookup = 0,
19715
- NoState = 1,
19716
- NoServiceCode = 2,
19717
- ServiceCodeMismatch = 3,
19718
- }
19719
-
19720
- declare class WorkPackageExecutor {
19721
- constructor(
19722
- private readonly blocks: BlocksDb,
19723
- private readonly state: StatesDb,
19724
- private readonly hasher: TransitionHasher,
19725
- ) {}
19726
-
19727
- // TODO [ToDr] this while thing should be triple-checked with the GP.
19728
- // I'm currently implementing some dirty version for the demo.
19729
- async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
19730
- const headerHash = pack.context.lookupAnchor;
19731
- // execute authorisation first or is it already executed and we just need to check it?
19732
- const authExec = this.getServiceExecutor(
19733
- // TODO [ToDr] should this be anchor or lookupAnchor?
19734
- headerHash,
19735
- pack.authCodeHost,
19736
- pack.authCodeHash,
19737
- );
19738
-
19739
- if (authExec.isError) {
19740
- // TODO [ToDr] most likely shouldn't be throw.
19741
- throw new Error(`Could not get authorization executor: ${authExec.error}`);
19742
- }
19743
-
19744
- const pvm = authExec.ok;
19745
- const authGas = tryAsGas(15_000n);
19746
- const result = await pvm.run(pack.parametrization, authGas);
19747
-
19748
- if (!result.isEqualTo(pack.authorization)) {
19749
- throw new Error("Authorization is invalid.");
19750
- }
19751
-
19752
- const results: WorkResult[] = [];
19753
- for (const item of pack.items) {
19754
- const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
19755
- if (exec.isError) {
19756
- throw new Error(`Could not get item executor: ${exec.error}`);
19757
- }
19758
- const pvm = exec.ok;
19759
-
19760
- const gasRatio = tryAsServiceGas(3_000n);
19761
- const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
19762
- results.push(
19763
- WorkResult.create({
19764
- serviceId: item.service,
19765
- codeHash: item.codeHash,
19766
- payloadHash: blake2b.hashBytes(item.payload),
19767
- gas: gasRatio,
19768
- result: new WorkExecResult(WorkExecResultKind.ok, ret),
19769
- load: WorkRefineLoad.create({
19770
- gasUsed: tryAsServiceGas(5),
19771
- importedSegments: tryAsU32(0),
19772
- exportedSegments: tryAsU32(0),
19773
- extrinsicSize: tryAsU32(0),
19774
- extrinsicCount: tryAsU32(0),
19775
- }),
19776
- }),
19777
- );
19778
- }
19779
-
19780
- const workPackage = this.hasher.workPackage(pack);
19781
- const workPackageSpec = WorkPackageSpec.create({
19782
- hash: workPackage.hash,
19783
- length: tryAsU32(workPackage.encoded.length),
19784
- erasureRoot: Bytes.zero(HASH_SIZE),
19785
- exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
19786
- exportsCount: tryAsU16(0),
19787
- });
19788
- const coreIndex = tryAsCoreIndex(0);
19789
- const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
19790
-
19791
- const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
19792
-
19793
- return Promise.resolve(
19794
- WorkReport.create({
19795
- workPackageSpec,
19796
- context: pack.context,
19797
- coreIndex,
19798
- authorizerHash,
19799
- authorizationOutput: pack.authorization,
19800
- segmentRootLookup: [],
19801
- results: workResults,
19802
- authorizationGasUsed: tryAsServiceGas(0),
19803
- }),
19804
- );
19805
- }
19806
-
19807
- getServiceExecutor(
19808
- lookupAnchor: HeaderHash,
19809
- serviceId: ServiceId,
19810
- expectedCodeHash: CodeHash,
19811
- ): Result$2<PvmExecutor, ServiceExecutorError> {
19812
- const header = this.blocks.getHeader(lookupAnchor);
19813
- if (header === null) {
19814
- return Result.error(ServiceExecutorError.NoLookup);
19815
- }
19816
-
19817
- const state = this.state.getState(lookupAnchor);
19818
- if (state === null) {
19819
- return Result.error(ServiceExecutorError.NoState);
19820
- }
19821
-
19822
- const service = state.getService(serviceId);
19823
- const serviceCodeHash = service?.getInfo().codeHash ?? null;
19824
- if (serviceCodeHash === null) {
19825
- return Result.error(ServiceExecutorError.NoServiceCode);
19826
- }
19827
-
19828
- if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
19829
- return Result.error(ServiceExecutorError.ServiceCodeMismatch);
19830
- }
19831
-
19832
- const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
19833
- if (serviceCode === null) {
19834
- return Result.error(ServiceExecutorError.NoServiceCode);
19835
- }
19836
-
19837
- return Result.ok(new PvmExecutor(serviceCode));
19838
- }
19839
- }
19840
-
19841
- declare class PvmExecutor {
19842
- private readonly pvm: HostCalls;
19843
- private hostCalls = new HostCallsManager({ missing: new Missing() });
19844
- private pvmInstanceManager = new PvmInstanceManager(4);
19845
-
19846
- constructor(private serviceCode: BytesBlob) {
19847
- this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
19848
- }
19849
-
19850
- async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
19851
- const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
19852
-
19853
- const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
19854
-
19855
- if (result.hasMemorySlice()) {
19856
- return BytesBlob.blobFrom(result.memorySlice);
19857
- }
19858
-
19859
- return BytesBlob.empty();
19860
- }
19861
- }
19862
-
19863
19827
  type index_Preimages = Preimages;
19864
19828
  declare const index_Preimages: typeof Preimages;
19865
19829
  type index_PreimagesErrorCode = PreimagesErrorCode;
@@ -19869,10 +19833,8 @@ type index_PreimagesState = PreimagesState;
19869
19833
  type index_PreimagesStateUpdate = PreimagesStateUpdate;
19870
19834
  type index_TransitionHasher = TransitionHasher;
19871
19835
  declare const index_TransitionHasher: typeof TransitionHasher;
19872
- type index_WorkPackageExecutor = WorkPackageExecutor;
19873
- declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
19874
19836
  declare namespace index {
19875
- export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher, index_WorkPackageExecutor as WorkPackageExecutor };
19837
+ export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
19876
19838
  export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
19877
19839
  }
19878
19840