@typeberry/lib 0.1.3-135961b → 0.1.3-2fdafd6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4):
  1. package/index.cjs +682 -1366
  2. package/index.d.ts +790 -829
  3. package/index.js +681 -1365
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -35,7 +35,9 @@ declare function parseCurrentVersion(env?: string): GpVersion | undefined {
35
35
  }
36
36
 
37
37
  declare function parseCurrentSuite(env?: string): TestSuite | undefined {
38
- if (env === undefined) return undefined;
38
+ if (env === undefined) {
39
+ return undefined;
40
+ }
39
41
  switch (env) {
40
42
  case TestSuite.W3F_DAVXY:
41
43
  case TestSuite.JAMDUNA:
@@ -420,6 +422,20 @@ declare const Result$2 = {
420
422
  },
421
423
  };
422
424
 
425
+ // about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
426
+ // - https://issues.chromium.org/issues/40055619
427
+ // - https://stackoverflow.com/a/72124984
428
+ // - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
429
+ declare const MAX_LENGTH$1 = 2145386496;
430
+
431
+ declare function safeAllocUint8Array(length: number) {
432
+ if (length > MAX_LENGTH) {
433
+ // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
434
+ console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
435
+ }
436
+ return new Uint8Array(Math.min(MAX_LENGTH, length));
437
+ }
438
+
423
439
  /**
424
440
  * Utilities for tests.
425
441
  */
@@ -573,8 +589,12 @@ declare function deepEqual<T>(
573
589
  const aKey = `${a.key}`;
574
590
  const bKey = `${b.key}`;
575
591
 
576
- if (aKey < bKey) return -1;
577
- if (bKey < aKey) return 1;
592
+ if (aKey < bKey) {
593
+ return -1;
594
+ }
595
+ if (bKey < aKey) {
596
+ return 1;
597
+ }
578
598
  return 0;
579
599
  });
580
600
  };
@@ -755,11 +775,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
755
775
  declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
756
776
  declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
757
777
  declare const index$u_resultToString: typeof resultToString;
778
+ declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
758
779
  declare const index$u_seeThrough: typeof seeThrough;
759
780
  declare const index$u_trimStack: typeof trimStack;
760
781
  declare const index$u_workspacePathFix: typeof workspacePathFix;
761
782
  declare namespace index$u {
762
- export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
783
+ export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
763
784
  export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
764
785
  }
765
786
 
@@ -929,7 +950,7 @@ declare class BytesBlob {
929
950
  static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
930
951
  const vArr = v instanceof Uint8Array ? [v] : v;
931
952
  const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
932
- const buffer = new Uint8Array(totalLength);
953
+ const buffer = safeAllocUint8Array(totalLength);
933
954
  let offset = 0;
934
955
  for (const r of vArr) {
935
956
  buffer.set(r, offset);
@@ -1012,7 +1033,7 @@ declare class Bytes<T extends number> extends BytesBlob {
1012
1033
 
1013
1034
  /** Create an empty [`Bytes<X>`] of given length. */
1014
1035
  static zero<X extends number>(len: X): Bytes<X> {
1015
- return new Bytes(new Uint8Array(len), len);
1036
+ return new Bytes(safeAllocUint8Array(len), len);
1016
1037
  }
1017
1038
 
1018
1039
  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1154,7 @@ declare class BitVec {
1133
1154
  * Create new [`BitVec`] with all values set to `false`.
1134
1155
  */
1135
1156
  static empty(bitLength: number) {
1136
- const data = new Uint8Array(Math.ceil(bitLength / 8));
1157
+ const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
1137
1158
  return new BitVec(data, bitLength);
1138
1159
  }
1139
1160
 
@@ -3461,6 +3482,99 @@ declare namespace index$q {
3461
3482
  export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
3462
3483
  }
3463
3484
 
3485
+ /**
3486
+ * A utility class providing a readonly view over a portion of an array without copying it.
3487
+ */
3488
+ declare class ArrayView<T> implements Iterable<T> {
3489
+ private readonly source: T[];
3490
+ public readonly length: number;
3491
+
3492
+ private constructor(
3493
+ source: T[],
3494
+ private readonly start: number,
3495
+ private readonly end: number,
3496
+ ) {
3497
+ this.source = source;
3498
+ this.length = end - start;
3499
+ }
3500
+
3501
+ static from<T>(source: T[], start = 0, end = source.length): ArrayView<T> {
3502
+ check`
3503
+ ${start >= 0 && end <= source.length && start <= end}
3504
+ Invalid start (${start})/end (${end}) for ArrayView
3505
+ `;
3506
+ return new ArrayView(source, start, end);
3507
+ }
3508
+
3509
+ get(i: number): T {
3510
+ check`
3511
+ ${i >= 0 && i < this.length}
3512
+ Index out of bounds: ${i} < ${this.length}
3513
+ `;
3514
+ return this.source[this.start + i];
3515
+ }
3516
+
3517
+ subview(from: number, to: number = this.length): ArrayView<T> {
3518
+ return ArrayView.from(this.source, this.start + from, this.start + to);
3519
+ }
3520
+
3521
+ toArray(): T[] {
3522
+ return this.source.slice(this.start, this.end);
3523
+ }
3524
+
3525
+ *[Symbol.iterator](): Iterator<T> {
3526
+ for (let i = this.start; i < this.end; i++) {
3527
+ yield this.source[i];
3528
+ }
3529
+ }
3530
+ }
3531
+
3532
+ type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
3533
+ type IDataType = string | Buffer | ITypedArray;
3534
+
3535
+ type IHasher = {
3536
+ /**
3537
+ * Initializes hash state to default value
3538
+ */
3539
+ init: () => IHasher;
3540
+ /**
3541
+ * Updates the hash content with the given data
3542
+ */
3543
+ update: (data: IDataType) => IHasher;
3544
+ /**
3545
+ * Calculates the hash of all of the data passed to be hashed with hash.update().
3546
+ * Defaults to hexadecimal string
3547
+ * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
3548
+ * returns hexadecimal string
3549
+ */
3550
+ digest: {
3551
+ (outputType: "binary"): Uint8Array;
3552
+ (outputType?: "hex"): string;
3553
+ };
3554
+ /**
3555
+ * Save the current internal state of the hasher for later resumption with load().
3556
+ * Cannot be called before .init() or after .digest()
3557
+ *
3558
+ * Note that this state can include arbitrary information about the value being hashed (e.g.
3559
+ * could include N plaintext bytes from the value), so needs to be treated as being as
3560
+ * sensitive as the input value itself.
3561
+ */
3562
+ save: () => Uint8Array;
3563
+ /**
3564
+ * Resume a state that was created by save(). If this state was not created by a
3565
+ * compatible build of hash-wasm, an exception will be thrown.
3566
+ */
3567
+ load: (state: Uint8Array) => IHasher;
3568
+ /**
3569
+ * Block size in bytes
3570
+ */
3571
+ blockSize: number;
3572
+ /**
3573
+ * Digest size in bytes
3574
+ */
3575
+ digestSize: number;
3576
+ };
3577
+
3464
3578
  /**
3465
3579
  * Size of the output of the hash functions.
3466
3580
  *
@@ -3516,144 +3630,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
3516
3630
  }
3517
3631
  }
3518
3632
 
3519
- /** Allocator interface - returns an empty bytes vector that can be filled with the hash. */
3520
- interface HashAllocator {
3521
- /** Return a new hash destination. */
3522
- emptyHash(): OpaqueHash;
3523
- }
3524
-
3525
- /** The simplest allocator returning just a fresh copy of bytes each time. */
3526
- declare class SimpleAllocator implements HashAllocator {
3527
- emptyHash(): OpaqueHash {
3528
- return Bytes.zero(HASH_SIZE);
3529
- }
3530
- }
3531
-
3532
- /** An allocator that works by allocating larger (continuous) pages of memory. */
3533
- declare class PageAllocator implements HashAllocator {
3534
- private page: Uint8Array = new Uint8Array(0);
3535
- private currentHash = 0;
3633
+ declare const zero$1 = Bytes.zero(HASH_SIZE);
3536
3634
 
3537
- // TODO [ToDr] Benchmark the performance!
3538
- constructor(private readonly hashesPerPage: number) {
3539
- check`${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
3540
- this.resetPage();
3635
+ declare class Blake2b {
3636
+ static async createHasher() {
3637
+ return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
3541
3638
  }
3542
3639
 
3543
- private resetPage() {
3544
- const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
3545
- this.currentHash = 0;
3546
- this.page = new Uint8Array(pageSizeBytes);
3547
- }
3548
-
3549
- emptyHash(): OpaqueHash {
3550
- const startIdx = this.currentHash * HASH_SIZE;
3551
- const endIdx = startIdx + HASH_SIZE;
3640
+ private constructor(private readonly hasher: IHasher) {}
3552
3641
 
3553
- this.currentHash += 1;
3554
- if (this.currentHash >= this.hashesPerPage) {
3555
- this.resetPage();
3642
+ /**
3643
+ * Hash given collection of blobs.
3644
+ *
3645
+ * If empty array is given a zero-hash is returned.
3646
+ */
3647
+ hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
3648
+ if (r.length === 0) {
3649
+ return zero.asOpaque();
3556
3650
  }
3557
3651
 
3558
- return Bytes.fromBlob(this.page.subarray(startIdx, endIdx), HASH_SIZE);
3652
+ const hasher = this.hasher.init();
3653
+ for (const v of r) {
3654
+ hasher.update(v instanceof BytesBlob ? v.raw : v);
3655
+ }
3656
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
3559
3657
  }
3560
- }
3561
-
3562
- declare const defaultAllocator = new SimpleAllocator();
3563
3658
 
3564
- /**
3565
- * Hash given collection of blobs.
3566
- *
3567
- * If empty array is given a zero-hash is returned.
3568
- */
3569
- declare function hashBlobs$1<H extends Blake2bHash>(
3570
- r: (BytesBlob | Uint8Array)[],
3571
- allocator: HashAllocator = defaultAllocator,
3572
- ): H {
3573
- const out = allocator.emptyHash();
3574
- if (r.length === 0) {
3575
- return out.asOpaque();
3659
+ /** Hash given blob of bytes. */
3660
+ hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
3661
+ const hasher = this.hasher.init();
3662
+ const bytes = blob instanceof BytesBlob ? blob.raw : blob;
3663
+ hasher.update(bytes);
3664
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
3576
3665
  }
3577
3666
 
3578
- const hasher = blake2b(HASH_SIZE);
3579
- for (const v of r) {
3580
- hasher?.update(v instanceof BytesBlob ? v.raw : v);
3667
+ /** Convert given string into bytes and hash it. */
3668
+ hashString(str: string) {
3669
+ return this.hashBytes(BytesBlob.blobFromString(str));
3581
3670
  }
3582
- hasher?.digest(out.raw);
3583
- return out.asOpaque();
3584
- }
3585
-
3586
- /** Hash given blob of bytes. */
3587
- declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
3588
- const hasher = blake2b(HASH_SIZE);
3589
- const bytes = blob instanceof BytesBlob ? blob.raw : blob;
3590
- hasher?.update(bytes);
3591
- const out = allocator.emptyHash();
3592
- hasher?.digest(out.raw);
3593
- return out;
3594
- }
3595
-
3596
- /** Convert given string into bytes and hash it. */
3597
- declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
3598
- return hashBytes(BytesBlob.blobFromString(str), allocator);
3599
- }
3600
-
3601
- declare const blake2b_hashBytes: typeof hashBytes;
3602
- declare const blake2b_hashString: typeof hashString;
3603
- declare namespace blake2b {
3604
- export {
3605
- hashBlobs$1 as hashBlobs,
3606
- blake2b_hashBytes as hashBytes,
3607
- blake2b_hashString as hashString,
3608
- };
3609
3671
  }
3610
3672
 
3611
- type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
3612
- type IDataType = string | Buffer | ITypedArray;
3613
-
3614
- type IHasher = {
3615
- /**
3616
- * Initializes hash state to default value
3617
- */
3618
- init: () => IHasher;
3619
- /**
3620
- * Updates the hash content with the given data
3621
- */
3622
- update: (data: IDataType) => IHasher;
3623
- /**
3624
- * Calculates the hash of all of the data passed to be hashed with hash.update().
3625
- * Defaults to hexadecimal string
3626
- * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
3627
- * returns hexadecimal string
3628
- */
3629
- digest: {
3630
- (outputType: "binary"): Uint8Array;
3631
- (outputType?: "hex"): string;
3632
- };
3633
- /**
3634
- * Save the current internal state of the hasher for later resumption with load().
3635
- * Cannot be called before .init() or after .digest()
3636
- *
3637
- * Note that this state can include arbitrary information about the value being hashed (e.g.
3638
- * could include N plaintext bytes from the value), so needs to be treated as being as
3639
- * sensitive as the input value itself.
3640
- */
3641
- save: () => Uint8Array;
3642
- /**
3643
- * Resume a state that was created by save(). If this state was not created by a
3644
- * compatible build of hash-wasm, an exception will be thrown.
3645
- */
3646
- load: (state: Uint8Array) => IHasher;
3647
- /**
3648
- * Block size in bytes
3649
- */
3650
- blockSize: number;
3651
- /**
3652
- * Digest size in bytes
3653
- */
3654
- digestSize: number;
3655
- };
3656
-
3657
3673
  declare class KeccakHasher {
3658
3674
  static async create(): Promise<KeccakHasher> {
3659
3675
  return new KeccakHasher(await createKeccak(256));
@@ -3681,15 +3697,15 @@ declare namespace keccak {
3681
3697
  };
3682
3698
  }
3683
3699
 
3700
+ // TODO [ToDr] (#213) this should most likely be moved to a separate
3701
+ // package to avoid pulling in unnecessary deps.
3702
+
3703
+ type index$p_Blake2b = Blake2b;
3704
+ declare const index$p_Blake2b: typeof Blake2b;
3684
3705
  type index$p_Blake2bHash = Blake2bHash;
3685
3706
  type index$p_HASH_SIZE = HASH_SIZE;
3686
- type index$p_HashAllocator = HashAllocator;
3687
3707
  type index$p_KeccakHash = KeccakHash;
3688
3708
  type index$p_OpaqueHash = OpaqueHash;
3689
- type index$p_PageAllocator = PageAllocator;
3690
- declare const index$p_PageAllocator: typeof PageAllocator;
3691
- type index$p_SimpleAllocator = SimpleAllocator;
3692
- declare const index$p_SimpleAllocator: typeof SimpleAllocator;
3693
3709
  type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
3694
3710
  type index$p_TruncatedHash = TruncatedHash;
3695
3711
  type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3697,12 +3713,10 @@ declare const index$p_WithHash: typeof WithHash;
3697
3713
  type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
3698
3714
  declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
3699
3715
  declare const index$p_ZERO_HASH: typeof ZERO_HASH;
3700
- declare const index$p_blake2b: typeof blake2b;
3701
- declare const index$p_defaultAllocator: typeof defaultAllocator;
3702
3716
  declare const index$p_keccak: typeof keccak;
3703
3717
  declare namespace index$p {
3704
- export { index$p_PageAllocator as PageAllocator, index$p_SimpleAllocator as SimpleAllocator, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_blake2b as blake2b, index$p_defaultAllocator as defaultAllocator, index$p_keccak as keccak };
3705
- export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_HashAllocator as HashAllocator, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
3718
+ export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
3719
+ export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
3706
3720
  }
3707
3721
 
3708
3722
  /** Immutable view of the `HashDictionary`. */
@@ -4479,6 +4493,8 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
4479
4493
  }
4480
4494
  }
4481
4495
 
4496
+ type index$o_ArrayView<T> = ArrayView<T>;
4497
+ declare const index$o_ArrayView: typeof ArrayView;
4482
4498
  type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
4483
4499
  declare const index$o_FixedSizeArray: typeof FixedSizeArray;
4484
4500
  type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
@@ -4506,7 +4522,7 @@ type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDic
4506
4522
  declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
4507
4523
  declare const index$o_asKnownSize: typeof asKnownSize;
4508
4524
  declare namespace index$o {
4509
- export { index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
4525
+ export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
4510
4526
  export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
4511
4527
  }
4512
4528
 
@@ -4735,7 +4751,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
4735
4751
  (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
4736
4752
  0,
4737
4753
  );
4738
- const data = new Uint8Array(dataLength);
4754
+ const data = safeAllocUint8Array(dataLength);
4739
4755
 
4740
4756
  let offset = 0;
4741
4757
 
@@ -4825,22 +4841,16 @@ declare function trivialSeed(s: U32): KeySeed {
4825
4841
  * Derives a Ed25519 secret key from a seed.
4826
4842
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
4827
4843
  */
4828
- declare function deriveEd25519SecretKey(
4829
- seed: KeySeed,
4830
- allocator: SimpleAllocator = new SimpleAllocator(),
4831
- ): Ed25519SecretSeed {
4832
- return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
4844
+ declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
4845
+ return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
4833
4846
  }
4834
4847
 
4835
4848
  /**
4836
4849
  * Derives a Bandersnatch secret key from a seed.
4837
4850
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
4838
4851
  */
4839
- declare function deriveBandersnatchSecretKey(
4840
- seed: KeySeed,
4841
- allocator: SimpleAllocator = new SimpleAllocator(),
4842
- ): BandersnatchSecretSeed {
4843
- return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
4852
+ declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
4853
+ return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
4844
4854
  }
4845
4855
 
4846
4856
  /**
@@ -8373,7 +8383,7 @@ declare enum NodeType {
8373
8383
  declare class TrieNode {
8374
8384
  constructor(
8375
8385
  /** Exactly 512 bits / 64 bytes */
8376
- public readonly raw: Uint8Array = new Uint8Array(TRIE_NODE_BYTES),
8386
+ public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
8377
8387
  ) {}
8378
8388
 
8379
8389
  /** Returns the type of the node */
@@ -9111,21 +9121,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
9111
9121
  return Ordering.Equal;
9112
9122
  }
9113
9123
 
9114
- declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
9115
- Descriptor.withView(
9116
- val.name,
9117
- val.sizeHint,
9118
- (e, elem) => val.encode(e, elem.data),
9119
- (d): WithHash<H, T> => {
9120
- const decoder2 = d.clone();
9121
- const encoded = val.skipEncoded(decoder2);
9122
- const hash = blake2b.hashBytes(encoded);
9123
- return new WithHash(hash.asOpaque(), val.decode(d));
9124
- },
9125
- val.skip,
9126
- val.View,
9127
- );
9128
-
9129
9124
  /**
9130
9125
  * Assignment of particular work report to a core.
9131
9126
  *
@@ -9136,7 +9131,7 @@ declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>
9136
9131
  */
9137
9132
  declare class AvailabilityAssignment extends WithDebug {
9138
9133
  static Codec = codec.Class(AvailabilityAssignment, {
9139
- workReport: codecWithHash(WorkReport.Codec),
9134
+ workReport: WorkReport.Codec,
9140
9135
  timeout: codec.u32.asOpaque<TimeSlot>(),
9141
9136
  });
9142
9137
 
@@ -9146,7 +9141,7 @@ declare class AvailabilityAssignment extends WithDebug {
9146
9141
 
9147
9142
  private constructor(
9148
9143
  /** Work report assigned to a core. */
9149
- public readonly workReport: WithHash<WorkReportHash, WorkReport>,
9144
+ public readonly workReport: WorkReport,
9150
9145
  /** Time slot at which the report becomes obsolete. */
9151
9146
  public readonly timeout: TimeSlot,
9152
9147
  ) {
@@ -9250,8 +9245,6 @@ declare function hashComparator<V extends OpaqueHash>(a: V, b: V) {
9250
9245
  return a.compare(b);
9251
9246
  }
9252
9247
 
9253
- // TODO [ToDr] Not sure where these should live yet :(
9254
-
9255
9248
  /**
9256
9249
  * `J`: The maximum sum of dependency items in a work-report.
9257
9250
  *
@@ -9305,87 +9298,305 @@ declare class NotYetAccumulatedReport extends WithDebug {
9305
9298
  }
9306
9299
  }
9307
9300
 
9308
- /** Dictionary entry of services that auto-accumulate every block. */
9309
- declare class AutoAccumulate {
9310
- static Codec = codec.Class(AutoAccumulate, {
9311
- service: codec.u32.asOpaque<ServiceId>(),
9312
- gasLimit: codec.u64.asOpaque<ServiceGas>(),
9313
- });
9314
-
9315
- static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
9316
- return new AutoAccumulate(service, gasLimit);
9317
- }
9318
-
9319
- private constructor(
9320
- /** Service id that auto-accumulates. */
9321
- readonly service: ServiceId,
9322
- /** Gas limit for auto-accumulation. */
9323
- readonly gasLimit: ServiceGas,
9324
- ) {}
9325
- }
9326
-
9327
9301
  /**
9328
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/11da0111da01?v=0.6.7
9302
+ * `B_S`: The basic minimum balance which all services require.
9303
+ *
9304
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
9329
9305
  */
9330
- declare class PrivilegedServices {
9331
- static Codec = codec.Class(PrivilegedServices, {
9332
- manager: codec.u32.asOpaque<ServiceId>(),
9333
- authManager: codecPerCore(codec.u32.asOpaque<ServiceId>()),
9334
- validatorsManager: codec.u32.asOpaque<ServiceId>(),
9335
- autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
9336
- });
9337
-
9338
- static create({ manager, authManager, validatorsManager, autoAccumulateServices }: CodecRecord<PrivilegedServices>) {
9339
- return new PrivilegedServices(manager, authManager, validatorsManager, autoAccumulateServices);
9340
- }
9341
-
9342
- private constructor(
9343
- /**
9344
- * `chi_m`: The first, χm, is the index of the manager service which is
9345
- * the service able to effect an alteration of χ from block to block,
9346
- * as well as bestow services with storage deposit credits.
9347
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/11a40111a801?v=0.6.7
9348
- */
9349
- readonly manager: ServiceId,
9350
- /** `chi_a`: Manages authorization queue one for each core. */
9351
- readonly authManager: PerCore<ServiceId>,
9352
- /** `chi_v`: Managers validator keys. */
9353
- readonly validatorsManager: ServiceId,
9354
- /** `chi_g`: Dictionary of services that auto-accumulate every block with their gas limit. */
9355
- readonly autoAccumulateServices: readonly AutoAccumulate[],
9356
- ) {}
9357
- }
9306
+ declare const BASE_SERVICE_BALANCE = 100n;
9307
+ /**
9308
+ * `B_I`: The additional minimum balance required per item of elective service state.
9309
+ *
9310
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
9311
+ */
9312
+ declare const ELECTIVE_ITEM_BALANCE = 10n;
9313
+ /**
9314
+ * `B_L`: The additional minimum balance required per octet of elective service state.
9315
+ *
9316
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
9317
+ */
9318
+ declare const ELECTIVE_BYTE_BALANCE = 1n;
9358
9319
 
9359
- declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
9320
+ declare const zeroSizeHint: SizeHint = {
9321
+ bytes: 0,
9322
+ isExact: true,
9323
+ };
9360
9324
 
9361
- /** Merkle Mountain Range peaks. */
9362
- interface MmrPeaks<H extends OpaqueHash> {
9363
- /**
9364
- * Peaks at particular positions.
9365
- *
9366
- * In case there is no merkle trie at given index, `null` is placed.
9367
- */
9368
- peaks: readonly (H | null)[];
9369
- }
9325
+ /** 0-byte read, return given default value */
9326
+ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
9327
+ Descriptor.new<T>(
9328
+ "ignoreValue",
9329
+ zeroSizeHint,
9330
+ (_e, _v) => {},
9331
+ (_d) => defaultValue,
9332
+ (_s) => {},
9333
+ );
9370
9334
 
9371
- /** Hasher interface for MMR. */
9372
- interface MmrHasher<H extends OpaqueHash> {
9373
- /** Hash two items together. */
9374
- hashConcat(a: H, b: H): H;
9375
- /** Hash two items together with extra bytes blob prepended. */
9376
- hashConcatPrepend(id: BytesBlob, a: H, b: H): H;
9377
- }
9335
+ /** Encode and decode object with leading version number. */
9336
+ declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
9337
+ Descriptor.new<T>(
9338
+ "withVersion",
9339
+ {
9340
+ bytes: val.sizeHint.bytes + 8,
9341
+ isExact: false,
9342
+ },
9343
+ (e, v) => {
9344
+ e.varU64(0n);
9345
+ val.encode(e, v);
9346
+ },
9347
+ (d) => {
9348
+ const version = d.varU64();
9349
+ if (version !== 0n) {
9350
+ throw new Error("Non-zero version is not supported!");
9351
+ }
9352
+ return val.decode(d);
9353
+ },
9354
+ (s) => {
9355
+ s.varU64();
9356
+ val.skip(s);
9357
+ },
9358
+ );
9378
9359
 
9379
9360
  /**
9380
- * Merkle Mountain Range.
9361
+ * Service account details.
9381
9362
  *
9382
- * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
9363
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
9383
9364
  */
9384
- declare class MerkleMountainRange<H extends OpaqueHash> {
9385
- /** Construct an empty MMR. */
9386
- static empty<H extends OpaqueHash>(hasher: MmrHasher<H>) {
9387
- return new MerkleMountainRange(hasher);
9388
- }
9365
+ declare class ServiceAccountInfo extends WithDebug {
9366
+ static Codec = codec.Class(ServiceAccountInfo, {
9367
+ codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
9368
+ balance: codec.u64,
9369
+ accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9370
+ onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9371
+ storageUtilisationBytes: codec.u64,
9372
+ gratisStorage: codec.u64,
9373
+ storageUtilisationCount: codec.u32,
9374
+ created: codec.u32.convert((x) => x, tryAsTimeSlot),
9375
+ lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
9376
+ parentService: codec.u32.convert((x) => x, tryAsServiceId),
9377
+ });
9378
+
9379
+ static create(a: CodecRecord<ServiceAccountInfo>) {
9380
+ return new ServiceAccountInfo(
9381
+ a.codeHash,
9382
+ a.balance,
9383
+ a.accumulateMinGas,
9384
+ a.onTransferMinGas,
9385
+ a.storageUtilisationBytes,
9386
+ a.gratisStorage,
9387
+ a.storageUtilisationCount,
9388
+ a.created,
9389
+ a.lastAccumulation,
9390
+ a.parentService,
9391
+ );
9392
+ }
9393
+
9394
+ /**
9395
+ * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
9396
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
9397
+ */
9398
+ static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
9399
+ const storageCost =
9400
+ BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
9401
+
9402
+ if (storageCost < 0n) {
9403
+ return tryAsU64(0);
9404
+ }
9405
+
9406
+ if (storageCost >= 2n ** 64n) {
9407
+ return tryAsU64(2n ** 64n - 1n);
9408
+ }
9409
+
9410
+ return tryAsU64(storageCost);
9411
+ }
9412
+
9413
+ private constructor(
9414
+ /** `a_c`: Hash of the service code. */
9415
+ public readonly codeHash: CodeHash,
9416
+ /** `a_b`: Current account balance. */
9417
+ public readonly balance: U64,
9418
+ /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
9419
+ public readonly accumulateMinGas: ServiceGas,
9420
+ /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
9421
+ public readonly onTransferMinGas: ServiceGas,
9422
+ /** `a_o`: Total number of octets in storage. */
9423
+ public readonly storageUtilisationBytes: U64,
9424
+ /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
9425
+ public readonly gratisStorage: U64,
9426
+ /** `a_i`: Number of items in storage. */
9427
+ public readonly storageUtilisationCount: U32,
9428
+ /** `a_r`: Creation account time slot. */
9429
+ public readonly created: TimeSlot,
9430
+ /** `a_a`: Most recent accumulation time slot. */
9431
+ public readonly lastAccumulation: TimeSlot,
9432
+ /** `a_p`: Parent service ID. */
9433
+ public readonly parentService: ServiceId,
9434
+ ) {
9435
+ super();
9436
+ }
9437
+ }
9438
+
9439
+ declare class PreimageItem extends WithDebug {
9440
+ static Codec = codec.Class(PreimageItem, {
9441
+ hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
9442
+ blob: codec.blob,
9443
+ });
9444
+
9445
+ static create({ hash, blob }: CodecRecord<PreimageItem>) {
9446
+ return new PreimageItem(hash, blob);
9447
+ }
9448
+
9449
+ private constructor(
9450
+ readonly hash: PreimageHash,
9451
+ readonly blob: BytesBlob,
9452
+ ) {
9453
+ super();
9454
+ }
9455
+ }
9456
+
9457
+ type StorageKey = Opaque<BytesBlob, "storage key">;
9458
+
9459
+ declare class StorageItem extends WithDebug {
9460
+ static Codec = codec.Class(StorageItem, {
9461
+ key: codec.blob.convert(
9462
+ (i) => i,
9463
+ (o) => asOpaqueType(o),
9464
+ ),
9465
+ value: codec.blob,
9466
+ });
9467
+
9468
+ static create({ key, value }: CodecRecord<StorageItem>) {
9469
+ return new StorageItem(key, value);
9470
+ }
9471
+
9472
+ private constructor(
9473
+ readonly key: StorageKey,
9474
+ readonly value: BytesBlob,
9475
+ ) {
9476
+ super();
9477
+ }
9478
+ }
9479
+
9480
+ declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
9481
+ type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
9482
+ declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
9483
+ const knownSize = asKnownSize(items) as LookupHistorySlots;
9484
+ if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
9485
+ throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
9486
+ }
9487
+ return knownSize;
9488
+ }
9489
+
9490
+ /** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
9491
+ declare class LookupHistoryItem {
9492
+ constructor(
9493
+ public readonly hash: PreimageHash,
9494
+ public readonly length: U32,
9495
+ /**
9496
+ * Preimage availability history as a sequence of time slots.
9497
+ * See PreimageStatus and the following GP fragment for more details.
9498
+ * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
9499
+ public readonly slots: LookupHistorySlots,
9500
+ ) {}
9501
+
9502
+ static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
9503
+ if ("slots" in item) {
9504
+ return item.slots.length === 0;
9505
+ }
9506
+ return item.length === 0;
9507
+ }
9508
+ }
9509
+
9510
+ /** Dictionary entry of services that auto-accumulate every block. */
9511
+ declare class AutoAccumulate {
9512
+ static Codec = codec.Class(AutoAccumulate, {
9513
+ service: codec.u32.asOpaque<ServiceId>(),
9514
+ gasLimit: codec.u64.asOpaque<ServiceGas>(),
9515
+ });
9516
+
9517
+ static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
9518
+ return new AutoAccumulate(service, gasLimit);
9519
+ }
9520
+
9521
+ private constructor(
9522
+ /** Service id that auto-accumulates. */
9523
+ readonly service: ServiceId,
9524
+ /** Gas limit for auto-accumulation. */
9525
+ readonly gasLimit: ServiceGas,
9526
+ ) {}
9527
+ }
9528
+
9529
+ /**
9530
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
9531
+ */
9532
+ declare class PrivilegedServices {
9533
+ /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
9534
+ static Codec = codec.Class(PrivilegedServices, {
9535
+ manager: codec.u32.asOpaque<ServiceId>(),
9536
+ assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
9537
+ delegator: codec.u32.asOpaque<ServiceId>(),
9538
+ registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
9539
+ ? codec.u32.asOpaque<ServiceId>()
9540
+ : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
9541
+ autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
9542
+ });
9543
+
9544
+ static create(a: CodecRecord<PrivilegedServices>) {
9545
+ return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
9546
+ }
9547
+
9548
+ private constructor(
9549
+ /**
9550
+ * `χ_M`: Manages alteration of χ from block to block,
9551
+ * as well as bestow services with storage deposit credits.
9552
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
9553
+ */
9554
+ readonly manager: ServiceId,
9555
+ /** `χ_V`: Managers validator keys. */
9556
+ readonly delegator: ServiceId,
9557
+ /**
9558
+ * `χ_R`: Manages the creation of services in protected range.
9559
+ *
9560
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
9561
+ */
9562
+ readonly registrar: ServiceId,
9563
+ /** `χ_A`: Manages authorization queue one for each core. */
9564
+ readonly assigners: PerCore<ServiceId>,
9565
+ /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
9566
+ readonly autoAccumulateServices: readonly AutoAccumulate[],
9567
+ ) {}
9568
+ }
9569
+
9570
+ declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
9571
+
9572
+ /** Merkle Mountain Range peaks. */
9573
+ interface MmrPeaks<H extends OpaqueHash> {
9574
+ /**
9575
+ * Peaks at particular positions.
9576
+ *
9577
+ * In case there is no merkle trie at given index, `null` is placed.
9578
+ */
9579
+ peaks: readonly (H | null)[];
9580
+ }
9581
+
9582
+ /** Hasher interface for MMR. */
9583
+ interface MmrHasher<H extends OpaqueHash> {
9584
+ /** Hash two items together. */
9585
+ hashConcat(a: H, b: H): H;
9586
+ /** Hash two items together with extra bytes blob prepended. */
9587
+ hashConcatPrepend(id: BytesBlob, a: H, b: H): H;
9588
+ }
9589
+
9590
+ /**
9591
+ * Merkle Mountain Range.
9592
+ *
9593
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
9594
+ */
9595
+ declare class MerkleMountainRange<H extends OpaqueHash> {
9596
+ /** Construct an empty MMR. */
9597
+ static empty<H extends OpaqueHash>(hasher: MmrHasher<H>) {
9598
+ return new MerkleMountainRange(hasher);
9599
+ }
9389
9600
 
9390
9601
  /** Construct a new MMR from existing peaks. */
9391
9602
  static fromPeaks<H extends OpaqueHash>(hasher: MmrHasher<H>, mmr: MmrPeaks<H>) {
@@ -9635,353 +9846,169 @@ declare class RecentBlocksHistory extends WithDebug {
9635
9846
 
9636
9847
  throw new Error("RecentBlocksHistory is in invalid state");
9637
9848
  }
9638
-
9639
- asCurrent() {
9640
- if (this.current === null) {
9641
- throw new Error("Cannot access current RecentBlocks format");
9642
- }
9643
- return this.current;
9644
- }
9645
-
9646
- updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
9647
- if (this.current !== null) {
9648
- return RecentBlocksHistory.create(
9649
- RecentBlocks.create({
9650
- ...this.current,
9651
- blocks: asOpaqueType(blocks as BlockState[]),
9652
- }),
9653
- );
9654
- }
9655
-
9656
- throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
9657
- }
9658
- }
9659
-
9660
- /**
9661
- * Fixed size of validator metadata.
9662
- *
9663
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
9664
- */
9665
- declare const VALIDATOR_META_BYTES = 128;
9666
- type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
9667
-
9668
- /**
9669
- * Details about validators' identity.
9670
- *
9671
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
9672
- */
9673
- declare class ValidatorData extends WithDebug {
9674
- static Codec = codec.Class(ValidatorData, {
9675
- bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
9676
- ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
9677
- bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
9678
- metadata: codec.bytes(VALIDATOR_META_BYTES),
9679
- });
9680
-
9681
- static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
9682
- return new ValidatorData(bandersnatch, ed25519, bls, metadata);
9683
- }
9684
-
9685
- private constructor(
9686
- /** Bandersnatch public key. */
9687
- public readonly bandersnatch: BandersnatchKey,
9688
- /** ED25519 key data. */
9689
- public readonly ed25519: Ed25519Key,
9690
- /** BLS public key. */
9691
- public readonly bls: BlsKey,
9692
- /** Validator-defined additional metdata. */
9693
- public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
9694
- ) {
9695
- super();
9696
- }
9697
- }
9698
-
9699
- declare enum SafroleSealingKeysKind {
9700
- Tickets = 0,
9701
- Keys = 1,
9702
- }
9703
-
9704
- type SafroleSealingKeys =
9705
- | {
9706
- kind: SafroleSealingKeysKind.Keys;
9707
- keys: PerEpochBlock<BandersnatchKey>;
9708
- }
9709
- | {
9710
- kind: SafroleSealingKeysKind.Tickets;
9711
- tickets: PerEpochBlock<Ticket>;
9712
- };
9713
-
9714
- declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
9715
-
9716
- declare class SafroleSealingKeysData extends WithDebug {
9717
- static Codec = codecWithContext((context) => {
9718
- return codec.custom<SafroleSealingKeys>(
9719
- {
9720
- name: "SafroleSealingKeys",
9721
- sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
9722
- },
9723
- (e, x) => {
9724
- e.varU32(tryAsU32(x.kind));
9725
- if (x.kind === SafroleSealingKeysKind.Keys) {
9726
- e.sequenceFixLen(codecBandersnatchKey, x.keys);
9727
- } else {
9728
- e.sequenceFixLen(Ticket.Codec, x.tickets);
9729
- }
9730
- },
9731
- (d) => {
9732
- const epochLength = context.epochLength;
9733
- const kind = d.varU32();
9734
- if (kind === SafroleSealingKeysKind.Keys) {
9735
- const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
9736
- return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
9737
- }
9738
-
9739
- if (kind === SafroleSealingKeysKind.Tickets) {
9740
- const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
9741
- return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
9742
- }
9743
-
9744
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9745
- },
9746
- (s) => {
9747
- const kind = s.decoder.varU32();
9748
- if (kind === SafroleSealingKeysKind.Keys) {
9749
- s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
9750
- return;
9751
- }
9752
- if (kind === SafroleSealingKeysKind.Tickets) {
9753
- s.sequenceFixLen(Ticket.Codec, context.epochLength);
9754
- return;
9755
- }
9756
-
9757
- throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9758
- },
9759
- );
9760
- });
9761
-
9762
- static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
9763
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
9764
- }
9765
-
9766
- static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
9767
- return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
9768
- }
9769
-
9770
- private constructor(
9771
- readonly kind: SafroleSealingKeysKind,
9772
- readonly keys?: PerEpochBlock<BandersnatchKey>,
9773
- readonly tickets?: PerEpochBlock<Ticket>,
9774
- ) {
9775
- super();
9776
- }
9777
- }
9778
-
9779
- declare class SafroleData {
9780
- static Codec = codec.Class(SafroleData, {
9781
- nextValidatorData: codecPerValidator(ValidatorData.Codec),
9782
- epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
9783
- sealingKeySeries: SafroleSealingKeysData.Codec,
9784
- ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
9785
- });
9786
-
9787
- static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
9788
- return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
9789
- }
9790
-
9791
- private constructor(
9792
- /** gamma_k */
9793
- public readonly nextValidatorData: PerValidator<ValidatorData>,
9794
- /** gamma_z */
9795
- public readonly epochRoot: BandersnatchRingRoot,
9796
- /** gamma_s */
9797
- public readonly sealingKeySeries: SafroleSealingKeys,
9798
- /** gamma_a */
9799
- public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
9800
- ) {}
9801
- }
9802
-
9803
- /**
9804
- * `B_S`: The basic minimum balance which all services require.
9805
- *
9806
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
9807
- */
9808
- declare const BASE_SERVICE_BALANCE = 100n;
9809
- /**
9810
- * `B_I`: The additional minimum balance required per item of elective service state.
9811
- *
9812
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
9813
- */
9814
- declare const ELECTIVE_ITEM_BALANCE = 10n;
9815
- /**
9816
- * `B_L`: The additional minimum balance required per octet of elective service state.
9817
- *
9818
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
9819
- */
9820
- declare const ELECTIVE_BYTE_BALANCE = 1n;
9821
-
9822
- declare const zeroSizeHint: SizeHint = {
9823
- bytes: 0,
9824
- isExact: true,
9825
- };
9826
-
9827
- /** 0-byte read, return given default value */
9828
- declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
9829
- Descriptor.new<T>(
9830
- "ignoreValue",
9831
- zeroSizeHint,
9832
- (_e, _v) => {},
9833
- (_d) => defaultValue,
9834
- (_s) => {},
9835
- );
9836
-
9837
- /**
9838
- * Service account details.
9839
- *
9840
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
9841
- */
9842
- declare class ServiceAccountInfo extends WithDebug {
9843
- static Codec = codec.Class(ServiceAccountInfo, {
9844
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
9845
- balance: codec.u64,
9846
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9847
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9848
- storageUtilisationBytes: codec.u64,
9849
- gratisStorage: codec.u64,
9850
- storageUtilisationCount: codec.u32,
9851
- created: codec.u32.convert((x) => x, tryAsTimeSlot),
9852
- lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
9853
- parentService: codec.u32.convert((x) => x, tryAsServiceId),
9854
- });
9855
-
9856
- static create(a: CodecRecord<ServiceAccountInfo>) {
9857
- return new ServiceAccountInfo(
9858
- a.codeHash,
9859
- a.balance,
9860
- a.accumulateMinGas,
9861
- a.onTransferMinGas,
9862
- a.storageUtilisationBytes,
9863
- a.gratisStorage,
9864
- a.storageUtilisationCount,
9865
- a.created,
9866
- a.lastAccumulation,
9867
- a.parentService,
9868
- );
9869
- }
9870
-
9871
- /**
9872
- * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
9873
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
9874
- */
9875
- static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
9876
- const storageCost =
9877
- BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
9878
-
9879
- if (storageCost < 0n) {
9880
- return tryAsU64(0);
9881
- }
9882
-
9883
- if (storageCost >= 2n ** 64n) {
9884
- return tryAsU64(2n ** 64n - 1n);
9885
- }
9886
-
9887
- return tryAsU64(storageCost);
9888
- }
9889
-
9890
- private constructor(
9891
- /** `a_c`: Hash of the service code. */
9892
- public readonly codeHash: CodeHash,
9893
- /** `a_b`: Current account balance. */
9894
- public readonly balance: U64,
9895
- /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
9896
- public readonly accumulateMinGas: ServiceGas,
9897
- /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
9898
- public readonly onTransferMinGas: ServiceGas,
9899
- /** `a_o`: Total number of octets in storage. */
9900
- public readonly storageUtilisationBytes: U64,
9901
- /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
9902
- public readonly gratisStorage: U64,
9903
- /** `a_i`: Number of items in storage. */
9904
- public readonly storageUtilisationCount: U32,
9905
- /** `a_r`: Creation account time slot. */
9906
- public readonly created: TimeSlot,
9907
- /** `a_a`: Most recent accumulation time slot. */
9908
- public readonly lastAccumulation: TimeSlot,
9909
- /** `a_p`: Parent service ID. */
9910
- public readonly parentService: ServiceId,
9911
- ) {
9912
- super();
9913
- }
9849
+
9850
+ asCurrent() {
9851
+ if (this.current === null) {
9852
+ throw new Error("Cannot access current RecentBlocks format");
9853
+ }
9854
+ return this.current;
9855
+ }
9856
+
9857
+ updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
9858
+ if (this.current !== null) {
9859
+ return RecentBlocksHistory.create(
9860
+ RecentBlocks.create({
9861
+ ...this.current,
9862
+ blocks: asOpaqueType(blocks as BlockState[]),
9863
+ }),
9864
+ );
9865
+ }
9866
+
9867
+ throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
9868
+ }
9914
9869
  }
9915
9870
 
9916
- declare class PreimageItem extends WithDebug {
9917
- static Codec = codec.Class(PreimageItem, {
9918
- hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
9919
- blob: codec.blob,
9871
+ /**
9872
+ * Fixed size of validator metadata.
9873
+ *
9874
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
9875
+ */
9876
+ declare const VALIDATOR_META_BYTES = 128;
9877
+ type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
9878
+
9879
+ /**
9880
+ * Details about validators' identity.
9881
+ *
9882
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
9883
+ */
9884
+ declare class ValidatorData extends WithDebug {
9885
+ static Codec = codec.Class(ValidatorData, {
9886
+ bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
9887
+ ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
9888
+ bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
9889
+ metadata: codec.bytes(VALIDATOR_META_BYTES),
9920
9890
  });
9921
9891
 
9922
- static create({ hash, blob }: CodecRecord<PreimageItem>) {
9923
- return new PreimageItem(hash, blob);
9892
+ static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
9893
+ return new ValidatorData(bandersnatch, ed25519, bls, metadata);
9924
9894
  }
9925
9895
 
9926
9896
  private constructor(
9927
- readonly hash: PreimageHash,
9928
- readonly blob: BytesBlob,
9897
+ /** Bandersnatch public key. */
9898
+ public readonly bandersnatch: BandersnatchKey,
9899
+ /** ED25519 key data. */
9900
+ public readonly ed25519: Ed25519Key,
9901
+ /** BLS public key. */
9902
+ public readonly bls: BlsKey,
9903
+ /** Validator-defined additional metdata. */
9904
+ public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
9929
9905
  ) {
9930
9906
  super();
9931
9907
  }
9932
9908
  }
9933
9909
 
9934
- type StorageKey = Opaque<BytesBlob, "storage key">;
9910
+ declare enum SafroleSealingKeysKind {
9911
+ Tickets = 0,
9912
+ Keys = 1,
9913
+ }
9935
9914
 
9936
- declare class StorageItem extends WithDebug {
9937
- static Codec = codec.Class(StorageItem, {
9938
- key: codec.blob.convert(
9939
- (i) => i,
9940
- (o) => asOpaqueType(o),
9941
- ),
9942
- value: codec.blob,
9915
+ type SafroleSealingKeys =
9916
+ | {
9917
+ kind: SafroleSealingKeysKind.Keys;
9918
+ keys: PerEpochBlock<BandersnatchKey>;
9919
+ }
9920
+ | {
9921
+ kind: SafroleSealingKeysKind.Tickets;
9922
+ tickets: PerEpochBlock<Ticket>;
9923
+ };
9924
+
9925
+ declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
9926
+
9927
+ declare class SafroleSealingKeysData extends WithDebug {
9928
+ static Codec = codecWithContext((context) => {
9929
+ return codec.custom<SafroleSealingKeys>(
9930
+ {
9931
+ name: "SafroleSealingKeys",
9932
+ sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
9933
+ },
9934
+ (e, x) => {
9935
+ e.varU32(tryAsU32(x.kind));
9936
+ if (x.kind === SafroleSealingKeysKind.Keys) {
9937
+ e.sequenceFixLen(codecBandersnatchKey, x.keys);
9938
+ } else {
9939
+ e.sequenceFixLen(Ticket.Codec, x.tickets);
9940
+ }
9941
+ },
9942
+ (d) => {
9943
+ const epochLength = context.epochLength;
9944
+ const kind = d.varU32();
9945
+ if (kind === SafroleSealingKeysKind.Keys) {
9946
+ const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
9947
+ return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
9948
+ }
9949
+
9950
+ if (kind === SafroleSealingKeysKind.Tickets) {
9951
+ const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
9952
+ return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
9953
+ }
9954
+
9955
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9956
+ },
9957
+ (s) => {
9958
+ const kind = s.decoder.varU32();
9959
+ if (kind === SafroleSealingKeysKind.Keys) {
9960
+ s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
9961
+ return;
9962
+ }
9963
+ if (kind === SafroleSealingKeysKind.Tickets) {
9964
+ s.sequenceFixLen(Ticket.Codec, context.epochLength);
9965
+ return;
9966
+ }
9967
+
9968
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9969
+ },
9970
+ );
9943
9971
  });
9944
9972
 
9945
- static create({ key, value }: CodecRecord<StorageItem>) {
9946
- return new StorageItem(key, value);
9973
+ static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
9974
+ return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
9975
+ }
9976
+
9977
+ static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
9978
+ return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
9947
9979
  }
9948
9980
 
9949
9981
  private constructor(
9950
- readonly key: StorageKey,
9951
- readonly value: BytesBlob,
9982
+ readonly kind: SafroleSealingKeysKind,
9983
+ readonly keys?: PerEpochBlock<BandersnatchKey>,
9984
+ readonly tickets?: PerEpochBlock<Ticket>,
9952
9985
  ) {
9953
9986
  super();
9954
9987
  }
9955
9988
  }
9956
9989
 
9957
- declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
9958
- type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
9959
- declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
9960
- const knownSize = asKnownSize(items) as LookupHistorySlots;
9961
- if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
9962
- throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
9990
+ declare class SafroleData {
9991
+ static Codec = codec.Class(SafroleData, {
9992
+ nextValidatorData: codecPerValidator(ValidatorData.Codec),
9993
+ epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
9994
+ sealingKeySeries: SafroleSealingKeysData.Codec,
9995
+ ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
9996
+ });
9997
+
9998
+ static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
9999
+ return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
9963
10000
  }
9964
- return knownSize;
9965
- }
9966
10001
 
9967
- /** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
9968
- declare class LookupHistoryItem {
9969
- constructor(
9970
- public readonly hash: PreimageHash,
9971
- public readonly length: U32,
9972
- /**
9973
- * Preimage availability history as a sequence of time slots.
9974
- * See PreimageStatus and the following GP fragment for more details.
9975
- * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
9976
- public readonly slots: LookupHistorySlots,
10002
+ private constructor(
10003
+ /** gamma_k */
10004
+ public readonly nextValidatorData: PerValidator<ValidatorData>,
10005
+ /** gamma_z */
10006
+ public readonly epochRoot: BandersnatchRingRoot,
10007
+ /** gamma_s */
10008
+ public readonly sealingKeySeries: SafroleSealingKeys,
10009
+ /** gamma_a */
10010
+ public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
9977
10011
  ) {}
9978
-
9979
- static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
9980
- if ("slots" in item) {
9981
- return item.slots.length === 0;
9982
- }
9983
- return item.length === 0;
9984
- }
9985
10012
  }
9986
10013
 
9987
10014
  declare const codecServiceId: Descriptor<ServiceId> =
@@ -11158,8 +11185,9 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
11158
11185
  epochRoot: Bytes.zero(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
11159
11186
  privilegedServices: PrivilegedServices.create({
11160
11187
  manager: tryAsServiceId(0),
11161
- authManager: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
11162
- validatorsManager: tryAsServiceId(0),
11188
+ assigners: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
11189
+ delegator: tryAsServiceId(0),
11190
+ registrar: tryAsServiceId(MAX_VALUE),
11163
11191
  autoAccumulateServices: [],
11164
11192
  }),
11165
11193
  accumulationOutputLog: SortedArray.fromArray(accumulationOutputComparator, []),
@@ -11293,7 +11321,7 @@ declare const index$e_codecPerCore: typeof codecPerCore;
11293
11321
  declare const index$e_codecServiceId: typeof codecServiceId;
11294
11322
  declare const index$e_codecVarGas: typeof codecVarGas;
11295
11323
  declare const index$e_codecVarU16: typeof codecVarU16;
11296
- declare const index$e_codecWithHash: typeof codecWithHash;
11324
+ declare const index$e_codecWithVersion: typeof codecWithVersion;
11297
11325
  declare const index$e_hashComparator: typeof hashComparator;
11298
11326
  declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
11299
11327
  declare const index$e_serviceDataCodec: typeof serviceDataCodec;
@@ -11304,7 +11332,7 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
11304
11332
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
11305
11333
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
11306
11334
  declare namespace index$e {
11307
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as 
serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11335
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as 
serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11308
11336
  export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
11309
11337
  }
11310
11338
 
@@ -11372,7 +11400,7 @@ declare namespace stateKeys {
11372
11400
  }
11373
11401
 
11374
11402
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
11375
- export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
11403
+ export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
11376
11404
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11377
11405
  const out = Bytes.zero(HASH_SIZE);
11378
11406
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11380,11 +11408,11 @@ declare namespace stateKeys {
11380
11408
  return legacyServiceNested(serviceId, out);
11381
11409
  }
11382
11410
 
11383
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
11411
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
11384
11412
  }
11385
11413
 
11386
11414
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
11387
- export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
11415
+ export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
11388
11416
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11389
11417
  const out = Bytes.zero(HASH_SIZE);
11390
11418
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11392,11 +11420,16 @@ declare namespace stateKeys {
11392
11420
  return legacyServiceNested(serviceId, out);
11393
11421
  }
11394
11422
 
11395
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
11423
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
11396
11424
  }
11397
11425
 
11398
11426
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
11399
- export function serviceLookupHistory(serviceId: ServiceId, hash: PreimageHash, preimageLength: U32): StateKey {
11427
+ export function serviceLookupHistory(
11428
+ blake2b: Blake2b,
11429
+ serviceId: ServiceId,
11430
+ hash: PreimageHash,
11431
+ preimageLength: U32,
11432
+ ): StateKey {
11400
11433
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11401
11434
  const doubleHash = blake2b.hashBytes(hash);
11402
11435
  const out = Bytes.zero(HASH_SIZE);
@@ -11405,11 +11438,11 @@ declare namespace stateKeys {
11405
11438
  return legacyServiceNested(serviceId, out);
11406
11439
  }
11407
11440
 
11408
- return serviceNested(serviceId, preimageLength, hash);
11441
+ return serviceNested(blake2b, serviceId, preimageLength, hash);
11409
11442
  }
11410
11443
 
11411
11444
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
11412
- export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
11445
+ export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
11413
11446
  const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
11414
11447
  const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
11415
11448
  const key = Bytes.zero(HASH_SIZE);
@@ -11589,24 +11622,26 @@ declare namespace serialize {
11589
11622
  /** C(255, s): https://graypaper.fluffylabs.dev/#/85129da/383103383103?v=0.6.3 */
11590
11623
  export const serviceData = (serviceId: ServiceId) => ({
11591
11624
  key: stateKeys.serviceInfo(serviceId),
11592
- Codec: ServiceAccountInfo.Codec,
11625
+ Codec: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
11626
+ ? codecWithVersion(ServiceAccountInfo.Codec)
11627
+ : ServiceAccountInfo.Codec,
11593
11628
  });
11594
11629
 
11595
11630
  /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
11596
- export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
11597
- key: stateKeys.serviceStorage(serviceId, key),
11631
+ export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
11632
+ key: stateKeys.serviceStorage(blake2b, serviceId, key),
11598
11633
  Codec: dumpCodec,
11599
11634
  });
11600
11635
 
11601
11636
  /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
11602
- export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
11603
- key: stateKeys.servicePreimage(serviceId, hash),
11637
+ export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
11638
+ key: stateKeys.servicePreimage(blake2b, serviceId, hash),
11604
11639
  Codec: dumpCodec,
11605
11640
  });
11606
11641
 
11607
11642
  /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
11608
- export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
11609
- key: stateKeys.serviceLookupHistory(serviceId, hash, len),
11643
+ export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
11644
+ key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
11610
11645
  Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
11611
11646
  });
11612
11647
  }
@@ -11641,6 +11676,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
11641
11676
  /** Serialize given state update into a series of key-value pairs. */
11642
11677
  declare function* serializeStateUpdate(
11643
11678
  spec: ChainSpec,
11679
+ blake2b: Blake2b,
11644
11680
  update: Partial<State & ServicesUpdate>,
11645
11681
  ): Generator<StateEntryUpdate> {
11646
11682
  // first let's serialize all of the simple entries (if present!)
@@ -11649,9 +11685,9 @@ declare function* serializeStateUpdate(
11649
11685
  const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
11650
11686
 
11651
11687
  // then let's proceed with service updates
11652
- yield* serializeServiceUpdates(update.servicesUpdates, encode);
11653
- yield* serializePreimages(update.preimages, encode);
11654
- yield* serializeStorage(update.storage);
11688
+ yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
11689
+ yield* serializePreimages(update.preimages, encode, blake2b);
11690
+ yield* serializeStorage(update.storage, blake2b);
11655
11691
  yield* serializeRemovedServices(update.servicesRemoved);
11656
11692
  }
11657
11693
 
@@ -11663,18 +11699,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
11663
11699
  }
11664
11700
  }
11665
11701
 
11666
- declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
11702
+ declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
11667
11703
  for (const { action, serviceId } of storage ?? []) {
11668
11704
  switch (action.kind) {
11669
11705
  case UpdateStorageKind.Set: {
11670
11706
  const key = action.storage.key;
11671
- const codec = serialize.serviceStorage(serviceId, key);
11707
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
11672
11708
  yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
11673
11709
  break;
11674
11710
  }
11675
11711
  case UpdateStorageKind.Remove: {
11676
11712
  const key = action.key;
11677
- const codec = serialize.serviceStorage(serviceId, key);
11713
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
11678
11714
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11679
11715
  break;
11680
11716
  }
@@ -11684,16 +11720,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
11684
11720
  }
11685
11721
  }
11686
11722
 
11687
- declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, encode: EncodeFun): Generator<StateEntryUpdate> {
11723
+ declare function* serializePreimages(
11724
+ preimages: UpdatePreimage[] | undefined,
11725
+ encode: EncodeFun,
11726
+ blake2b: Blake2b,
11727
+ ): Generator<StateEntryUpdate> {
11688
11728
  for (const { action, serviceId } of preimages ?? []) {
11689
11729
  switch (action.kind) {
11690
11730
  case UpdatePreimageKind.Provide: {
11691
11731
  const { hash, blob } = action.preimage;
11692
- const codec = serialize.servicePreimages(serviceId, hash);
11732
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11693
11733
  yield [StateEntryUpdateAction.Insert, codec.key, blob];
11694
11734
 
11695
11735
  if (action.slot !== null) {
11696
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
11736
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
11697
11737
  yield [
11698
11738
  StateEntryUpdateAction.Insert,
11699
11739
  codec2.key,
@@ -11704,16 +11744,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
11704
11744
  }
11705
11745
  case UpdatePreimageKind.UpdateOrAdd: {
11706
11746
  const { hash, length, slots } = action.item;
11707
- const codec = serialize.serviceLookupHistory(serviceId, hash, length);
11747
+ const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11708
11748
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
11709
11749
  break;
11710
11750
  }
11711
11751
  case UpdatePreimageKind.Remove: {
11712
11752
  const { hash, length } = action;
11713
- const codec = serialize.servicePreimages(serviceId, hash);
11753
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11714
11754
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11715
11755
 
11716
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
11756
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11717
11757
  yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
11718
11758
  break;
11719
11759
  }
@@ -11725,6 +11765,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
11725
11765
  declare function* serializeServiceUpdates(
11726
11766
  servicesUpdates: UpdateService[] | undefined,
11727
11767
  encode: EncodeFun,
11768
+ blake2b: Blake2b,
11728
11769
  ): Generator<StateEntryUpdate> {
11729
11770
  for (const { action, serviceId } of servicesUpdates ?? []) {
11730
11771
  // new service being created or updated
@@ -11734,7 +11775,7 @@ declare function* serializeServiceUpdates(
11734
11775
  // additional lookup history update
11735
11776
  if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
11736
11777
  const { lookupHistory } = action;
11737
- const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
11778
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
11738
11779
  yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
11739
11780
  }
11740
11781
  }
@@ -11868,8 +11909,8 @@ declare class StateEntries {
11868
11909
  );
11869
11910
 
11870
11911
  /** Turn in-memory state into it's serialized form. */
11871
- static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
11872
- return new StateEntries(convertInMemoryStateToDictionary(spec, state));
11912
+ static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
11913
+ return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
11873
11914
  }
11874
11915
 
11875
11916
  /**
@@ -11924,7 +11965,8 @@ declare class StateEntries {
11924
11965
  }
11925
11966
 
11926
11967
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
11927
- getRootHash(): StateRootHash {
11968
+ getRootHash(blake2b: Blake2b): StateRootHash {
11969
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
11928
11970
  const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
11929
11971
  for (const [key, value] of this) {
11930
11972
  leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
@@ -11937,6 +11979,7 @@ declare class StateEntries {
11937
11979
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
11938
11980
  declare function convertInMemoryStateToDictionary(
11939
11981
  spec: ChainSpec,
11982
+ blake2b: Blake2b,
11940
11983
  state: InMemoryState,
11941
11984
  ): TruncatedHashDictionary<StateKey, BytesBlob> {
11942
11985
  const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11969,20 +12012,25 @@ declare function convertInMemoryStateToDictionary(
11969
12012
 
11970
12013
  // preimages
11971
12014
  for (const preimage of service.data.preimages.values()) {
11972
- const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
12015
+ const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
11973
12016
  serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
11974
12017
  }
11975
12018
 
11976
12019
  // storage
11977
12020
  for (const storage of service.data.storage.values()) {
11978
- const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
12021
+ const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
11979
12022
  serialized.set(key, Encoder.encodeObject(Codec, storage.value));
11980
12023
  }
11981
12024
 
11982
12025
  // lookup history
11983
12026
  for (const lookupHistoryList of service.data.lookupHistory.values()) {
11984
12027
  for (const lookupHistory of lookupHistoryList) {
11985
- const { key, Codec } = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
12028
+ const { key, Codec } = serialize.serviceLookupHistory(
12029
+ blake2b,
12030
+ serviceId,
12031
+ lookupHistory.hash,
12032
+ lookupHistory.length,
12033
+ );
11986
12034
  serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
11987
12035
  }
11988
12036
  }
@@ -12013,21 +12061,23 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12013
12061
  implements State, EnumerableState
12014
12062
  {
12015
12063
  /** Create a state-like object from collection of serialized entries. */
12016
- static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
12017
- return new SerializedState(spec, state, recentServices);
12064
+ static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
12065
+ return new SerializedState(spec, blake2b, state, recentServices);
12018
12066
  }
12019
12067
 
12020
12068
  /** Create a state-like object backed by some DB. */
12021
12069
  static new<T extends SerializedStateBackend>(
12022
12070
  spec: ChainSpec,
12071
+ blake2b: Blake2b,
12023
12072
  db: T,
12024
12073
  recentServices: ServiceId[] = [],
12025
12074
  ): SerializedState<T> {
12026
- return new SerializedState(spec, db, recentServices);
12075
+ return new SerializedState(spec, blake2b, db, recentServices);
12027
12076
  }
12028
12077
 
12029
12078
  private constructor(
12030
12079
  private readonly spec: ChainSpec,
12080
+ private readonly blake2b: Blake2b,
12031
12081
  public backend: T,
12032
12082
  /** Best-effort list of recently active services. */
12033
12083
  private readonly _recentServiceIds: ServiceId[],
@@ -12058,7 +12108,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12058
12108
  this._recentServiceIds.push(id);
12059
12109
  }
12060
12110
 
12061
- return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
12111
+ return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
12062
12112
  }
12063
12113
 
12064
12114
  private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
@@ -12157,6 +12207,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12157
12207
  /** Service data representation on a serialized state. */
12158
12208
  declare class SerializedService implements Service {
12159
12209
  constructor(
12210
+ public readonly blake2b: Blake2b,
12160
12211
  /** Service id */
12161
12212
  public readonly serviceId: ServiceId,
12162
12213
  private readonly accountInfo: ServiceAccountInfo,
@@ -12172,14 +12223,14 @@ declare class SerializedService implements Service {
12172
12223
  getStorage(rawKey: StorageKey): BytesBlob | null {
12173
12224
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
12174
12225
  const SERVICE_ID_BYTES = 4;
12175
- const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
12226
+ const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
12176
12227
  serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
12177
12228
  serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
12178
- const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
12179
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
12229
+ const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
12230
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
12180
12231
  }
12181
12232
 
12182
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
12233
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
12183
12234
  }
12184
12235
 
12185
12236
  /**
@@ -12189,17 +12240,17 @@ declare class SerializedService implements Service {
12189
12240
  */
12190
12241
  hasPreimage(hash: PreimageHash): boolean {
12191
12242
  // TODO [ToDr] consider optimizing to avoid fetching the whole data.
12192
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
12243
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
12193
12244
  }
12194
12245
 
12195
12246
  /** Retrieve preimage from the DB. */
12196
12247
  getPreimage(hash: PreimageHash): BytesBlob | null {
12197
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
12248
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
12198
12249
  }
12199
12250
 
12200
12251
  /** Retrieve preimage lookup history. */
12201
12252
  getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
12202
- const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
12253
+ const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
12203
12254
  if (rawSlots === undefined) {
12204
12255
  return null;
12205
12256
  }
@@ -12212,9 +12263,9 @@ type KeyAndCodec<T> = {
12212
12263
  Codec: Decode<T>;
12213
12264
  };
12214
12265
 
12215
- declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
12266
+ declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
12216
12267
  const stateEntries = StateEntries.fromEntriesUnsafe(entries);
12217
- return SerializedState.fromStateEntries(spec, stateEntries);
12268
+ return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
12218
12269
  }
12219
12270
 
12220
12271
  /**
@@ -12370,7 +12421,8 @@ declare class LeafDb implements SerializedStateBackend {
12370
12421
  assertNever(val);
12371
12422
  }
12372
12423
 
12373
- getStateRoot(): StateRootHash {
12424
+ getStateRoot(blake2b: Blake2b): StateRootHash {
12425
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
12374
12426
  return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
12375
12427
  }
12376
12428
 
@@ -12468,7 +12520,8 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
12468
12520
  }
12469
12521
 
12470
12522
  async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
12471
- return StateEntries.serializeInMemory(this.spec, state).getRootHash();
12523
+ const blake2b = await Blake2b.createHasher();
12524
+ return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
12472
12525
  }
12473
12526
 
12474
12527
  /** Insert a full state into the database. */
@@ -12573,7 +12626,7 @@ declare function padAndEncodeData(input: BytesBlob) {
12573
12626
  const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
12574
12627
  let padded = input;
12575
12628
  if (input.length !== paddedLength) {
12576
- padded = BytesBlob.blobFrom(new Uint8Array(paddedLength));
12629
+ padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
12577
12630
  padded.raw.set(input.raw, 0);
12578
12631
  }
12579
12632
  return chunkingFunction(padded);
@@ -12629,7 +12682,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
12629
12682
  */
12630
12683
  declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
12631
12684
  const result: Bytes<POINT_LENGTH>[] = [];
12632
- const data = new Uint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
12685
+ const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
12633
12686
 
12634
12687
  // add original shards to the result
12635
12688
  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12649,7 +12702,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
12649
12702
  for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
12650
12703
  const pointIndex = i * POINT_ALIGNMENT;
12651
12704
 
12652
- const redundancyPoint = new Uint8Array(POINT_LENGTH);
12705
+ const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
12653
12706
  for (let j = 0; j < POINT_LENGTH; j++) {
12654
12707
  redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
12655
12708
  }
@@ -12669,7 +12722,7 @@ declare function decodePiece(
12669
12722
  ): Bytes<PIECE_SIZE> {
12670
12723
  const result = Bytes.zero(PIECE_SIZE);
12671
12724
 
12672
- const data = new Uint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
12725
+ const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
12673
12726
  const indices = new Uint16Array(input.length);
12674
12727
 
12675
12728
  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12796,7 +12849,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
12796
12849
  return BytesBlob.empty();
12797
12850
  }
12798
12851
  const n = input[0].length;
12799
- const result = BytesBlob.blobFrom(new Uint8Array(k * n));
12852
+ const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
12800
12853
  for (let i = 0; i < k; i++) {
12801
12854
  const entry = input[i].raw;
12802
12855
  for (let j = 0; j < n; j++) {
@@ -13639,6 +13692,8 @@ declare enum NewServiceError {
13639
13692
  InsufficientFunds = 0,
13640
13693
  /** Service is not privileged to set gratis storage. */
13641
13694
  UnprivilegedService = 1,
13695
+ /** Registrar attempting to create a service with already existing id. */
13696
+ RegistrarServiceIdAlreadyTaken = 2,
13642
13697
  }
13643
13698
 
13644
13699
  declare enum UpdatePrivilegesError {
@@ -13704,14 +13759,18 @@ interface PartialState {
13704
13759
  ): Result$2<OK, TransferError>;
13705
13760
 
13706
13761
  /**
13707
- * Create a new service with given codeHash, length, gas, allowance and gratisStorage.
13762
+ * Create a new service with given codeHash, length, gas, allowance, gratisStorage and wantedServiceId.
13708
13763
  *
13709
- * Returns a newly assigned id of that service.
13710
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/2f4c022f4c02?v=0.6.7
13764
+ * Returns a newly assigned id
13765
+ * or `wantedServiceId` if it's lower than `S`
13766
+ * and parent of that service is `Registrar`.
13767
+ *
13768
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/2fa9042fc304?v=0.7.2
13711
13769
  *
13712
13770
  * An error can be returned in case the account does not
13713
13771
  * have the required balance
13714
- * or tries to set gratis storage without being privileged.
13772
+ * or tries to set gratis storage without being `Manager`
13773
+ * or `Registrar` tries to set service id thats already taken.
13715
13774
  */
13716
13775
  newService(
13717
13776
  codeHash: CodeHash,
@@ -13719,6 +13778,7 @@ interface PartialState {
13719
13778
  gas: ServiceGas,
13720
13779
  allowance: ServiceGas,
13721
13780
  gratisStorage: U64,
13781
+ wantedServiceId: U64,
13722
13782
  ): Result$2<ServiceId, NewServiceError>;
13723
13783
 
13724
13784
  /** Upgrade code of currently running service. */
@@ -13740,7 +13800,7 @@ interface PartialState {
13740
13800
  updateAuthorizationQueue(
13741
13801
  coreIndex: CoreIndex,
13742
13802
  authQueue: FixedSizeArray<Blake2bHash, AUTHORIZATION_QUEUE_SIZE>,
13743
- authManager: ServiceId | null,
13803
+ assigners: ServiceId | null,
13744
13804
  ): Result$2<OK, UpdatePrivilegesError>;
13745
13805
 
13746
13806
  /**
@@ -13749,14 +13809,16 @@ interface PartialState {
13749
13809
  * `m`: manager service (can change privileged services)
13750
13810
  * `a`: manages authorization queue
13751
13811
  * `v`: manages validator keys
13752
- * `g`: collection of serviceId -> gas that auto-accumulate every block
13812
+ * `r`: manages create new services in protected id range.
13813
+ * `z`: collection of serviceId -> gas that auto-accumulate every block
13753
13814
  *
13754
13815
  */
13755
13816
  updatePrivilegedServices(
13756
13817
  m: ServiceId | null,
13757
13818
  a: PerCore<ServiceId>,
13758
13819
  v: ServiceId | null,
13759
- g: [ServiceId, ServiceGas][],
13820
+ r: ServiceId | null,
13821
+ z: [ServiceId, ServiceGas][],
13760
13822
  ): Result$2<OK, UpdatePrivilegesError>;
13761
13823
 
13762
13824
  /** Yield accumulation trie result hash. */
@@ -13868,7 +13930,7 @@ declare class Mask {
13868
13930
  }
13869
13931
 
13870
13932
  private buildLookupTableForward(mask: BitVec) {
13871
- const table = new Uint8Array(mask.bitLength);
13933
+ const table = safeAllocUint8Array(mask.bitLength);
13872
13934
  let lastInstructionOffset = 0;
13873
13935
  for (let i = mask.bitLength - 1; i >= 0; i--) {
13874
13936
  if (mask.isSet(i)) {
@@ -14012,7 +14074,7 @@ declare class Registers {
14012
14074
  private asSigned: BigInt64Array;
14013
14075
  private asUnsigned: BigUint64Array;
14014
14076
 
14015
- constructor(private readonly bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
14077
+ constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
14016
14078
  check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
14017
14079
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
14018
14080
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
@@ -17674,7 +17736,7 @@ declare class AccumulationStateUpdate {
17674
17736
  /** Services state updates. */
17675
17737
  public readonly services: ServicesUpdate,
17676
17738
  /** Pending transfers. */
17677
- public readonly transfers: PendingTransfer[],
17739
+ public transfers: PendingTransfer[],
17678
17740
  /** Yielded accumulation root. */
17679
17741
  public readonly yieldedRoots: Map<ServiceId, OpaqueHash> = new Map(),
17680
17742
  ) {}
@@ -17725,11 +17787,18 @@ declare class AccumulationStateUpdate {
17725
17787
  if (from.privilegedServices !== null) {
17726
17788
  update.privilegedServices = PrivilegedServices.create({
17727
17789
  ...from.privilegedServices,
17728
- authManager: asKnownSize([...from.privilegedServices.authManager]),
17790
+ assigners: asKnownSize([...from.privilegedServices.assigners]),
17729
17791
  });
17730
17792
  }
17731
17793
  return update;
17732
17794
  }
17795
+
17796
+ /** Retrieve and clear pending transfers. */
17797
+ takeTransfers() {
17798
+ const transfers = this.transfers;
17799
+ this.transfers = [];
17800
+ return transfers;
17801
+ }
17733
17802
  }
17734
17803
 
17735
17804
  type StateSlice = Pick<State, "getService" | "privilegedServices">;
@@ -17996,7 +18065,7 @@ declare const HostCallResult = {
17996
18065
  OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
17997
18066
  /** Index unknown. */
17998
18067
  WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
17999
- /** Storage full. */
18068
+ /** Storage full or resource already allocated. */
18000
18069
  FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
18001
18070
  /** Core index unknown. */
18002
18071
  CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
@@ -18004,7 +18073,7 @@ declare const HostCallResult = {
18004
18073
  CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
18005
18074
  /** Gas limit too low. */
18006
18075
  LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
18007
- /** The item is already solicited or cannot be forgotten. */
18076
+ /** The item is already solicited, cannot be forgotten or the operation is invalid due to privilege level. */
18008
18077
  HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
18009
18078
  /** The return value indicating general success. */
18010
18079
  OK: tryAsU64(0n),
@@ -18239,7 +18308,7 @@ declare class HostCalls {
18239
18308
  const maybeAddress = regs.getLowerU32(7);
18240
18309
  const maybeLength = regs.getLowerU32(8);
18241
18310
 
18242
- const result = new Uint8Array(maybeLength);
18311
+ const result = safeAllocUint8Array(maybeLength);
18243
18312
  const startAddress = tryAsMemoryIndex(maybeAddress);
18244
18313
  const loadResult = memory.loadInto(result, startAddress);
18245
18314
 
@@ -18678,7 +18747,7 @@ declare class DebuggerAdapter {
18678
18747
 
18679
18748
  if (page === null) {
18680
18749
  // page wasn't allocated so we return an empty page
18681
- return new Uint8Array(PAGE_SIZE);
18750
+ return safeAllocUint8Array(PAGE_SIZE);
18682
18751
  }
18683
18752
 
18684
18753
  if (page.length === PAGE_SIZE) {
@@ -18687,7 +18756,7 @@ declare class DebuggerAdapter {
18687
18756
  }
18688
18757
 
18689
18758
  // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
18690
- const fullPage = new Uint8Array(PAGE_SIZE);
18759
+ const fullPage = safeAllocUint8Array(PAGE_SIZE);
18691
18760
  fullPage.set(page);
18692
18761
  return fullPage;
18693
18762
  }
@@ -18880,10 +18949,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
18880
18949
  *
18881
18950
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
18882
18951
  */
18883
- declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
18952
+ declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
18884
18953
  check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
18885
18954
  const n = arr.length;
18886
- const randomNumbers = hashToNumberSequence(entropy, arr.length);
18955
+ const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
18887
18956
  const result: T[] = new Array<T>(n);
18888
18957
 
18889
18958
  let itemsLeft = n;
@@ -18909,6 +18978,7 @@ declare namespace index$2 {
18909
18978
  declare class JsonServiceInfo {
18910
18979
  static fromJson = json.object<JsonServiceInfo, ServiceAccountInfo>(
18911
18980
  {
18981
+ ...(Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) ? { version: "number" } : {}),
18912
18982
  code_hash: fromJson.bytes32(),
18913
18983
  balance: json.fromNumber((x) => tryAsU64(x)),
18914
18984
  min_item_gas: json.fromNumber((x) => tryAsServiceGas(x)),
@@ -18947,6 +19017,7 @@ declare class JsonServiceInfo {
18947
19017
  },
18948
19018
  );
18949
19019
 
19020
+ version?: number;
18950
19021
  code_hash!: CodeHash;
18951
19022
  balance!: U64;
18952
19023
  min_item_gas!: ServiceGas;
@@ -18993,6 +19064,19 @@ declare const lookupMetaFromJson = json.object<JsonLookupMeta, LookupHistoryItem
18993
19064
  ({ key, value }) => new LookupHistoryItem(key.hash, key.length, value),
18994
19065
  );
18995
19066
 
19067
+ declare const preimageStatusFromJson = json.object<JsonPreimageStatus, LookupHistoryItem>(
19068
+ {
19069
+ hash: fromJson.bytes32(),
19070
+ status: json.array("number"),
19071
+ },
19072
+ ({ hash, status }) => new LookupHistoryItem(hash, tryAsU32(0), status),
19073
+ );
19074
+
19075
+ type JsonPreimageStatus = {
19076
+ hash: PreimageHash;
19077
+ status: LookupHistorySlots;
19078
+ };
19079
+
18996
19080
  type JsonLookupMeta = {
18997
19081
  key: {
18998
19082
  hash: PreimageHash;
@@ -19005,21 +19089,34 @@ declare class JsonService {
19005
19089
  static fromJson = json.object<JsonService, InMemoryService>(
19006
19090
  {
19007
19091
  id: "number",
19008
- data: {
19009
- service: JsonServiceInfo.fromJson,
19010
- preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
19011
- storage: json.optional(json.array(JsonStorageItem.fromJson)),
19012
- lookup_meta: json.optional(json.array(lookupMetaFromJson)),
19013
- },
19092
+ data: Compatibility.isLessThan(GpVersion.V0_7_1)
19093
+ ? {
19094
+ service: JsonServiceInfo.fromJson,
19095
+ preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
19096
+ storage: json.optional(json.array(JsonStorageItem.fromJson)),
19097
+ lookup_meta: json.optional(json.array(lookupMetaFromJson)),
19098
+ }
19099
+ : {
19100
+ service: JsonServiceInfo.fromJson,
19101
+ storage: json.optional(json.array(JsonStorageItem.fromJson)),
19102
+ preimages_blob: json.optional(json.array(JsonPreimageItem.fromJson)),
19103
+ preimages_status: json.optional(json.array(preimageStatusFromJson)),
19104
+ },
19014
19105
  },
19015
19106
  ({ id, data }) => {
19107
+ const preimages = HashDictionary.fromEntries(
19108
+ (data.preimages ?? data.preimages_blob ?? []).map((x) => [x.hash, x]),
19109
+ );
19110
+
19016
19111
  const lookupHistory = HashDictionary.new<PreimageHash, LookupHistoryItem[]>();
19017
- for (const item of data.lookup_meta ?? []) {
19112
+
19113
+ for (const item of data.lookup_meta ?? data.preimages_status ?? []) {
19018
19114
  const data = lookupHistory.get(item.hash) ?? [];
19019
- data.push(item);
19115
+ const length = tryAsU32(preimages.get(item.hash)?.blob.length ?? item.length);
19116
+ data.push(new LookupHistoryItem(item.hash, length, item.slots));
19020
19117
  lookupHistory.set(item.hash, data);
19021
19118
  }
19022
- const preimages = HashDictionary.fromEntries((data.preimages ?? []).map((x) => [x.hash, x]));
19119
+
19023
19120
  const storage = new Map<string, StorageItem>();
19024
19121
 
19025
19122
  const entries = (data.storage ?? []).map(({ key, value }) => {
@@ -19046,6 +19143,8 @@ declare class JsonService {
19046
19143
  preimages?: JsonPreimageItem[];
19047
19144
  storage?: JsonStorageItem[];
19048
19145
  lookup_meta?: LookupHistoryItem[];
19146
+ preimages_blob?: JsonPreimageItem[];
19147
+ preimages_status?: LookupHistoryItem[];
19049
19148
  };
19050
19149
  }
19051
19150
 
@@ -19055,8 +19154,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
19055
19154
  timeout: "number",
19056
19155
  },
19057
19156
  ({ report, timeout }) => {
19058
- const workReportHash = blake2b.hashBytes(Encoder.encodeObject(WorkReport.Codec, report)).asOpaque();
19059
- return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
19157
+ return AvailabilityAssignment.create({ workReport: report, timeout });
19060
19158
  },
19061
19159
  );
19062
19160
 
@@ -19277,8 +19375,12 @@ declare class JsonServiceStatistics {
19277
19375
  extrinsic_count: "number",
19278
19376
  accumulate_count: "number",
19279
19377
  accumulate_gas_used: json.fromNumber(tryAsServiceGas),
19280
- on_transfers_count: "number",
19281
- on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
19378
+ ...(Compatibility.isLessThan(GpVersion.V0_7_1)
19379
+ ? {
19380
+ on_transfers_count: "number",
19381
+ on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
19382
+ }
19383
+ : {}),
19282
19384
  },
19283
19385
  ({
19284
19386
  provided_count,
@@ -19305,8 +19407,8 @@ declare class JsonServiceStatistics {
19305
19407
  extrinsicCount: extrinsic_count,
19306
19408
  accumulateCount: accumulate_count,
19307
19409
  accumulateGasUsed: accumulate_gas_used,
19308
- onTransfersCount: on_transfers_count,
19309
- onTransfersGasUsed: on_transfers_gas_used,
19410
+ onTransfersCount: on_transfers_count ?? tryAsU32(0),
19411
+ onTransfersGasUsed: on_transfers_gas_used ?? tryAsServiceGas(0),
19310
19412
  });
19311
19413
  },
19312
19414
  );
@@ -19321,8 +19423,8 @@ declare class JsonServiceStatistics {
19321
19423
  extrinsic_count!: U16;
19322
19424
  accumulate_count!: U32;
19323
19425
  accumulate_gas_used!: ServiceGas;
19324
- on_transfers_count!: U32;
19325
- on_transfers_gas_used!: ServiceGas;
19426
+ on_transfers_count?: U32;
19427
+ on_transfers_gas_used?: ServiceGas;
19326
19428
  }
19327
19429
 
19328
19430
  type ServiceStatisticsEntry = {
@@ -19394,8 +19496,9 @@ type JsonStateDump = {
19394
19496
  tau: State["timeslot"];
19395
19497
  chi: {
19396
19498
  chi_m: PrivilegedServices["manager"];
19397
- chi_a: PrivilegedServices["authManager"];
19398
- chi_v: PrivilegedServices["validatorsManager"];
19499
+ chi_a: PrivilegedServices["assigners"];
19500
+ chi_v: PrivilegedServices["delegator"];
19501
+ chi_r?: PrivilegedServices["registrar"];
19399
19502
  chi_g: PrivilegedServices["autoAccumulateServices"] | null;
19400
19503
  };
19401
19504
  pi: JsonStatisticsData;
@@ -19428,6 +19531,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19428
19531
  chi_m: "number",
19429
19532
  chi_a: json.array("number"),
19430
19533
  chi_v: "number",
19534
+ chi_r: json.optional("number"),
19431
19535
  chi_g: json.nullable(
19432
19536
  json.array({
19433
19537
  service: "number",
@@ -19460,6 +19564,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19460
19564
  theta,
19461
19565
  accounts,
19462
19566
  }): InMemoryState => {
19567
+ if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) && chi.chi_r === undefined) {
19568
+ throw new Error("Registrar is required in Privileges GP ^0.7.1");
19569
+ }
19463
19570
  return InMemoryState.create({
19464
19571
  authPools: tryAsPerCore(
19465
19572
  alpha.map((perCore) => {
@@ -19493,8 +19600,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
19493
19600
  timeslot: tau,
19494
19601
  privilegedServices: PrivilegedServices.create({
19495
19602
  manager: chi.chi_m,
19496
- authManager: chi.chi_a,
19497
- validatorsManager: chi.chi_v,
19603
+ assigners: chi.chi_a,
19604
+ delegator: chi.chi_v,
19605
+ registrar: chi.chi_r ?? tryAsServiceId(2 ** 32 - 1),
19498
19606
  autoAccumulateServices: chi.chi_g ?? [],
19499
19607
  }),
19500
19608
  statistics: JsonStatisticsData.toStatisticsData(spec, pi),
@@ -19517,6 +19625,7 @@ declare const index$1_JsonDisputesRecords: typeof JsonDisputesRecords;
19517
19625
  type index$1_JsonLookupMeta = JsonLookupMeta;
19518
19626
  type index$1_JsonPreimageItem = JsonPreimageItem;
19519
19627
  declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
19628
+ type index$1_JsonPreimageStatus = JsonPreimageStatus;
19520
19629
  type index$1_JsonRecentBlockState = JsonRecentBlockState;
19521
19630
  type index$1_JsonRecentBlocks = JsonRecentBlocks;
19522
19631
  type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
@@ -19541,6 +19650,7 @@ declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
19541
19650
  declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
19542
19651
  declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
19543
19652
  declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
19653
+ declare const index$1_preimageStatusFromJson: typeof preimageStatusFromJson;
19544
19654
  declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
19545
19655
  declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
19546
19656
  declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
@@ -19548,8 +19658,8 @@ declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEn
19548
19658
  declare const index$1_ticketFromJson: typeof ticketFromJson;
19549
19659
  declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
19550
19660
  declare namespace index$1 {
19551
- export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
19552
- export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
19661
+ export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_preimageStatusFromJson as preimageStatusFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
19662
+ export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonPreimageStatus as JsonPreimageStatus, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
19553
19663
  }
19554
19664
 
19555
19665
  /** Helper function to create most used hashes in the block */
@@ -19557,7 +19667,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19557
19667
  constructor(
19558
19668
  private readonly context: ChainSpec,
19559
19669
  private readonly keccakHasher: KeccakHasher,
19560
- private readonly allocator: HashAllocator,
19670
+ public readonly blake2b: Blake2b,
19561
19671
  ) {}
19562
19672
 
19563
19673
  /** Concatenates two hashes and hash this concatenation */
@@ -19571,7 +19681,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19571
19681
 
19572
19682
  /** Creates hash from the block header view */
19573
19683
  header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
19574
- return new WithHash(blake2b.hashBytes(header.encoded(), this.allocator).asOpaque(), header);
19684
+ return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
19575
19685
  }
19576
19686
 
19577
19687
  /**
@@ -19585,7 +19695,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19585
19695
  .view()
19586
19696
  .map((g) => g.view())
19587
19697
  .map((guarantee) => {
19588
- const reportHash = blake2b.hashBytes(guarantee.report.encoded(), this.allocator).asOpaque<WorkReportHash>();
19698
+ const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
19589
19699
  return BytesBlob.blobFromParts([
19590
19700
  reportHash.raw,
19591
19701
  guarantee.slot.encoded().raw,
@@ -19595,15 +19705,15 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19595
19705
 
19596
19706
  const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);
19597
19707
 
19598
- const et = blake2b.hashBytes(extrinsicView.tickets.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19599
- const ep = blake2b.hashBytes(extrinsicView.preimages.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19600
- const eg = blake2b.hashBytes(guaranteeBlob, this.allocator).asOpaque<ExtrinsicHash>();
19601
- const ea = blake2b.hashBytes(extrinsicView.assurances.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19602
- const ed = blake2b.hashBytes(extrinsicView.disputes.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19708
+ const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
19709
+ const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
19710
+ const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
19711
+ const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
19712
+ const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
19603
19713
 
19604
19714
  const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);
19605
19715
 
19606
- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), extrinsicView, encoded);
19716
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
19607
19717
  }
19608
19718
 
19609
19719
  /** Creates hash for given WorkPackage */
@@ -19614,7 +19724,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19614
19724
  private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
19615
19725
  // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
19616
19726
  const encoded = Encoder.encodeObject(codec, data, this.context);
19617
- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), data, encoded);
19727
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
19618
19728
  }
19619
19729
  }
19620
19730
 
@@ -19635,7 +19745,10 @@ declare enum PreimagesErrorCode {
19635
19745
 
19636
19746
  // TODO [SeKo] consider whether this module is the right place to remove expired preimages
19637
19747
  declare class Preimages {
19638
- constructor(public readonly state: PreimagesState) {}
19748
+ constructor(
19749
+ public readonly state: PreimagesState,
19750
+ public readonly blake2b: Blake2b,
19751
+ ) {}
19639
19752
 
19640
19753
  integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
19641
19754
  // make sure lookup extrinsics are sorted and unique
@@ -19664,7 +19777,7 @@ declare class Preimages {
19664
19777
  // select preimages for integration
19665
19778
  for (const preimage of preimages) {
19666
19779
  const { requester, blob } = preimage;
19667
- const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
19780
+ const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();
19668
19781
 
19669
19782
  const service = this.state.getService(requester);
19670
19783
  if (service === null) {
@@ -19695,156 +19808,6 @@ declare class Preimages {
19695
19808
  }
19696
19809
  }
19697
19810
 
19698
- declare enum ServiceExecutorError {
19699
- NoLookup = 0,
19700
- NoState = 1,
19701
- NoServiceCode = 2,
19702
- ServiceCodeMismatch = 3,
19703
- }
19704
-
19705
- declare class WorkPackageExecutor {
19706
- constructor(
19707
- private readonly blocks: BlocksDb,
19708
- private readonly state: StatesDb,
19709
- private readonly hasher: TransitionHasher,
19710
- ) {}
19711
-
19712
- // TODO [ToDr] this while thing should be triple-checked with the GP.
19713
- // I'm currently implementing some dirty version for the demo.
19714
- async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
19715
- const headerHash = pack.context.lookupAnchor;
19716
- // execute authorisation first or is it already executed and we just need to check it?
19717
- const authExec = this.getServiceExecutor(
19718
- // TODO [ToDr] should this be anchor or lookupAnchor?
19719
- headerHash,
19720
- pack.authCodeHost,
19721
- pack.authCodeHash,
19722
- );
19723
-
19724
- if (authExec.isError) {
19725
- // TODO [ToDr] most likely shouldn't be throw.
19726
- throw new Error(`Could not get authorization executor: ${authExec.error}`);
19727
- }
19728
-
19729
- const pvm = authExec.ok;
19730
- const authGas = tryAsGas(15_000n);
19731
- const result = await pvm.run(pack.parametrization, authGas);
19732
-
19733
- if (!result.isEqualTo(pack.authorization)) {
19734
- throw new Error("Authorization is invalid.");
19735
- }
19736
-
19737
- const results: WorkResult[] = [];
19738
- for (const item of pack.items) {
19739
- const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
19740
- if (exec.isError) {
19741
- throw new Error(`Could not get item executor: ${exec.error}`);
19742
- }
19743
- const pvm = exec.ok;
19744
-
19745
- const gasRatio = tryAsServiceGas(3_000n);
19746
- const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
19747
- results.push(
19748
- WorkResult.create({
19749
- serviceId: item.service,
19750
- codeHash: item.codeHash,
19751
- payloadHash: blake2b.hashBytes(item.payload),
19752
- gas: gasRatio,
19753
- result: new WorkExecResult(WorkExecResultKind.ok, ret),
19754
- load: WorkRefineLoad.create({
19755
- gasUsed: tryAsServiceGas(5),
19756
- importedSegments: tryAsU32(0),
19757
- exportedSegments: tryAsU32(0),
19758
- extrinsicSize: tryAsU32(0),
19759
- extrinsicCount: tryAsU32(0),
19760
- }),
19761
- }),
19762
- );
19763
- }
19764
-
19765
- const workPackage = this.hasher.workPackage(pack);
19766
- const workPackageSpec = WorkPackageSpec.create({
19767
- hash: workPackage.hash,
19768
- length: tryAsU32(workPackage.encoded.length),
19769
- erasureRoot: Bytes.zero(HASH_SIZE),
19770
- exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
19771
- exportsCount: tryAsU16(0),
19772
- });
19773
- const coreIndex = tryAsCoreIndex(0);
19774
- const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
19775
-
19776
- const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
19777
-
19778
- return Promise.resolve(
19779
- WorkReport.create({
19780
- workPackageSpec,
19781
- context: pack.context,
19782
- coreIndex,
19783
- authorizerHash,
19784
- authorizationOutput: pack.authorization,
19785
- segmentRootLookup: [],
19786
- results: workResults,
19787
- authorizationGasUsed: tryAsServiceGas(0),
19788
- }),
19789
- );
19790
- }
19791
-
19792
- getServiceExecutor(
19793
- lookupAnchor: HeaderHash,
19794
- serviceId: ServiceId,
19795
- expectedCodeHash: CodeHash,
19796
- ): Result$2<PvmExecutor, ServiceExecutorError> {
19797
- const header = this.blocks.getHeader(lookupAnchor);
19798
- if (header === null) {
19799
- return Result.error(ServiceExecutorError.NoLookup);
19800
- }
19801
-
19802
- const state = this.state.getState(lookupAnchor);
19803
- if (state === null) {
19804
- return Result.error(ServiceExecutorError.NoState);
19805
- }
19806
-
19807
- const service = state.getService(serviceId);
19808
- const serviceCodeHash = service?.getInfo().codeHash ?? null;
19809
- if (serviceCodeHash === null) {
19810
- return Result.error(ServiceExecutorError.NoServiceCode);
19811
- }
19812
-
19813
- if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
19814
- return Result.error(ServiceExecutorError.ServiceCodeMismatch);
19815
- }
19816
-
19817
- const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
19818
- if (serviceCode === null) {
19819
- return Result.error(ServiceExecutorError.NoServiceCode);
19820
- }
19821
-
19822
- return Result.ok(new PvmExecutor(serviceCode));
19823
- }
19824
- }
19825
-
19826
- declare class PvmExecutor {
19827
- private readonly pvm: HostCalls;
19828
- private hostCalls = new HostCallsManager({ missing: new Missing() });
19829
- private pvmInstanceManager = new PvmInstanceManager(4);
19830
-
19831
- constructor(private serviceCode: BytesBlob) {
19832
- this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
19833
- }
19834
-
19835
- async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
19836
- const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
19837
-
19838
- const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
19839
-
19840
- if (result.hasMemorySlice()) {
19841
- return BytesBlob.blobFrom(result.memorySlice);
19842
- }
19843
-
19844
- return BytesBlob.empty();
19845
- }
19846
- }
19847
-
19848
19811
  type index_Preimages = Preimages;
19849
19812
  declare const index_Preimages: typeof Preimages;
19850
19813
  type index_PreimagesErrorCode = PreimagesErrorCode;
@@ -19854,10 +19817,8 @@ type index_PreimagesState = PreimagesState;
19854
19817
  type index_PreimagesStateUpdate = PreimagesStateUpdate;
19855
19818
  type index_TransitionHasher = TransitionHasher;
19856
19819
  declare const index_TransitionHasher: typeof TransitionHasher;
19857
- type index_WorkPackageExecutor = WorkPackageExecutor;
19858
- declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
19859
19820
  declare namespace index {
19860
- export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher, index_WorkPackageExecutor as WorkPackageExecutor };
19821
+ export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
19861
19822
  export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
19862
19823
  }
19863
19824