@typeberry/lib 0.1.3-3f7b9cf → 0.1.3-6759174

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.cjs +367 -1116
  2. package/index.d.ts +257 -396
  3. package/index.js +366 -1115
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -420,6 +420,20 @@ declare const Result$2 = {
  },
  };
 
+ // about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+ // - https://issues.chromium.org/issues/40055619
+ // - https://stackoverflow.com/a/72124984
+ // - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+ declare const MAX_LENGTH$1 = 2145386496;
+
+ declare function safeAllocUint8Array(length: number) {
+ if (length > MAX_LENGTH) {
+ // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+ console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+ }
+ return new Uint8Array(Math.min(MAX_LENGTH, length));
+ }
+
  /**
  * Utilities for tests.
  */
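Usage sketch of the new allocation helper (the import path is an assumption; the diff shows only the declarations and the `index$u` re-exports below):

  import { safeAllocUint8Array, MAX_LENGTH } from "@typeberry/lib";

  // Ordinary requests behave like `new Uint8Array(length)`:
  const buf = safeAllocUint8Array(64); // 64-byte zeroed buffer
  // Oversized requests log a warning and are clamped to MAX_LENGTH (~2 GB)
  // instead of throwing a RangeError:
  const capped = safeAllocUint8Array(MAX_LENGTH + 1);
  console.log(capped.length === MAX_LENGTH); // true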
@@ -755,11 +769,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
  declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
  declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
  declare const index$u_resultToString: typeof resultToString;
+ declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
  declare const index$u_seeThrough: typeof seeThrough;
  declare const index$u_trimStack: typeof trimStack;
  declare const index$u_workspacePathFix: typeof workspacePathFix;
  declare namespace index$u {
- export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
+ export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
  export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
  }
 
@@ -929,7 +944,7 @@ declare class BytesBlob {
  static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
  const vArr = v instanceof Uint8Array ? [v] : v;
  const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
- const buffer = new Uint8Array(totalLength);
+ const buffer = safeAllocUint8Array(totalLength);
  let offset = 0;
  for (const r of vArr) {
  buffer.set(r, offset);
@@ -1012,7 +1027,7 @@ declare class Bytes<T extends number> extends BytesBlob {
 
  /** Create an empty [`Bytes<X>`] of given length. */
  static zero<X extends number>(len: X): Bytes<X> {
- return new Bytes(new Uint8Array(len), len);
+ return new Bytes(safeAllocUint8Array(len), len);
  }
 
  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1148,7 @@ declare class BitVec {
  * Create new [`BitVec`] with all values set to `false`.
  */
  static empty(bitLength: number) {
- const data = new Uint8Array(Math.ceil(bitLength / 8));
+ const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
  return new BitVec(data, bitLength);
  }
 
@@ -3461,6 +3476,99 @@ declare namespace index$q {
  export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
  }
 
+ /**
+ * A utility class providing a readonly view over a portion of an array without copying it.
+ */
+ declare class ArrayView<T> implements Iterable<T> {
+ private readonly source: T[];
+ public readonly length: number;
+
+ private constructor(
+ source: T[],
+ private readonly start: number,
+ private readonly end: number,
+ ) {
+ this.source = source;
+ this.length = end - start;
+ }
+
+ static from<T>(source: T[], start = 0, end = source.length): ArrayView<T> {
+ check`
+ ${start >= 0 && end <= source.length && start <= end}
+ Invalid start (${start})/end (${end}) for ArrayView
+ `;
+ return new ArrayView(source, start, end);
+ }
+
+ get(i: number): T {
+ check`
+ ${i >= 0 && i < this.length}
+ Index out of bounds: ${i} < ${this.length}
+ `;
+ return this.source[this.start + i];
+ }
+
+ subview(from: number, to: number = this.length): ArrayView<T> {
+ return ArrayView.from(this.source, this.start + from, this.start + to);
+ }
+
+ toArray(): T[] {
+ return this.source.slice(this.start, this.end);
+ }
+
+ *[Symbol.iterator](): Iterator<T> {
+ for (let i = this.start; i < this.end; i++) {
+ yield this.source[i];
+ }
+ }
+ }
+
+ type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
+ type IDataType = string | Buffer | ITypedArray;
+
+ type IHasher = {
+ /**
+ * Initializes hash state to default value
+ */
+ init: () => IHasher;
+ /**
+ * Updates the hash content with the given data
+ */
+ update: (data: IDataType) => IHasher;
+ /**
+ * Calculates the hash of all of the data passed to be hashed with hash.update().
+ * Defaults to hexadecimal string
+ * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
+ * returns hexadecimal string
+ */
+ digest: {
+ (outputType: "binary"): Uint8Array;
+ (outputType?: "hex"): string;
+ };
+ /**
+ * Save the current internal state of the hasher for later resumption with load().
+ * Cannot be called before .init() or after .digest()
+ *
+ * Note that this state can include arbitrary information about the value being hashed (e.g.
+ * could include N plaintext bytes from the value), so needs to be treated as being as
+ * sensitive as the input value itself.
+ */
+ save: () => Uint8Array;
+ /**
+ * Resume a state that was created by save(). If this state was not created by a
+ * compatible build of hash-wasm, an exception will be thrown.
+ */
+ load: (state: Uint8Array) => IHasher;
+ /**
+ * Block size in bytes
+ */
+ blockSize: number;
+ /**
+ * Digest size in bytes
+ */
+ digestSize: number;
+ };
+
  /**
  * Size of the output of the hash functions.
  *
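A short usage sketch of the new ArrayView (import path assumed; behavior follows the declarations above):

  import { ArrayView } from "@typeberry/lib";

  const data = [10, 20, 30, 40, 50];
  const view = ArrayView.from(data, 1, 4); // readonly window over [20, 30, 40], no copy
  view.get(0);                // 20
  [...view];                  // [20, 30, 40] — iterable
  view.subview(1).toArray();  // [30, 40] — subviews stay within the parent window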
@@ -3516,144 +3624,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
  }
  }
 
- /** Allocator interface - returns an empty bytes vector that can be filled with the hash. */
- interface HashAllocator {
- /** Return a new hash destination. */
- emptyHash(): OpaqueHash;
- }
+ declare const zero$1 = Bytes.zero(HASH_SIZE);
 
- /** The simplest allocator returning just a fresh copy of bytes each time. */
- declare class SimpleAllocator implements HashAllocator {
- emptyHash(): OpaqueHash {
- return Bytes.zero(HASH_SIZE);
+ declare class Blake2b {
+ static async createHasher() {
+ return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
  }
- }
 
- /** An allocator that works by allocating larger (continuous) pages of memory. */
- declare class PageAllocator implements HashAllocator {
- private page: Uint8Array = new Uint8Array(0);
- private currentHash = 0;
+ private constructor(private readonly hasher: IHasher) {}
 
- // TODO [ToDr] Benchmark the performance!
- constructor(private readonly hashesPerPage: number) {
- check`${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
- this.resetPage();
- }
-
- private resetPage() {
- const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
- this.currentHash = 0;
- this.page = new Uint8Array(pageSizeBytes);
- }
-
- emptyHash(): OpaqueHash {
- const startIdx = this.currentHash * HASH_SIZE;
- const endIdx = startIdx + HASH_SIZE;
-
- this.currentHash += 1;
- if (this.currentHash >= this.hashesPerPage) {
- this.resetPage();
+ /**
+ * Hash given collection of blobs.
+ *
+ * If empty array is given a zero-hash is returned.
+ */
+ hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
+ if (r.length === 0) {
+ return zero.asOpaque();
  }
 
- return Bytes.fromBlob(this.page.subarray(startIdx, endIdx), HASH_SIZE);
+ const hasher = this.hasher.init();
+ for (const v of r) {
+ hasher.update(v instanceof BytesBlob ? v.raw : v);
+ }
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
  }
- }
-
- declare const defaultAllocator = new SimpleAllocator();
 
- /**
- * Hash given collection of blobs.
- *
- * If empty array is given a zero-hash is returned.
- */
- declare function hashBlobs$1<H extends Blake2bHash>(
- r: (BytesBlob | Uint8Array)[],
- allocator: HashAllocator = defaultAllocator,
- ): H {
- const out = allocator.emptyHash();
- if (r.length === 0) {
- return out.asOpaque();
+ /** Hash given blob of bytes. */
+ hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
+ const hasher = this.hasher.init();
+ const bytes = blob instanceof BytesBlob ? blob.raw : blob;
+ hasher.update(bytes);
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
  }
 
- const hasher = blake2b(HASH_SIZE);
- for (const v of r) {
- hasher?.update(v instanceof BytesBlob ? v.raw : v);
+ /** Convert given string into bytes and hash it. */
+ hashString(str: string) {
+ return this.hashBytes(BytesBlob.blobFromString(str));
  }
- hasher?.digest(out.raw);
- return out.asOpaque();
  }
 
- /** Hash given blob of bytes. */
- declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
- const hasher = blake2b(HASH_SIZE);
- const bytes = blob instanceof BytesBlob ? blob.raw : blob;
- hasher?.update(bytes);
- const out = allocator.emptyHash();
- hasher?.digest(out.raw);
- return out;
- }
-
- /** Convert given string into bytes and hash it. */
- declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
- return hashBytes(BytesBlob.blobFromString(str), allocator);
- }
-
- declare const blake2b_hashBytes: typeof hashBytes;
- declare const blake2b_hashString: typeof hashString;
- declare namespace blake2b {
- export {
- hashBlobs$1 as hashBlobs,
- blake2b_hashBytes as hashBytes,
- blake2b_hashString as hashString,
- };
- }
-
- type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
- type IDataType = string | Buffer | ITypedArray;
-
- type IHasher = {
- /**
- * Initializes hash state to default value
- */
- init: () => IHasher;
- /**
- * Updates the hash content with the given data
- */
- update: (data: IDataType) => IHasher;
- /**
- * Calculates the hash of all of the data passed to be hashed with hash.update().
- * Defaults to hexadecimal string
- * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
- * returns hexadecimal string
- */
- digest: {
- (outputType: "binary"): Uint8Array;
- (outputType?: "hex"): string;
- };
- /**
- * Save the current internal state of the hasher for later resumption with load().
- * Cannot be called before .init() or after .digest()
- *
- * Note that this state can include arbitrary information about the value being hashed (e.g.
- * could include N plaintext bytes from the value), so needs to be treated as being as
- * sensitive as the input value itself.
- */
- save: () => Uint8Array;
- /**
- * Resume a state that was created by save(). If this state was not created by a
- * compatible build of hash-wasm, an exception will be thrown.
- */
- load: (state: Uint8Array) => IHasher;
- /**
- * Block size in bytes
- */
- blockSize: number;
- /**
- * Digest size in bytes
- */
- digestSize: number;
- };
-
  declare class KeccakHasher {
  static async create(): Promise<KeccakHasher> {
  return new KeccakHasher(await createKeccak(256));
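The allocator-based blake2b namespace is replaced by an instance API: WASM initialization (hash-wasm's createBLAKE2b) is asynchronous, so the hasher is created once and reused. A hedged migration sketch (import path assumed, code runs in an async context):

  import { Blake2b } from "@typeberry/lib";

  // before: blake2b.hashBytes(data) / blake2b.hashString(s), optionally with an allocator
  // after: create the instance once, then hash synchronously
  const blake2b = await Blake2b.createHasher(); // one-time async WASM setup
  const h1 = blake2b.hashBytes(new Uint8Array([1, 2, 3]));
  const h2 = blake2b.hashString("hello");
  const h3 = blake2b.hashBlobs([h1.raw, h2.raw]); // returns the zero-hash for an empty array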
@@ -3681,15 +3691,15 @@ declare namespace keccak {
  };
  }
 
+ // TODO [ToDr] (#213) this should most likely be moved to a separate
+ // package to avoid pulling in unnecessary deps.
+
+ type index$p_Blake2b = Blake2b;
+ declare const index$p_Blake2b: typeof Blake2b;
  type index$p_Blake2bHash = Blake2bHash;
  type index$p_HASH_SIZE = HASH_SIZE;
- type index$p_HashAllocator = HashAllocator;
  type index$p_KeccakHash = KeccakHash;
  type index$p_OpaqueHash = OpaqueHash;
- type index$p_PageAllocator = PageAllocator;
- declare const index$p_PageAllocator: typeof PageAllocator;
- type index$p_SimpleAllocator = SimpleAllocator;
- declare const index$p_SimpleAllocator: typeof SimpleAllocator;
  type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
  type index$p_TruncatedHash = TruncatedHash;
  type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3697,12 +3707,10 @@ declare const index$p_WithHash: typeof WithHash;
  type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
  declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
  declare const index$p_ZERO_HASH: typeof ZERO_HASH;
- declare const index$p_blake2b: typeof blake2b;
- declare const index$p_defaultAllocator: typeof defaultAllocator;
  declare const index$p_keccak: typeof keccak;
  declare namespace index$p {
- export { index$p_PageAllocator as PageAllocator, index$p_SimpleAllocator as SimpleAllocator, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_blake2b as blake2b, index$p_defaultAllocator as defaultAllocator, index$p_keccak as keccak };
- export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_HashAllocator as HashAllocator, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
+ export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
+ export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
  }
 
  /** Immutable view of the `HashDictionary`. */
@@ -4479,6 +4487,8 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
  }
  }
 
+ type index$o_ArrayView<T> = ArrayView<T>;
+ declare const index$o_ArrayView: typeof ArrayView;
  type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
  declare const index$o_FixedSizeArray: typeof FixedSizeArray;
  type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
@@ -4506,7 +4516,7 @@ type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDic
  declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
  declare const index$o_asKnownSize: typeof asKnownSize;
  declare namespace index$o {
- export { index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
+ export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
  export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
  }
 
@@ -4735,7 +4745,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
  (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
  0,
  );
- const data = new Uint8Array(dataLength);
+ const data = safeAllocUint8Array(dataLength);
 
  let offset = 0;
 
@@ -4825,22 +4835,16 @@ declare function trivialSeed(s: U32): KeySeed {
  * Derives a Ed25519 secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
- declare function deriveEd25519SecretKey(
- seed: KeySeed,
- allocator: SimpleAllocator = new SimpleAllocator(),
- ): Ed25519SecretSeed {
- return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+ declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
+ return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
  }
 
  /**
  * Derives a Bandersnatch secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
- declare function deriveBandersnatchSecretKey(
- seed: KeySeed,
- allocator: SimpleAllocator = new SimpleAllocator(),
- ): BandersnatchSecretSeed {
- return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+ declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
+ return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
  }
 
  /**
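Key derivation now takes the hasher explicitly instead of a default allocator. A sketch of the new call shape (trivialSeed and tryAsU32 as declared elsewhere in this file; imports assumed):

  const blake2b = await Blake2b.createHasher();
  const seed = trivialSeed(tryAsU32(0));
  const ed25519Secret = deriveEd25519SecretKey(seed, blake2b);
  const bandersnatchSecret = deriveBandersnatchSecretKey(seed, blake2b);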
@@ -8373,7 +8377,7 @@ declare enum NodeType {
  declare class TrieNode {
  constructor(
  /** Exactly 512 bits / 64 bytes */
- public readonly raw: Uint8Array = new Uint8Array(TRIE_NODE_BYTES),
+ public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
  ) {}
 
  /** Returns the type of the node */
@@ -9111,21 +9115,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
  return Ordering.Equal;
  }
 
- declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
- Descriptor.withView(
- val.name,
- val.sizeHint,
- (e, elem) => val.encode(e, elem.data),
- (d): WithHash<H, T> => {
- const decoder2 = d.clone();
- const encoded = val.skipEncoded(decoder2);
- const hash = blake2b.hashBytes(encoded);
- return new WithHash(hash.asOpaque(), val.decode(d));
- },
- val.skip,
- val.View,
- );
-
  /**
  * Assignment of particular work report to a core.
  *
@@ -9136,7 +9125,7 @@ declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>
  */
  declare class AvailabilityAssignment extends WithDebug {
  static Codec = codec.Class(AvailabilityAssignment, {
- workReport: codecWithHash(WorkReport.Codec),
+ workReport: WorkReport.Codec,
  timeout: codec.u32.asOpaque<TimeSlot>(),
  });
 
@@ -9146,7 +9135,7 @@ declare class AvailabilityAssignment extends WithDebug {
 
  private constructor(
  /** Work report assigned to a core. */
- public readonly workReport: WithHash<WorkReportHash, WorkReport>,
+ public readonly workReport: WorkReport,
  /** Time slot at which the report becomes obsolete. */
  public readonly timeout: TimeSlot,
  ) {
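With codecWithHash removed, AvailabilityAssignment stores the raw WorkReport rather than a WithHash wrapper, so callers hash on demand. A hedged sketch of the access-pattern change (encoding call mirrors the one removed from the JSON converter later in this diff):

  // before: assignment.workReport.hash / assignment.workReport.data
  // after: the report itself; compute the hash explicitly where one is needed
  const report: WorkReport = assignment.workReport;
  const reportHash = blake2b.hashBytes(Encoder.encodeObject(WorkReport.Codec, report)).asOpaque();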
@@ -11293,7 +11282,6 @@ declare const index$e_codecPerCore: typeof codecPerCore;
  declare const index$e_codecServiceId: typeof codecServiceId;
  declare const index$e_codecVarGas: typeof codecVarGas;
  declare const index$e_codecVarU16: typeof codecVarU16;
- declare const index$e_codecWithHash: typeof codecWithHash;
  declare const index$e_hashComparator: typeof hashComparator;
  declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
  declare const index$e_serviceDataCodec: typeof serviceDataCodec;
@@ -11304,7 +11292,7 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
  declare namespace index$e {
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
  export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
  }
 
@@ -11372,7 +11360,7 @@ declare namespace stateKeys {
  }
 
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
- export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
+ export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
  const out = Bytes.zero(HASH_SIZE);
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11380,11 +11368,11 @@ declare namespace stateKeys {
  return legacyServiceNested(serviceId, out);
  }
 
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
  }
 
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
- export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
+ export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
  const out = Bytes.zero(HASH_SIZE);
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11392,11 +11380,16 @@ declare namespace stateKeys {
  return legacyServiceNested(serviceId, out);
  }
 
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
  }
 
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
- export function serviceLookupHistory(serviceId: ServiceId, hash: PreimageHash, preimageLength: U32): StateKey {
+ export function serviceLookupHistory(
+ blake2b: Blake2b,
+ serviceId: ServiceId,
+ hash: PreimageHash,
+ preimageLength: U32,
+ ): StateKey {
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
  const doubleHash = blake2b.hashBytes(hash);
  const out = Bytes.zero(HASH_SIZE);
@@ -11405,11 +11398,11 @@ declare namespace stateKeys {
  return legacyServiceNested(serviceId, out);
  }
 
- return serviceNested(serviceId, preimageLength, hash);
+ return serviceNested(blake2b, serviceId, preimageLength, hash);
  }
 
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
- export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
+ export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
  const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
  const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
  const key = Bytes.zero(HASH_SIZE);
@@ -11593,20 +11586,20 @@ declare namespace serialize {
  });
 
  /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
- export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
- key: stateKeys.serviceStorage(serviceId, key),
+ export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
+ key: stateKeys.serviceStorage(blake2b, serviceId, key),
  Codec: dumpCodec,
  });
 
  /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
- export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
- key: stateKeys.servicePreimage(serviceId, hash),
+ export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
+ key: stateKeys.servicePreimage(blake2b, serviceId, hash),
  Codec: dumpCodec,
  });
 
  /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
- export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
- key: stateKeys.serviceLookupHistory(serviceId, hash, len),
+ export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
+ key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
  Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
  });
  }
@@ -11641,6 +11634,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
  /** Serialize given state update into a series of key-value pairs. */
  declare function* serializeStateUpdate(
  spec: ChainSpec,
+ blake2b: Blake2b,
  update: Partial<State & ServicesUpdate>,
  ): Generator<StateEntryUpdate> {
  // first let's serialize all of the simple entries (if present!)
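State serialization now threads the hasher through every generator below. A sketch of the new call shape (spec/update construction elided; StateEntryUpdate tuples per the yields shown in this diff):

  const blake2b = await Blake2b.createHasher();
  for (const [action, key, value] of serializeStateUpdate(spec, blake2b, update)) {
    // action is StateEntryUpdateAction.Insert or .Remove; apply key/value to the store
  }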
@@ -11649,9 +11643,9 @@ declare function* serializeStateUpdate(
  const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
 
  // then let's proceed with service updates
- yield* serializeServiceUpdates(update.servicesUpdates, encode);
- yield* serializePreimages(update.preimages, encode);
- yield* serializeStorage(update.storage);
+ yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
+ yield* serializePreimages(update.preimages, encode, blake2b);
+ yield* serializeStorage(update.storage, blake2b);
  yield* serializeRemovedServices(update.servicesRemoved);
  }
 
@@ -11663,18 +11657,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
  }
  }
 
- declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
+ declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
  for (const { action, serviceId } of storage ?? []) {
  switch (action.kind) {
  case UpdateStorageKind.Set: {
  const key = action.storage.key;
- const codec = serialize.serviceStorage(serviceId, key);
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
  yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
  break;
  }
  case UpdateStorageKind.Remove: {
  const key = action.key;
- const codec = serialize.serviceStorage(serviceId, key);
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
  break;
  }
@@ -11684,16 +11678,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
  }
  }
 
- declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, encode: EncodeFun): Generator<StateEntryUpdate> {
+ declare function* serializePreimages(
+ preimages: UpdatePreimage[] | undefined,
+ encode: EncodeFun,
+ blake2b: Blake2b,
+ ): Generator<StateEntryUpdate> {
  for (const { action, serviceId } of preimages ?? []) {
  switch (action.kind) {
  case UpdatePreimageKind.Provide: {
  const { hash, blob } = action.preimage;
- const codec = serialize.servicePreimages(serviceId, hash);
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
  yield [StateEntryUpdateAction.Insert, codec.key, blob];
 
  if (action.slot !== null) {
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
  yield [
  StateEntryUpdateAction.Insert,
  codec2.key,
@@ -11704,16 +11702,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
  }
  }
  case UpdatePreimageKind.UpdateOrAdd: {
  const { hash, length, slots } = action.item;
- const codec = serialize.serviceLookupHistory(serviceId, hash, length);
+ const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
  break;
  }
  case UpdatePreimageKind.Remove: {
  const { hash, length } = action;
- const codec = serialize.servicePreimages(serviceId, hash);
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
 
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
  yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
  break;
@@ -11725,6 +11723,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
  declare function* serializeServiceUpdates(
  servicesUpdates: UpdateService[] | undefined,
  encode: EncodeFun,
+ blake2b: Blake2b,
  ): Generator<StateEntryUpdate> {
  for (const { action, serviceId } of servicesUpdates ?? []) {
  // new service being created or updated
@@ -11734,7 +11733,7 @@ declare function* serializeServiceUpdates(
  // additional lookup history update
  if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
  const { lookupHistory } = action;
- const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
  yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
  }
  }
@@ -11868,8 +11867,8 @@ declare class StateEntries {
  );
 
  /** Turn in-memory state into it's serialized form. */
- static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
- return new StateEntries(convertInMemoryStateToDictionary(spec, state));
+ static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
+ return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
  }
 
  /**
@@ -11924,7 +11923,8 @@ declare class StateEntries {
  }
 
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
- getRootHash(): StateRootHash {
+ getRootHash(blake2b: Blake2b): StateRootHash {
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
  const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
  for (const [key, value] of this) {
  leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
@@ -11937,6 +11937,7 @@ declare class StateEntries {
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
  declare function convertInMemoryStateToDictionary(
  spec: ChainSpec,
+ blake2b: Blake2b,
  state: InMemoryState,
  ): TruncatedHashDictionary<StateKey, BytesBlob> {
  const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11969,20 +11970,25 @@ declare function convertInMemoryStateToDictionary(
 
  // preimages
  for (const preimage of service.data.preimages.values()) {
- const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
+ const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
  serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
  }
 
  // storage
  for (const storage of service.data.storage.values()) {
- const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
+ const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
  serialized.set(key, Encoder.encodeObject(Codec, storage.value));
  }
 
  // lookup history
  for (const lookupHistoryList of service.data.lookupHistory.values()) {
  for (const lookupHistory of lookupHistoryList) {
- const { key, Codec } = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
+ const { key, Codec } = serialize.serviceLookupHistory(
+ blake2b,
+ serviceId,
+ lookupHistory.hash,
+ lookupHistory.length,
+ );
  serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
  }
  }
@@ -12013,21 +12019,23 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
  implements State, EnumerableState
  {
  /** Create a state-like object from collection of serialized entries. */
- static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
- return new SerializedState(spec, state, recentServices);
+ static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
+ return new SerializedState(spec, blake2b, state, recentServices);
  }
 
  /** Create a state-like object backed by some DB. */
  static new<T extends SerializedStateBackend>(
  spec: ChainSpec,
+ blake2b: Blake2b,
  db: T,
  recentServices: ServiceId[] = [],
  ): SerializedState<T> {
- return new SerializedState(spec, db, recentServices);
+ return new SerializedState(spec, blake2b, db, recentServices);
  }
 
  private constructor(
  private readonly spec: ChainSpec,
+ private readonly blake2b: Blake2b,
  public backend: T,
  /** Best-effort list of recently active services. */
  private readonly _recentServiceIds: ServiceId[],
@@ -12058,7 +12066,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
  this._recentServiceIds.push(id);
  }
 
- return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
+ return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
  }
 
  private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
@@ -12157,6 +12165,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
  /** Service data representation on a serialized state. */
  declare class SerializedService implements Service {
  constructor(
+ public readonly blake2b: Blake2b,
  /** Service id */
  public readonly serviceId: ServiceId,
  private readonly accountInfo: ServiceAccountInfo,
@@ -12172,14 +12181,14 @@ declare class SerializedService implements Service {
  getStorage(rawKey: StorageKey): BytesBlob | null {
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
  const SERVICE_ID_BYTES = 4;
- const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
+ const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
  serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
  serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
- const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
+ const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
  }
 
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
  }
 
  /**
@@ -12189,17 +12198,17 @@ declare class SerializedService implements Service {
  */
  hasPreimage(hash: PreimageHash): boolean {
  // TODO [ToDr] consider optimizing to avoid fetching the whole data.
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
  }
 
  /** Retrieve preimage from the DB. */
  getPreimage(hash: PreimageHash): BytesBlob | null {
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
  }
 
  /** Retrieve preimage lookup history. */
  getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
- const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
+ const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
  if (rawSlots === undefined) {
  return null;
  }
@@ -12212,9 +12221,9 @@ type KeyAndCodec<T> = {
  Codec: Decode<T>;
  };
 
- declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
+ declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
  const stateEntries = StateEntries.fromEntriesUnsafe(entries);
- return SerializedState.fromStateEntries(spec, stateEntries);
+ return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
  }
 
  /**
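loadState and the SerializedState factories take the same explicit hasher. Sketch (spec/entries elided):

  const blake2b = await Blake2b.createHasher();
  const state = loadState(spec, blake2b, entries);
  // or, from already-built entries:
  const fromEntries = SerializedState.fromStateEntries(spec, blake2b, stateEntries);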
@@ -12370,7 +12379,8 @@ declare class LeafDb implements SerializedStateBackend {
  assertNever(val);
  }
 
- getStateRoot(): StateRootHash {
+ getStateRoot(blake2b: Blake2b): StateRootHash {
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
  return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
  }
 
@@ -12468,7 +12478,8 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
  }
 
  async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
- return StateEntries.serializeInMemory(this.spec, state).getRootHash();
+ const blake2b = await Blake2b.createHasher();
+ return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
  }
 
  /** Insert a full state into the database. */
@@ -12573,7 +12584,7 @@ declare function padAndEncodeData(input: BytesBlob) {
  const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
  let padded = input;
  if (input.length !== paddedLength) {
- padded = BytesBlob.blobFrom(new Uint8Array(paddedLength));
+ padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
  padded.raw.set(input.raw, 0);
  }
  return chunkingFunction(padded);
@@ -12629,7 +12640,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
  */
  declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
  const result: Bytes<POINT_LENGTH>[] = [];
- const data = new Uint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
+ const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
 
  // add original shards to the result
  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12649,7 +12660,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
  for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
  const pointIndex = i * POINT_ALIGNMENT;
 
- const redundancyPoint = new Uint8Array(POINT_LENGTH);
+ const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
  for (let j = 0; j < POINT_LENGTH; j++) {
  redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
  }
@@ -12669,7 +12680,7 @@ declare function decodePiece(
  ): Bytes<PIECE_SIZE> {
  const result = Bytes.zero(PIECE_SIZE);
 
- const data = new Uint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
+ const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
  const indices = new Uint16Array(input.length);
 
  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12796,7 +12807,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
  return BytesBlob.empty();
  }
  const n = input[0].length;
- const result = BytesBlob.blobFrom(new Uint8Array(k * n));
+ const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
  for (let i = 0; i < k; i++) {
  const entry = input[i].raw;
  for (let j = 0; j < n; j++) {
@@ -13868,7 +13879,7 @@ declare class Mask {
  }
 
  private buildLookupTableForward(mask: BitVec) {
- const table = new Uint8Array(mask.bitLength);
+ const table = safeAllocUint8Array(mask.bitLength);
  let lastInstructionOffset = 0;
  for (let i = mask.bitLength - 1; i >= 0; i--) {
  if (mask.isSet(i)) {
@@ -14012,7 +14023,7 @@ declare class Registers {
  private asSigned: BigInt64Array;
  private asUnsigned: BigUint64Array;
 
- constructor(private readonly bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
+ constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
  check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
@@ -18239,7 +18250,7 @@ declare class HostCalls {
  const maybeAddress = regs.getLowerU32(7);
  const maybeLength = regs.getLowerU32(8);
 
- const result = new Uint8Array(maybeLength);
+ const result = safeAllocUint8Array(maybeLength);
  const startAddress = tryAsMemoryIndex(maybeAddress);
  const loadResult = memory.loadInto(result, startAddress);
 
@@ -18678,7 +18689,7 @@ declare class DebuggerAdapter {
 
  if (page === null) {
  // page wasn't allocated so we return an empty page
- return new Uint8Array(PAGE_SIZE);
+ return safeAllocUint8Array(PAGE_SIZE);
  }
 
  if (page.length === PAGE_SIZE) {
@@ -18687,7 +18698,7 @@ declare class DebuggerAdapter {
  }
 
  // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
- const fullPage = new Uint8Array(PAGE_SIZE);
+ const fullPage = safeAllocUint8Array(PAGE_SIZE);
  fullPage.set(page);
  return fullPage;
  }
@@ -18880,10 +18891,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
  *
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
- declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
+ declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
  check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
  const n = arr.length;
- const randomNumbers = hashToNumberSequence(entropy, arr.length);
+ const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
  const result: T[] = new Array<T>(n);
 
  let itemsLeft = n;
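The shuffle now takes the hasher as its first argument. Sketch (entropy is assumed to be a Bytes<ENTROPY_BYTES> value obtained elsewhere):

  const blake2b = await Blake2b.createHasher();
  const shuffled = fisherYatesShuffle(blake2b, [1, 2, 3, 4], entropy);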
@@ -19055,8 +19066,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
     timeout: "number",
   },
   ({ report, timeout }) => {
-    const workReportHash = blake2b.hashBytes(Encoder.encodeObject(WorkReport.Codec, report)).asOpaque();
-    return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
+    return AvailabilityAssignment.create({ workReport: report, timeout });
   },
 );
 
@@ -19557,7 +19567,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   constructor(
     private readonly context: ChainSpec,
     private readonly keccakHasher: KeccakHasher,
-    private readonly allocator: HashAllocator,
+    public readonly blake2b: Blake2b,
   ) {}
 
   /** Concatenates two hashes and hash this concatenation */
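`TransitionHasher` swaps its private `HashAllocator` for a public `Blake2b`, so the hunks below drop the `this.allocator` argument from every `hashBytes` call. A wiring sketch, with assumed factory names:

```ts
// Sketch only: the factory names are assumptions, not the package's API.
declare function createKeccakHasher(): Promise<KeccakHasher>;
declare function createBlake2b(): Promise<Blake2b>;
declare const chainSpec: ChainSpec;
declare const headerView: HeaderView;

async function makeHasher(): Promise<TransitionHasher> {
  // blake2b is injected once here instead of an allocator being threaded
  // through every hashBytes call.
  return new TransitionHasher(chainSpec, await createKeccakHasher(), await createBlake2b());
}

// usage: (await makeHasher()).header(headerView) yields a WithHash<HeaderHash, HeaderView>
```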
@@ -19571,7 +19581,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
 
   /** Creates hash from the block header view */
   header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
-    return new WithHash(blake2b.hashBytes(header.encoded(), this.allocator).asOpaque(), header);
+    return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
   }
 
   /**
@@ -19585,7 +19595,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
       .view()
       .map((g) => g.view())
       .map((guarantee) => {
-        const reportHash = blake2b.hashBytes(guarantee.report.encoded(), this.allocator).asOpaque<WorkReportHash>();
+        const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
         return BytesBlob.blobFromParts([
           reportHash.raw,
           guarantee.slot.encoded().raw,
@@ -19595,15 +19605,15 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
 
     const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);
 
-    const et = blake2b.hashBytes(extrinsicView.tickets.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
-    const ep = blake2b.hashBytes(extrinsicView.preimages.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
-    const eg = blake2b.hashBytes(guaranteeBlob, this.allocator).asOpaque<ExtrinsicHash>();
-    const ea = blake2b.hashBytes(extrinsicView.assurances.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
-    const ed = blake2b.hashBytes(extrinsicView.disputes.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
+    const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
+    const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
+    const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
+    const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
+    const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
 
     const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);
 
-    return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), extrinsicView, encoded);
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
   }
 
   /** Creates hash for given WorkPackage */
@@ -19614,7 +19624,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
     // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
     const encoded = Encoder.encodeObject(codec, data, this.context);
-    return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), data, encoded);
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
   }
 }
 
@@ -19635,7 +19645,10 @@ declare enum PreimagesErrorCode {
 
 // TODO [SeKo] consider whether this module is the right place to remove expired preimages
 declare class Preimages {
-  constructor(public readonly state: PreimagesState) {}
+  constructor(
+    public readonly state: PreimagesState,
+    public readonly blake2b: Blake2b,
+  ) {}
 
   integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
     // make sure lookup extrinsics are sorted and unique
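`Preimages` gets the same dependency-injection treatment: the hasher arrives via the constructor, and `integrate` (next hunk) hashes blobs through `this.blake2b`. A wiring sketch under those assumptions:

```ts
// Sketch only: the surrounding values are assumed to exist; the constructor
// shape is what this hunk introduces.
declare const state: PreimagesState;
declare const blake2b: Blake2b; // can be the same instance TransitionHasher holds
declare const input: PreimagesInput;

const preimages = new Preimages(state, blake2b);
const update = preimages.integrate(input); // Result: PreimagesStateUpdate or PreimagesErrorCode
```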
@@ -19664,7 +19677,7 @@ declare class Preimages {
     // select preimages for integration
     for (const preimage of preimages) {
       const { requester, blob } = preimage;
-      const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
+      const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();
 
       const service = this.state.getService(requester);
       if (service === null) {
@@ -19695,156 +19708,6 @@
   }
 }
 
-declare enum ServiceExecutorError {
-  NoLookup = 0,
-  NoState = 1,
-  NoServiceCode = 2,
-  ServiceCodeMismatch = 3,
-}
-
-declare class WorkPackageExecutor {
-  constructor(
-    private readonly blocks: BlocksDb,
-    private readonly state: StatesDb,
-    private readonly hasher: TransitionHasher,
-  ) {}
-
-  // TODO [ToDr] this while thing should be triple-checked with the GP.
-  // I'm currently implementing some dirty version for the demo.
-  async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
-    const headerHash = pack.context.lookupAnchor;
-    // execute authorisation first or is it already executed and we just need to check it?
-    const authExec = this.getServiceExecutor(
-      // TODO [ToDr] should this be anchor or lookupAnchor?
-      headerHash,
-      pack.authCodeHost,
-      pack.authCodeHash,
-    );
-
-    if (authExec.isError) {
-      // TODO [ToDr] most likely shouldn't be throw.
-      throw new Error(`Could not get authorization executor: ${authExec.error}`);
-    }
-
-    const pvm = authExec.ok;
-    const authGas = tryAsGas(15_000n);
-    const result = await pvm.run(pack.parametrization, authGas);
-
-    if (!result.isEqualTo(pack.authorization)) {
-      throw new Error("Authorization is invalid.");
-    }
-
-    const results: WorkResult[] = [];
-    for (const item of pack.items) {
-      const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
-      if (exec.isError) {
-        throw new Error(`Could not get item executor: ${exec.error}`);
-      }
-      const pvm = exec.ok;
-
-      const gasRatio = tryAsServiceGas(3_000n);
-      const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
-      results.push(
-        WorkResult.create({
-          serviceId: item.service,
-          codeHash: item.codeHash,
-          payloadHash: blake2b.hashBytes(item.payload),
-          gas: gasRatio,
-          result: new WorkExecResult(WorkExecResultKind.ok, ret),
-          load: WorkRefineLoad.create({
-            gasUsed: tryAsServiceGas(5),
-            importedSegments: tryAsU32(0),
-            exportedSegments: tryAsU32(0),
-            extrinsicSize: tryAsU32(0),
-            extrinsicCount: tryAsU32(0),
-          }),
-        }),
-      );
-    }
-
-    const workPackage = this.hasher.workPackage(pack);
-    const workPackageSpec = WorkPackageSpec.create({
-      hash: workPackage.hash,
-      length: tryAsU32(workPackage.encoded.length),
-      erasureRoot: Bytes.zero(HASH_SIZE),
-      exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
-      exportsCount: tryAsU16(0),
-    });
-    const coreIndex = tryAsCoreIndex(0);
-    const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
-
-    const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
-
-    return Promise.resolve(
-      WorkReport.create({
-        workPackageSpec,
-        context: pack.context,
-        coreIndex,
-        authorizerHash,
-        authorizationOutput: pack.authorization,
-        segmentRootLookup: [],
-        results: workResults,
-        authorizationGasUsed: tryAsServiceGas(0),
-      }),
-    );
-  }
-
-  getServiceExecutor(
-    lookupAnchor: HeaderHash,
-    serviceId: ServiceId,
-    expectedCodeHash: CodeHash,
-  ): Result$2<PvmExecutor, ServiceExecutorError> {
-    const header = this.blocks.getHeader(lookupAnchor);
-    if (header === null) {
-      return Result.error(ServiceExecutorError.NoLookup);
-    }
-
-    const state = this.state.getState(lookupAnchor);
-    if (state === null) {
-      return Result.error(ServiceExecutorError.NoState);
-    }
-
-    const service = state.getService(serviceId);
-    const serviceCodeHash = service?.getInfo().codeHash ?? null;
-    if (serviceCodeHash === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
-      return Result.error(ServiceExecutorError.ServiceCodeMismatch);
-    }
-
-    const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
-    if (serviceCode === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    return Result.ok(new PvmExecutor(serviceCode));
-  }
-}
-
-declare class PvmExecutor {
-  private readonly pvm: HostCalls;
-  private hostCalls = new HostCallsManager({ missing: new Missing() });
-  private pvmInstanceManager = new PvmInstanceManager(4);
-
-  constructor(private serviceCode: BytesBlob) {
-    this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
-  }
-
-  async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
-    const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
-
-    const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
-
-    if (result.hasMemorySlice()) {
-      return BytesBlob.blobFrom(result.memorySlice);
-    }
-
-    return BytesBlob.empty();
-  }
-}
-
 type index_Preimages = Preimages;
 declare const index_Preimages: typeof Preimages;
 type index_PreimagesErrorCode = PreimagesErrorCode;
@@ -19854,10 +19717,8 @@ type index_PreimagesState = PreimagesState;
 type index_PreimagesStateUpdate = PreimagesStateUpdate;
 type index_TransitionHasher = TransitionHasher;
 declare const index_TransitionHasher: typeof TransitionHasher;
-type index_WorkPackageExecutor = WorkPackageExecutor;
-declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
 declare namespace index {
-  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher, index_WorkPackageExecutor as WorkPackageExecutor };
+  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
   export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
 }