@typeberry/lib 0.1.3-707962d → 0.1.3-c2321fb

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.cjs +358 -1122
  2. package/index.d.ts +250 -403
  3. package/index.js +357 -1121
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -420,6 +420,20 @@ declare const Result$2 = {
  },
  };

+ // about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+ // - https://issues.chromium.org/issues/40055619
+ // - https://stackoverflow.com/a/72124984
+ // - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+ declare const MAX_LENGTH$1 = 2145386496;
+
+ declare function safeAllocUint8Array(length: number) {
+ if (length > MAX_LENGTH) {
+ // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+ console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+ }
+ return new Uint8Array(Math.min(MAX_LENGTH, length));
+ }
+
  /**
  * Utilities for tests.
  */
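Note the clamping semantics introduced here: oversized requests are not rejected, the returned buffer is silently capped at `MAX_LENGTH`, so callers cannot assume the requested length was honored (the `$1` suffix appears to be the d.ts bundler's rename; the source constant is `MAX_LENGTH`). A minimal caller-side sketch, using only names from the hunk above:

```ts
// safeAllocUint8Array warns and clamps instead of throwing a RangeError:
const big = safeAllocUint8Array(2145386496 + 1); // logs a console.warn
console.log(big.length); // 2145386496 — check `.length` before writing at fixed offsets
```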
@@ -755,11 +769,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
  declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
  declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
  declare const index$u_resultToString: typeof resultToString;
+ declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
  declare const index$u_seeThrough: typeof seeThrough;
  declare const index$u_trimStack: typeof trimStack;
  declare const index$u_workspacePathFix: typeof workspacePathFix;
  declare namespace index$u {
- export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
+ export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
  export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
  }

@@ -929,7 +944,7 @@ declare class BytesBlob {
  static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
  const vArr = v instanceof Uint8Array ? [v] : v;
  const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
- const buffer = new Uint8Array(totalLength);
+ const buffer = safeAllocUint8Array(totalLength);
  let offset = 0;
  for (const r of vArr) {
  buffer.set(r, offset);
@@ -1012,7 +1027,7 @@ declare class Bytes<T extends number> extends BytesBlob {

  /** Create an empty [`Bytes<X>`] of given length. */
  static zero<X extends number>(len: X): Bytes<X> {
- return new Bytes(new Uint8Array(len), len);
+ return new Bytes(safeAllocUint8Array(len), len);
  }

  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1148,7 @@ declare class BitVec {
  * Create new [`BitVec`] with all values set to `false`.
  */
  static empty(bitLength: number) {
- const data = new Uint8Array(Math.ceil(bitLength / 8));
+ const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
  return new BitVec(data, bitLength);
  }

@@ -3461,6 +3476,52 @@ declare namespace index$q {
  export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
  }

+ type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
+ type IDataType = string | Buffer | ITypedArray;
+
+ type IHasher = {
+ /**
+ * Initializes hash state to default value
+ */
+ init: () => IHasher;
+ /**
+ * Updates the hash content with the given data
+ */
+ update: (data: IDataType) => IHasher;
+ /**
+ * Calculates the hash of all of the data passed to be hashed with hash.update().
+ * Defaults to hexadecimal string
+ * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
+ * returns hexadecimal string
+ */
+ digest: {
+ (outputType: "binary"): Uint8Array;
+ (outputType?: "hex"): string;
+ };
+ /**
+ * Save the current internal state of the hasher for later resumption with load().
+ * Cannot be called before .init() or after .digest()
+ *
+ * Note that this state can include arbitrary information about the value being hashed (e.g.
+ * could include N plaintext bytes from the value), so needs to be treated as being as
+ * sensitive as the input value itself.
+ */
+ save: () => Uint8Array;
+ /**
+ * Resume a state that was created by save(). If this state was not created by a
+ * compatible build of hash-wasm, an exception will be thrown.
+ */
+ load: (state: Uint8Array) => IHasher;
+ /**
+ * Block size in bytes
+ */
+ blockSize: number;
+ /**
+ * Digest size in bytes
+ */
+ digestSize: number;
+ };
+
  /**
  * Size of the output of the hash functions.
  *
@@ -3516,144 +3577,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
  }
  }

- /** Allocator interface - returns an empty bytes vector that can be filled with the hash. */
- interface HashAllocator {
- /** Return a new hash destination. */
- emptyHash(): OpaqueHash;
- }
-
- /** The simplest allocator returning just a fresh copy of bytes each time. */
- declare class SimpleAllocator implements HashAllocator {
- emptyHash(): OpaqueHash {
- return Bytes.zero(HASH_SIZE);
- }
- }
-
- /** An allocator that works by allocating larger (continuous) pages of memory. */
- declare class PageAllocator implements HashAllocator {
- private page: Uint8Array = new Uint8Array(0);
- private currentHash = 0;
-
- // TODO [ToDr] Benchmark the performance!
- constructor(private readonly hashesPerPage: number) {
- check`${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
- this.resetPage();
- }
+ declare const zero$1 = Bytes.zero(HASH_SIZE);

- private resetPage() {
- const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
- this.currentHash = 0;
- this.page = new Uint8Array(pageSizeBytes);
+ declare class Blake2b {
+ static async createHasher() {
+ return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
  }

- emptyHash(): OpaqueHash {
- const startIdx = this.currentHash * HASH_SIZE;
- const endIdx = startIdx + HASH_SIZE;
+ private constructor(private readonly hasher: IHasher) {}

- this.currentHash += 1;
- if (this.currentHash >= this.hashesPerPage) {
- this.resetPage();
+ /**
+ * Hash given collection of blobs.
+ *
+ * If empty array is given a zero-hash is returned.
+ */
+ hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
+ if (r.length === 0) {
+ return zero.asOpaque();
  }

- return Bytes.fromBlob(this.page.subarray(startIdx, endIdx), HASH_SIZE);
+ const hasher = this.hasher.init();
+ for (const v of r) {
+ hasher.update(v instanceof BytesBlob ? v.raw : v);
+ }
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
  }
- }
-
- declare const defaultAllocator = new SimpleAllocator();

- /**
- * Hash given collection of blobs.
- *
- * If empty array is given a zero-hash is returned.
- */
- declare function hashBlobs$1<H extends Blake2bHash>(
- r: (BytesBlob | Uint8Array)[],
- allocator: HashAllocator = defaultAllocator,
- ): H {
- const out = allocator.emptyHash();
- if (r.length === 0) {
- return out.asOpaque();
+ /** Hash given blob of bytes. */
+ hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
+ const hasher = this.hasher.init();
+ const bytes = blob instanceof BytesBlob ? blob.raw : blob;
+ hasher.update(bytes);
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
  }

- const hasher = blake2b(HASH_SIZE);
- for (const v of r) {
- hasher?.update(v instanceof BytesBlob ? v.raw : v);
+ /** Convert given string into bytes and hash it. */
+ hashString(str: string) {
+ return this.hashBytes(BytesBlob.blobFromString(str));
  }
- hasher?.digest(out.raw);
- return out.asOpaque();
- }
-
- /** Hash given blob of bytes. */
- declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
- const hasher = blake2b(HASH_SIZE);
- const bytes = blob instanceof BytesBlob ? blob.raw : blob;
- hasher?.update(bytes);
- const out = allocator.emptyHash();
- hasher?.digest(out.raw);
- return out;
- }
-
- /** Convert given string into bytes and hash it. */
- declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
- return hashBytes(BytesBlob.blobFromString(str), allocator);
- }
-
- declare const blake2b_hashBytes: typeof hashBytes;
- declare const blake2b_hashString: typeof hashString;
- declare namespace blake2b {
- export {
- hashBlobs$1 as hashBlobs,
- blake2b_hashBytes as hashBytes,
- blake2b_hashString as hashString,
- };
  }

- type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
- type IDataType = string | Buffer | ITypedArray;
-
- type IHasher = {
- /**
- * Initializes hash state to default value
- */
- init: () => IHasher;
- /**
- * Updates the hash content with the given data
- */
- update: (data: IDataType) => IHasher;
- /**
- * Calculates the hash of all of the data passed to be hashed with hash.update().
- * Defaults to hexadecimal string
- * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
- * returns hexadecimal string
- */
- digest: {
- (outputType: "binary"): Uint8Array;
- (outputType?: "hex"): string;
- };
- /**
- * Save the current internal state of the hasher for later resumption with load().
- * Cannot be called before .init() or after .digest()
- *
- * Note that this state can include arbitrary information about the value being hashed (e.g.
- * could include N plaintext bytes from the value), so needs to be treated as being as
- * sensitive as the input value itself.
- */
- save: () => Uint8Array;
- /**
- * Resume a state that was created by save(). If this state was not created by a
- * compatible build of hash-wasm, an exception will be thrown.
- */
- load: (state: Uint8Array) => IHasher;
- /**
- * Block size in bytes
- */
- blockSize: number;
- /**
- * Digest size in bytes
- */
- digestSize: number;
- };
-
  declare class KeccakHasher {
  static async create(): Promise<KeccakHasher> {
  return new KeccakHasher(await createKeccak(256));
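The allocator-based `blake2b` namespace is replaced by a `Blake2b` class wrapping a hash-wasm `IHasher` (the `IHasher` type moves up in the bundle accordingly). A usage sketch of the new API, with names taken from the hunks above; the exact import path is not shown in this diff:

```ts
// assuming: import { Blake2b } from "@typeberry/lib" (or its hash sub-module)
const blake2b = await Blake2b.createHasher();  // instantiates the WASM BLAKE2b hasher
const one = blake2b.hashBytes(someBlob);       // Blake2bHash (HASH_SIZE bytes)
const many = blake2b.hashBlobs([a, b.raw]);    // zero-hash when the array is empty
const text = blake2b.hashString("typeberry");
```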
@@ -3681,15 +3644,15 @@ declare namespace keccak {
  };
  }

+ // TODO [ToDr] (#213) this should most likely be moved to a separate
+ // package to avoid pulling in unnecessary deps.
+
+ type index$p_Blake2b = Blake2b;
+ declare const index$p_Blake2b: typeof Blake2b;
  type index$p_Blake2bHash = Blake2bHash;
  type index$p_HASH_SIZE = HASH_SIZE;
- type index$p_HashAllocator = HashAllocator;
  type index$p_KeccakHash = KeccakHash;
  type index$p_OpaqueHash = OpaqueHash;
- type index$p_PageAllocator = PageAllocator;
- declare const index$p_PageAllocator: typeof PageAllocator;
- type index$p_SimpleAllocator = SimpleAllocator;
- declare const index$p_SimpleAllocator: typeof SimpleAllocator;
  type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
  type index$p_TruncatedHash = TruncatedHash;
  type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3697,12 +3660,10 @@ declare const index$p_WithHash: typeof WithHash;
  type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
  declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
  declare const index$p_ZERO_HASH: typeof ZERO_HASH;
- declare const index$p_blake2b: typeof blake2b;
- declare const index$p_defaultAllocator: typeof defaultAllocator;
  declare const index$p_keccak: typeof keccak;
  declare namespace index$p {
- export { index$p_PageAllocator as PageAllocator, index$p_SimpleAllocator as SimpleAllocator, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_blake2b as blake2b, index$p_defaultAllocator as defaultAllocator, index$p_keccak as keccak };
- export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_HashAllocator as HashAllocator, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
+ export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
+ export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
  }

  /** Immutable view of the `HashDictionary`. */
@@ -4735,7 +4696,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
  (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
  0,
  );
- const data = new Uint8Array(dataLength);
+ const data = safeAllocUint8Array(dataLength);

  let offset = 0;

@@ -4825,22 +4786,16 @@ declare function trivialSeed(s: U32): KeySeed {
  * Derives a Ed25519 secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
- declare function deriveEd25519SecretKey(
- seed: KeySeed,
- allocator: SimpleAllocator = new SimpleAllocator(),
- ): Ed25519SecretSeed {
- return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+ declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
+ return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
  }

  /**
  * Derives a Bandersnatch secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
- declare function deriveBandersnatchSecretKey(
- seed: KeySeed,
- allocator: SimpleAllocator = new SimpleAllocator(),
- ): BandersnatchSecretSeed {
- return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+ declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
+ return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
  }

  /**
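Both derivation functions drop the optional allocator argument and take the `Blake2b` instance explicitly. A call-site sketch using names from the hunk above:

```ts
const blake2b = await Blake2b.createHasher();
const seed = trivialSeed(tryAsU32(0));
const ed25519Seed = deriveEd25519SecretKey(seed, blake2b);
const bandersnatchSeed = deriveBandersnatchSecretKey(seed, blake2b);
```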
@@ -8373,7 +8328,7 @@ declare enum NodeType {
  declare class TrieNode {
  constructor(
  /** Exactly 512 bits / 64 bytes */
- public readonly raw: Uint8Array = new Uint8Array(TRIE_NODE_BYTES),
+ public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
  ) {}

  /** Returns the type of the node */
@@ -9111,21 +9066,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
  return Ordering.Equal;
  }

- declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
- Descriptor.withView(
- val.name,
- val.sizeHint,
- (e, elem) => val.encode(e, elem.data),
- (d): WithHash<H, T> => {
- const decoder2 = d.clone();
- const encoded = val.skipEncoded(decoder2);
- const hash = blake2b.hashBytes(encoded);
- return new WithHash(hash.asOpaque(), val.decode(d));
- },
- val.skip,
- val.View,
- );
-
  /**
  * Assignment of particular work report to a core.
  *
@@ -9136,7 +9076,7 @@ declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>
  */
  declare class AvailabilityAssignment extends WithDebug {
  static Codec = codec.Class(AvailabilityAssignment, {
- workReport: codecWithHash(WorkReport.Codec),
+ workReport: WorkReport.Codec,
  timeout: codec.u32.asOpaque<TimeSlot>(),
  });

@@ -9146,7 +9086,7 @@ declare class AvailabilityAssignment extends WithDebug {

  private constructor(
  /** Work report assigned to a core. */
- public readonly workReport: WithHash<WorkReportHash, WorkReport>,
+ public readonly workReport: WorkReport,
  /** Time slot at which the report becomes obsolete. */
  public readonly timeout: TimeSlot,
  ) {
@@ -9196,6 +9136,11 @@ declare class DisputesRecords {
  return new DisputesRecords(goodSet, badSet, wonkySet, punishSet);
  }

+ private readonly goodSetDict: ImmutableHashSet<WorkReportHash>;
+ private readonly badSetDict: ImmutableHashSet<WorkReportHash>;
+ private readonly wonkySetDict: ImmutableHashSet<WorkReportHash>;
+ private readonly punishSetDict: ImmutableHashSet<Ed25519Key>;
+
  private constructor(
  /** `goodSet`: all work-reports hashes which were judged to be correct */
  public readonly goodSet: ImmutableSortedSet<WorkReportHash>,
@@ -9205,7 +9150,21 @@ declare class DisputesRecords {
  public readonly wonkySet: ImmutableSortedSet<WorkReportHash>,
  /** `punishSet`: set of Ed25519 keys representing validators which were found to have misjudged a work-report */
  public readonly punishSet: ImmutableSortedSet<Ed25519Key>,
- ) {}
+ ) {
+ this.goodSetDict = HashSet.from(goodSet.array);
+ this.badSetDict = HashSet.from(badSet.array);
+ this.wonkySetDict = HashSet.from(wonkySet.array);
+ this.punishSetDict = HashSet.from(punishSet.array);
+ }
+
+ public asDictionaries() {
+ return {
+ goodSet: this.goodSetDict,
+ badSet: this.badSetDict,
+ wonkySet: this.wonkySetDict,
+ punishSet: this.punishSetDict,
+ };
+ }

  static fromSortedArrays({
  goodSet,
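The constructor now precomputes `HashSet` views of the four sorted sets, and `asDictionaries()` exposes them. A membership-check sketch, assuming `ImmutableHashSet` exposes a `has`-style lookup (not shown in this diff):

```ts
const { goodSet, badSet, wonkySet } = disputesRecords.asDictionaries();
// O(1) membership instead of scanning the ImmutableSortedSet arrays:
const alreadyJudged =
  goodSet.has(reportHash) || badSet.has(reportHash) || wonkySet.has(reportHash);
```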
@@ -11274,7 +11233,6 @@ declare const index$e_codecPerCore: typeof codecPerCore;
  declare const index$e_codecServiceId: typeof codecServiceId;
  declare const index$e_codecVarGas: typeof codecVarGas;
  declare const index$e_codecVarU16: typeof codecVarU16;
- declare const index$e_codecWithHash: typeof codecWithHash;
  declare const index$e_hashComparator: typeof hashComparator;
  declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
  declare const index$e_serviceDataCodec: typeof serviceDataCodec;
@@ -11285,7 +11243,7 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
  declare namespace index$e {
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
  export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
  }

@@ -11353,7 +11311,7 @@ declare namespace stateKeys {
  }

  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
- export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
+ export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
  const out = Bytes.zero(HASH_SIZE);
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11361,11 +11319,11 @@ declare namespace stateKeys {
  return legacyServiceNested(serviceId, out);
  }

- return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
  }

  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
- export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
+ export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
  const out = Bytes.zero(HASH_SIZE);
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11373,11 +11331,16 @@ declare namespace stateKeys {
  return legacyServiceNested(serviceId, out);
  }

- return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
  }

  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
- export function serviceLookupHistory(serviceId: ServiceId, hash: PreimageHash, preimageLength: U32): StateKey {
+ export function serviceLookupHistory(
+ blake2b: Blake2b,
+ serviceId: ServiceId,
+ hash: PreimageHash,
+ preimageLength: U32,
+ ): StateKey {
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
  const doubleHash = blake2b.hashBytes(hash);
  const out = Bytes.zero(HASH_SIZE);
@@ -11386,11 +11349,11 @@ declare namespace stateKeys {
  return legacyServiceNested(serviceId, out);
  }

- return serviceNested(serviceId, preimageLength, hash);
+ return serviceNested(blake2b, serviceId, preimageLength, hash);
  }

  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
- export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
+ export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
  const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
  const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
  const key = Bytes.zero(HASH_SIZE);
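Every `stateKeys` helper now threads the `Blake2b` instance down to `serviceNested`, which hashes the number-prefix-plus-hash input. A call-site sketch with names from the hunks above:

```ts
const blake2b = await Blake2b.createHasher();
const storageKey = stateKeys.serviceStorage(blake2b, serviceId, key);
const preimageKey = stateKeys.servicePreimage(blake2b, serviceId, preimageHash);
const lookupKey = stateKeys.serviceLookupHistory(blake2b, serviceId, preimageHash, length);
```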
@@ -11574,20 +11537,20 @@ declare namespace serialize {
  });

  /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
- export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
- key: stateKeys.serviceStorage(serviceId, key),
+ export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
+ key: stateKeys.serviceStorage(blake2b, serviceId, key),
  Codec: dumpCodec,
  });

  /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
- export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
- key: stateKeys.servicePreimage(serviceId, hash),
+ export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
+ key: stateKeys.servicePreimage(blake2b, serviceId, hash),
  Codec: dumpCodec,
  });

  /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
- export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
- key: stateKeys.serviceLookupHistory(serviceId, hash, len),
+ export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
+ key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
  Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
  });
  }
@@ -11622,6 +11585,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
  /** Serialize given state update into a series of key-value pairs. */
  declare function* serializeStateUpdate(
  spec: ChainSpec,
+ blake2b: Blake2b,
  update: Partial<State & ServicesUpdate>,
  ): Generator<StateEntryUpdate> {
  // first let's serialize all of the simple entries (if present!)
@@ -11630,9 +11594,9 @@ declare function* serializeStateUpdate(
  const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);

  // then let's proceed with service updates
- yield* serializeServiceUpdates(update.servicesUpdates, encode);
- yield* serializePreimages(update.preimages, encode);
- yield* serializeStorage(update.storage);
+ yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
+ yield* serializePreimages(update.preimages, encode, blake2b);
+ yield* serializeStorage(update.storage, blake2b);
  yield* serializeRemovedServices(update.servicesRemoved);
  }

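`serializeStateUpdate` likewise takes the hasher and forwards it to the service, preimage, and storage serializers. A draining sketch:

```ts
const blake2b = await Blake2b.createHasher();
for (const [action, key, value] of serializeStateUpdate(spec, blake2b, update)) {
  // action is StateEntryUpdateAction.Insert or StateEntryUpdateAction.Remove
}
```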
@@ -11644,18 +11608,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
  }
  }

- declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
+ declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
  for (const { action, serviceId } of storage ?? []) {
  switch (action.kind) {
  case UpdateStorageKind.Set: {
  const key = action.storage.key;
- const codec = serialize.serviceStorage(serviceId, key);
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
  yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
  break;
  }
  case UpdateStorageKind.Remove: {
  const key = action.key;
- const codec = serialize.serviceStorage(serviceId, key);
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
  break;
  }
@@ -11665,16 +11629,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
  }
  }

- declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, encode: EncodeFun): Generator<StateEntryUpdate> {
+ declare function* serializePreimages(
+ preimages: UpdatePreimage[] | undefined,
+ encode: EncodeFun,
+ blake2b: Blake2b,
+ ): Generator<StateEntryUpdate> {
  for (const { action, serviceId } of preimages ?? []) {
  switch (action.kind) {
  case UpdatePreimageKind.Provide: {
  const { hash, blob } = action.preimage;
- const codec = serialize.servicePreimages(serviceId, hash);
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
  yield [StateEntryUpdateAction.Insert, codec.key, blob];

  if (action.slot !== null) {
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
  yield [
  StateEntryUpdateAction.Insert,
  codec2.key,
@@ -11685,16 +11653,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
  }
  case UpdatePreimageKind.UpdateOrAdd: {
  const { hash, length, slots } = action.item;
- const codec = serialize.serviceLookupHistory(serviceId, hash, length);
+ const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
  break;
  }
  case UpdatePreimageKind.Remove: {
  const { hash, length } = action;
- const codec = serialize.servicePreimages(serviceId, hash);
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];

- const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
  yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
  break;
  }
@@ -11706,6 +11674,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
  declare function* serializeServiceUpdates(
  servicesUpdates: UpdateService[] | undefined,
  encode: EncodeFun,
+ blake2b: Blake2b,
  ): Generator<StateEntryUpdate> {
  for (const { action, serviceId } of servicesUpdates ?? []) {
  // new service being created or updated
@@ -11715,7 +11684,7 @@ declare function* serializeServiceUpdates(
  // additional lookup history update
  if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
  const { lookupHistory } = action;
- const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
  yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
  }
  }
@@ -11849,8 +11818,8 @@ declare class StateEntries {
  );

  /** Turn in-memory state into it's serialized form. */
- static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
- return new StateEntries(convertInMemoryStateToDictionary(spec, state));
+ static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
+ return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
  }

  /**
@@ -11905,7 +11874,8 @@ declare class StateEntries {
  }

  /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
- getRootHash(): StateRootHash {
+ getRootHash(blake2b: Blake2b): StateRootHash {
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
  const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
  for (const [key, value] of this) {
  leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
@@ -11918,6 +11888,7 @@ declare class StateEntries {
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
  declare function convertInMemoryStateToDictionary(
  spec: ChainSpec,
+ blake2b: Blake2b,
  state: InMemoryState,
  ): TruncatedHashDictionary<StateKey, BytesBlob> {
  const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11950,20 +11921,25 @@ declare function convertInMemoryStateToDictionary(

  // preimages
  for (const preimage of service.data.preimages.values()) {
- const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
+ const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
  serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
  }

  // storage
  for (const storage of service.data.storage.values()) {
- const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
+ const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
  serialized.set(key, Encoder.encodeObject(Codec, storage.value));
  }

  // lookup history
  for (const lookupHistoryList of service.data.lookupHistory.values()) {
  for (const lookupHistory of lookupHistoryList) {
- const { key, Codec } = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
+ const { key, Codec } = serialize.serviceLookupHistory(
+ blake2b,
+ serviceId,
+ lookupHistory.hash,
+ lookupHistory.length,
+ );
  serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
  }
  }
@@ -11994,21 +11970,23 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
  implements State, EnumerableState
  {
  /** Create a state-like object from collection of serialized entries. */
- static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
- return new SerializedState(spec, state, recentServices);
+ static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
+ return new SerializedState(spec, blake2b, state, recentServices);
  }

  /** Create a state-like object backed by some DB. */
  static new<T extends SerializedStateBackend>(
  spec: ChainSpec,
+ blake2b: Blake2b,
  db: T,
  recentServices: ServiceId[] = [],
  ): SerializedState<T> {
- return new SerializedState(spec, db, recentServices);
+ return new SerializedState(spec, blake2b, db, recentServices);
  }

  private constructor(
  private readonly spec: ChainSpec,
+ private readonly blake2b: Blake2b,
  public backend: T,
  /** Best-effort list of recently active services. */
  private readonly _recentServiceIds: ServiceId[],
@@ -12039,7 +12017,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
  this._recentServiceIds.push(id);
  }

- return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
+ return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
  }

  private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
@@ -12138,6 +12116,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
  /** Service data representation on a serialized state. */
  declare class SerializedService implements Service {
  constructor(
+ public readonly blake2b: Blake2b,
  /** Service id */
  public readonly serviceId: ServiceId,
  private readonly accountInfo: ServiceAccountInfo,
@@ -12153,14 +12132,14 @@ declare class SerializedService implements Service {
  getStorage(rawKey: StorageKey): BytesBlob | null {
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
  const SERVICE_ID_BYTES = 4;
- const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
+ const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
  serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
  serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
- const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
+ const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
  }

- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
  }

  /**
@@ -12170,17 +12149,17 @@ declare class SerializedService implements Service {
  */
  hasPreimage(hash: PreimageHash): boolean {
  // TODO [ToDr] consider optimizing to avoid fetching the whole data.
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
  }

  /** Retrieve preimage from the DB. */
  getPreimage(hash: PreimageHash): BytesBlob | null {
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
  }

  /** Retrieve preimage lookup history. */
  getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
- const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
+ const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
  if (rawSlots === undefined) {
  return null;
  }
@@ -12193,9 +12172,9 @@ type KeyAndCodec<T> = {
  Codec: Decode<T>;
  };

- declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
+ declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
  const stateEntries = StateEntries.fromEntriesUnsafe(entries);
- return SerializedState.fromStateEntries(spec, stateEntries);
+ return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
  }

  /**
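State loading and root computation follow the same pattern: the hasher is created once (`createHasher` is async because the WASM hasher must be instantiated) and passed everywhere. A sketch combining the signatures above:

```ts
const blake2b = await Blake2b.createHasher();
const state = loadState(spec, blake2b, entries);
const root = StateEntries.serializeInMemory(spec, blake2b, inMemoryState).getRootHash(blake2b);
```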
@@ -12351,7 +12330,7 @@ declare class LeafDb implements SerializedStateBackend {
  assertNever(val);
  }

- getStateRoot(): StateRootHash {
+ getStateRoot(blake2b: Blake2b): StateRootHash {
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
  return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
  }

@@ -12449,7 +12429,8 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
  }

  async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
- return StateEntries.serializeInMemory(this.spec, state).getRootHash();
+ const blake2b = await Blake2b.createHasher();
+ return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
  }

  /** Insert a full state into the database. */
@@ -12554,7 +12535,7 @@ declare function padAndEncodeData(input: BytesBlob) {
  const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
  let padded = input;
  if (input.length !== paddedLength) {
- padded = BytesBlob.blobFrom(new Uint8Array(paddedLength));
+ padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
  padded.raw.set(input.raw, 0);
  }
  return chunkingFunction(padded);
@@ -12610,7 +12591,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
  */
  declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
  const result: Bytes<POINT_LENGTH>[] = [];
- const data = new Uint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
+ const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);

  // add original shards to the result
  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12630,7 +12611,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
  for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
  const pointIndex = i * POINT_ALIGNMENT;

- const redundancyPoint = new Uint8Array(POINT_LENGTH);
+ const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
  for (let j = 0; j < POINT_LENGTH; j++) {
  redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
  }
@@ -12650,7 +12631,7 @@ declare function decodePiece(
  ): Bytes<PIECE_SIZE> {
  const result = Bytes.zero(PIECE_SIZE);

- const data = new Uint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
+ const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
  const indices = new Uint16Array(input.length);

  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12777,7 +12758,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
  return BytesBlob.empty();
  }
  const n = input[0].length;
- const result = BytesBlob.blobFrom(new Uint8Array(k * n));
+ const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
  for (let i = 0; i < k; i++) {
  const entry = input[i].raw;
  for (let j = 0; j < n; j++) {
@@ -13675,13 +13656,12 @@ interface PartialState {

  /**
  * Transfer given `amount` of funds to the `destination`,
- * passing `suppliedGas` to invoke `OnTransfer` entry point
- * and given `memo`.
+ * passing `gas` fee for transfer and given `memo`.
  */
  transfer(
  destination: ServiceId | null,
  amount: U64,
- suppliedGas: ServiceGas,
+ gas: ServiceGas,
  memo: Bytes<TRANSFER_MEMO_BYTES>,
  ): Result$2<OK, TransferError>;

@@ -13850,7 +13830,7 @@ declare class Mask {
  }

  private buildLookupTableForward(mask: BitVec) {
- const table = new Uint8Array(mask.bitLength);
+ const table = safeAllocUint8Array(mask.bitLength);
  let lastInstructionOffset = 0;
  for (let i = mask.bitLength - 1; i >= 0; i--) {
  if (mask.isSet(i)) {
@@ -13994,7 +13974,7 @@ declare class Registers {
  private asSigned: BigInt64Array;
  private asUnsigned: BigUint64Array;

- constructor(private readonly bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
+ constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
  check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
@@ -18052,9 +18032,15 @@ type HostCallIndex = Opaque<U32, "HostCallIndex[U32]">;
  /** Attempt to convert a number into `HostCallIndex`. */
  declare const tryAsHostCallIndex = (v: number): HostCallIndex => asOpaqueType(tryAsU32(v));

+ /**
+ * Host-call exit reason.
+ *
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/24a30124a501?v=0.7.2
+ */
  declare enum PvmExecution {
  Halt = 0,
  Panic = 1,
+ OOG = 2, // out-of-gas
  }

  /** A utility function to easily trace a bunch of registers. */
@@ -18067,8 +18053,12 @@ interface HostCallHandler {
  /** Index of that host call (i.e. what PVM invokes via `ecalli`) */
  readonly index: HostCallIndex;

- /** The gas cost of invocation of that host call. */
- readonly gasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
+ /**
+ * The gas cost of invocation of that host call.
+ *
+ * NOTE: `((reg: IHostCallRegisters) => Gas)` function is for compatibility reasons: pre GP 0.7.2
+ */
+ readonly basicGasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);

  /** Currently executing service id. */
  readonly currentServiceId: U32;
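The `gasCost` field is renamed to `basicGasCost`, with the register-dependent function form kept only for pre-GP-0.7.2 compatibility. A hypothetical handler fragment showing the rename; only `index` and `basicGasCost` come from the diff, the rest of the handler members are elided:

```ts
const handler = {
  index: tryAsHostCallIndex(7),
  // flat cost charged before execution...
  basicGasCost: 10,
  // ...or, pre GP 0.7.2, a register-dependent function:
  // basicGasCost: (regs: IHostCallRegisters) => computeGas(regs), // computeGas is hypothetical
};
```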
@@ -18211,7 +18201,7 @@ declare class HostCalls {
  const maybeAddress = regs.getLowerU32(7);
  const maybeLength = regs.getLowerU32(8);

- const result = new Uint8Array(maybeLength);
+ const result = safeAllocUint8Array(maybeLength);
  const startAddress = tryAsMemoryIndex(maybeAddress);
  const loadResult = memory.loadInto(result, startAddress);

@@ -18244,8 +18234,10 @@ declare class HostCalls {

  const hostCall = this.hostCalls.get(index);
  const gasBefore = gas.get();
- const gasCost = typeof hostCall.gasCost === "number" ? hostCall.gasCost : hostCall.gasCost(regs);
- const underflow = gas.sub(gasCost);
+ // NOTE: `basicGasCost(regs)` function is for compatibility reasons: pre GP 0.7.2
+ const basicGasCost =
+ typeof hostCall.basicGasCost === "number" ? hostCall.basicGasCost : hostCall.basicGasCost(regs);
+ const underflow = gas.sub(basicGasCost);

  const pcLog = `[PC: ${pvmInstance.getPC()}]`;
  if (underflow) {
@@ -18272,6 +18264,11 @@ declare class HostCalls {
  return this.getReturnValue(status, pvmInstance);
  }

+ if (result === PvmExecution.OOG) {
+ status = Status.OOG;
+ return this.getReturnValue(status, pvmInstance);
+ }
+
  if (result === undefined) {
  pvmInstance.runProgram();
  status = pvmInstance.getStatus();
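The dispatcher now maps the new `PvmExecution.OOG` exit reason to `Status.OOG` before falling through to the resume path. A condensed sketch of the branching added above (the Halt/Panic branch is abbreviated from surrounding context):

```ts
// result is PvmExecution | undefined, returned by the host-call implementation
if (result === PvmExecution.OOG) {
  return this.getReturnValue(Status.OOG, pvmInstance); // new in GP 0.7.2
}
if (result === undefined) {
  pvmInstance.runProgram(); // no early exit: keep executing the program
}
```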
@@ -18643,7 +18640,7 @@ declare class DebuggerAdapter {
18643
18640
 
18644
18641
  if (page === null) {
18645
18642
  // page wasn't allocated so we return an empty page
18646
- return new Uint8Array(PAGE_SIZE);
18643
+ return safeAllocUint8Array(PAGE_SIZE);
18647
18644
  }
18648
18645
 
18649
18646
  if (page.length === PAGE_SIZE) {
@@ -18652,7 +18649,7 @@ declare class DebuggerAdapter {
  }

  // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
- const fullPage = new Uint8Array(PAGE_SIZE);
+ const fullPage = safeAllocUint8Array(PAGE_SIZE);
  fullPage.set(page);
  return fullPage;
  }
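
Both branches now allocate pages through `safeAllocUint8Array`; the second one also pads a short page out to `PAGE_SIZE`. A compact sketch of the padding logic, assuming a 4096-byte page (the real `PAGE_SIZE` value is not shown in this diff):

```ts
const PAGE_SIZE = 4096; // assumed value, for illustration only

function toFullPage(page: Uint8Array | null): Uint8Array {
  if (page === null) return new Uint8Array(PAGE_SIZE); // unallocated: empty page
  if (page.length === PAGE_SIZE) return page;          // already full-sized
  const fullPage = new Uint8Array(PAGE_SIZE);          // zero-filled
  fullPage.set(page);                                  // copy prefix, keep zero tail
  return fullPage;
}
```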
@@ -18845,10 +18842,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
  *
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
- declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
+ declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
  check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
  const n = arr.length;
- const randomNumbers = hashToNumberSequence(entropy, arr.length);
+ const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
  const result: T[] = new Array<T>(n);

  let itemsLeft = n;
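
`fisherYatesShuffle` (like `hashToNumberSequence`) now receives the `Blake2b` instance explicitly instead of reaching for a module-level hasher. A type-level usage sketch in the style of the surrounding declarations; the concrete `blake2b` and `entropy` values are assumed to come from the caller:

```ts
// Type-level sketch: the hasher is an explicit argument, not a singleton.
declare const blake2b: Blake2b;
declare const entropy: Bytes<ENTROPY_BYTES>;

const validators = ["a", "b", "c", "d"];
// The permutation is driven entirely by hashToNumberSequence(blake2b, entropy, n),
// so the same hasher and entropy always yield the same order; the shuffled
// items are collected into a fresh `result` array.
const shuffled = fisherYatesShuffle(blake2b, validators, entropy);
```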
@@ -19020,8 +19017,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
  timeout: "number",
  },
  ({ report, timeout }) => {
- const workReportHash = blake2b.hashBytes(Encoder.encodeObject(WorkReport.Codec, report)).asOpaque();
- return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
+ return AvailabilityAssignment.create({ workReport: report, timeout });
  },
  );

@@ -19522,7 +19518,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
  constructor(
  private readonly context: ChainSpec,
  private readonly keccakHasher: KeccakHasher,
- private readonly allocator: HashAllocator,
+ public readonly blake2b: Blake2b,
  ) {}

  /** Concatenates two hashes and hash this concatenation */
@@ -19536,7 +19532,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {

  /** Creates hash from the block header view */
  header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
- return new WithHash(blake2b.hashBytes(header.encoded(), this.allocator).asOpaque(), header);
+ return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
  }

  /**
@@ -19550,7 +19546,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
  .view()
  .map((g) => g.view())
  .map((guarantee) => {
- const reportHash = blake2b.hashBytes(guarantee.report.encoded(), this.allocator).asOpaque<WorkReportHash>();
+ const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
  return BytesBlob.blobFromParts([
  reportHash.raw,
  guarantee.slot.encoded().raw,
@@ -19560,15 +19556,15 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {

  const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);

- const et = blake2b.hashBytes(extrinsicView.tickets.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
- const ep = blake2b.hashBytes(extrinsicView.preimages.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
- const eg = blake2b.hashBytes(guaranteeBlob, this.allocator).asOpaque<ExtrinsicHash>();
- const ea = blake2b.hashBytes(extrinsicView.assurances.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
- const ed = blake2b.hashBytes(extrinsicView.disputes.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
+ const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
+ const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
+ const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
+ const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
+ const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();

  const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);

- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), extrinsicView, encoded);
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
  }

  /** Creates hash for given WorkPackage */
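
The extrinsic hash is composed in two steps: hash each extrinsic section, then hash the concatenation of those five hashes. A runnable sketch of that shape, substituting Node's `blake2b512` (truncated to 32 bytes) for the package's `Blake2b` wrapper, which is an assumption made purely for illustration and not the same construction:

```ts
import { createHash } from "node:crypto";

// Stand-in hasher: blake2b-512 truncated to 32 bytes, NOT the package's Blake2b.
const h = (data: Uint8Array): Uint8Array =>
  createHash("blake2b512").update(data).digest().subarray(0, 32);

// Dummy encoded sections (real ones come from extrinsicView.*.encoded()).
const tickets = new Uint8Array([1]);
const preimages = new Uint8Array([2]);
const guaranteeBlob = new Uint8Array([3]);
const assurances = new Uint8Array([4]);
const disputes = new Uint8Array([5]);

// Hash each section, concatenate the five hashes, and hash the concatenation.
const parts = [tickets, preimages, guaranteeBlob, assurances, disputes].map(h);
const extrinsicHash = h(Buffer.concat(parts));
```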
@@ -19579,7 +19575,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
  private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
  // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
  const encoded = Encoder.encodeObject(codec, data, this.context);
- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), data, encoded);
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
  }
  }

@@ -19600,7 +19596,10 @@ declare enum PreimagesErrorCode {

  // TODO [SeKo] consider whether this module is the right place to remove expired preimages
  declare class Preimages {
- constructor(public readonly state: PreimagesState) {}
+ constructor(
+ public readonly state: PreimagesState,
+ public readonly blake2b: Blake2b,
+ ) {}

  integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
  // make sure lookup extrinsics are sorted and unique
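
As with `TransitionHasher`, `Preimages` now takes its `Blake2b` instance through the constructor, so hashing no longer depends on a module-level singleton. A self-contained sketch of the injection pattern with local stand-in types:

```ts
// Local stand-ins, assumed for illustration; only the constructor shape
// mirrors the declaration above.
interface Hasher {
  hashBytes(blob: Uint8Array): Uint8Array;
}

class PreimagesSketch {
  constructor(
    public readonly state: object,
    public readonly blake2b: Hasher,
  ) {}

  keyFor(blob: Uint8Array): Uint8Array {
    // was: blake2b.hashBytes(blob) against a module-level instance
    return this.blake2b.hashBytes(blob);
  }
}
```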
@@ -19629,7 +19628,7 @@ declare class Preimages {
  // select preimages for integration
  for (const preimage of preimages) {
  const { requester, blob } = preimage;
- const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
+ const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();

  const service = this.state.getService(requester);
  if (service === null) {
@@ -19660,156 +19659,6 @@ declare class Preimages {
  }
  }

- declare enum ServiceExecutorError {
- NoLookup = 0,
- NoState = 1,
- NoServiceCode = 2,
- ServiceCodeMismatch = 3,
- }
-
- declare class WorkPackageExecutor {
- constructor(
- private readonly blocks: BlocksDb,
- private readonly state: StatesDb,
- private readonly hasher: TransitionHasher,
- ) {}
-
- // TODO [ToDr] this while thing should be triple-checked with the GP.
- // I'm currently implementing some dirty version for the demo.
- async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
- const headerHash = pack.context.lookupAnchor;
- // execute authorisation first or is it already executed and we just need to check it?
- const authExec = this.getServiceExecutor(
- // TODO [ToDr] should this be anchor or lookupAnchor?
- headerHash,
- pack.authCodeHost,
- pack.authCodeHash,
- );
-
- if (authExec.isError) {
- // TODO [ToDr] most likely shouldn't be throw.
- throw new Error(`Could not get authorization executor: ${authExec.error}`);
- }
-
- const pvm = authExec.ok;
- const authGas = tryAsGas(15_000n);
- const result = await pvm.run(pack.parametrization, authGas);
-
- if (!result.isEqualTo(pack.authorization)) {
- throw new Error("Authorization is invalid.");
- }
-
- const results: WorkResult[] = [];
- for (const item of pack.items) {
- const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
- if (exec.isError) {
- throw new Error(`Could not get item executor: ${exec.error}`);
- }
- const pvm = exec.ok;
-
- const gasRatio = tryAsServiceGas(3_000n);
- const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
- results.push(
- WorkResult.create({
- serviceId: item.service,
- codeHash: item.codeHash,
- payloadHash: blake2b.hashBytes(item.payload),
- gas: gasRatio,
- result: new WorkExecResult(WorkExecResultKind.ok, ret),
- load: WorkRefineLoad.create({
- gasUsed: tryAsServiceGas(5),
- importedSegments: tryAsU32(0),
- exportedSegments: tryAsU32(0),
- extrinsicSize: tryAsU32(0),
- extrinsicCount: tryAsU32(0),
- }),
- }),
- );
- }
-
- const workPackage = this.hasher.workPackage(pack);
- const workPackageSpec = WorkPackageSpec.create({
- hash: workPackage.hash,
- length: tryAsU32(workPackage.encoded.length),
- erasureRoot: Bytes.zero(HASH_SIZE),
- exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
- exportsCount: tryAsU16(0),
- });
- const coreIndex = tryAsCoreIndex(0);
- const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
-
- const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
-
- return Promise.resolve(
- WorkReport.create({
- workPackageSpec,
- context: pack.context,
- coreIndex,
- authorizerHash,
- authorizationOutput: pack.authorization,
- segmentRootLookup: [],
- results: workResults,
- authorizationGasUsed: tryAsServiceGas(0),
- }),
- );
- }
-
- getServiceExecutor(
- lookupAnchor: HeaderHash,
- serviceId: ServiceId,
- expectedCodeHash: CodeHash,
- ): Result$2<PvmExecutor, ServiceExecutorError> {
- const header = this.blocks.getHeader(lookupAnchor);
- if (header === null) {
- return Result.error(ServiceExecutorError.NoLookup);
- }
-
- const state = this.state.getState(lookupAnchor);
- if (state === null) {
- return Result.error(ServiceExecutorError.NoState);
- }
-
- const service = state.getService(serviceId);
- const serviceCodeHash = service?.getInfo().codeHash ?? null;
- if (serviceCodeHash === null) {
- return Result.error(ServiceExecutorError.NoServiceCode);
- }
-
- if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
- return Result.error(ServiceExecutorError.ServiceCodeMismatch);
- }
-
- const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
- if (serviceCode === null) {
- return Result.error(ServiceExecutorError.NoServiceCode);
- }
-
- return Result.ok(new PvmExecutor(serviceCode));
- }
- }
-
- declare class PvmExecutor {
- private readonly pvm: HostCalls;
- private hostCalls = new HostCallsManager({ missing: new Missing() });
- private pvmInstanceManager = new PvmInstanceManager(4);
-
- constructor(private serviceCode: BytesBlob) {
- this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
- }
-
- async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
- const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
-
- const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
-
- if (result.hasMemorySlice()) {
- return BytesBlob.blobFrom(result.memorySlice);
- }
-
- return BytesBlob.empty();
- }
- }
-
  type index_Preimages = Preimages;
  declare const index_Preimages: typeof Preimages;
  type index_PreimagesErrorCode = PreimagesErrorCode;
@@ -19819,10 +19668,8 @@ type index_PreimagesState = PreimagesState;
  type index_PreimagesStateUpdate = PreimagesStateUpdate;
  type index_TransitionHasher = TransitionHasher;
  declare const index_TransitionHasher: typeof TransitionHasher;
- type index_WorkPackageExecutor = WorkPackageExecutor;
- declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
  declare namespace index {
- export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher, index_WorkPackageExecutor as WorkPackageExecutor };
+ export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
  export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
  }