@typeberry/lib 0.1.3-af70ed0 → 0.1.3-c2321fb

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (4)
  1. package/index.cjs +342 -1122
  2. package/index.d.ts +230 -402
  3. package/index.js +341 -1121
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -420,6 +420,20 @@ declare const Result$2 = {
420
420
  },
421
421
  };
422
422
 
423
+ // about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
424
+ // - https://issues.chromium.org/issues/40055619
425
+ // - https://stackoverflow.com/a/72124984
426
+ // - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
427
+ declare const MAX_LENGTH$1 = 2145386496;
428
+
429
+ declare function safeAllocUint8Array(length: number) {
430
+ if (length > MAX_LENGTH) {
431
+ // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
432
+ console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
433
+ }
434
+ return new Uint8Array(Math.min(MAX_LENGTH, length));
435
+ }
436
+
423
437
  /**
424
438
  * Utilities for tests.
425
439
  */
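The safeAllocUint8Array helper added above clamps allocations to the roughly 2 GB ArrayBuffer ceiling instead of letting oversized allocations throw; the hunks that follow swap bare new Uint8Array(...) calls for it. A minimal usage sketch, assuming the helper and the MAX_LENGTH constant are re-exported from the package root:

  import { safeAllocUint8Array } from "@typeberry/lib";

  // Within the limit this behaves exactly like new Uint8Array(length).
  const small = safeAllocUint8Array(1024);           // small.length === 1024

  // Above the limit it prints a console warning and returns a buffer truncated
  // to MAX_LENGTH (2145386496 bytes) instead of letting the allocation throw.
  const huge = safeAllocUint8Array(3_000_000_000);   // huge.length === 2145386496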
@@ -755,11 +769,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
755
769
  declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
756
770
  declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
757
771
  declare const index$u_resultToString: typeof resultToString;
772
+ declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
758
773
  declare const index$u_seeThrough: typeof seeThrough;
759
774
  declare const index$u_trimStack: typeof trimStack;
760
775
  declare const index$u_workspacePathFix: typeof workspacePathFix;
761
776
  declare namespace index$u {
762
- export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
777
+ export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
763
778
  export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
764
779
  }
765
780
 
@@ -929,7 +944,7 @@ declare class BytesBlob {
929
944
  static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
930
945
  const vArr = v instanceof Uint8Array ? [v] : v;
931
946
  const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
932
- const buffer = new Uint8Array(totalLength);
947
+ const buffer = safeAllocUint8Array(totalLength);
933
948
  let offset = 0;
934
949
  for (const r of vArr) {
935
950
  buffer.set(r, offset);
@@ -1012,7 +1027,7 @@ declare class Bytes<T extends number> extends BytesBlob {
1012
1027
 
1013
1028
  /** Create an empty [`Bytes<X>`] of given length. */
1014
1029
  static zero<X extends number>(len: X): Bytes<X> {
1015
- return new Bytes(new Uint8Array(len), len);
1030
+ return new Bytes(safeAllocUint8Array(len), len);
1016
1031
  }
1017
1032
 
1018
1033
  // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1148,7 @@ declare class BitVec {
1133
1148
  * Create new [`BitVec`] with all values set to `false`.
1134
1149
  */
1135
1150
  static empty(bitLength: number) {
1136
- const data = new Uint8Array(Math.ceil(bitLength / 8));
1151
+ const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
1137
1152
  return new BitVec(data, bitLength);
1138
1153
  }
1139
1154
 
@@ -3461,6 +3476,52 @@ declare namespace index$q {
3461
3476
  export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
3462
3477
  }
3463
3478
 
3479
+ type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
3480
+ type IDataType = string | Buffer | ITypedArray;
3481
+
3482
+ type IHasher = {
3483
+ /**
3484
+ * Initializes hash state to default value
3485
+ */
3486
+ init: () => IHasher;
3487
+ /**
3488
+ * Updates the hash content with the given data
3489
+ */
3490
+ update: (data: IDataType) => IHasher;
3491
+ /**
3492
+ * Calculates the hash of all of the data passed to be hashed with hash.update().
3493
+ * Defaults to hexadecimal string
3494
+ * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
3495
+ * returns hexadecimal string
3496
+ */
3497
+ digest: {
3498
+ (outputType: "binary"): Uint8Array;
3499
+ (outputType?: "hex"): string;
3500
+ };
3501
+ /**
3502
+ * Save the current internal state of the hasher for later resumption with load().
3503
+ * Cannot be called before .init() or after .digest()
3504
+ *
3505
+ * Note that this state can include arbitrary information about the value being hashed (e.g.
3506
+ * could include N plaintext bytes from the value), so needs to be treated as being as
3507
+ * sensitive as the input value itself.
3508
+ */
3509
+ save: () => Uint8Array;
3510
+ /**
3511
+ * Resume a state that was created by save(). If this state was not created by a
3512
+ * compatible build of hash-wasm, an exception will be thrown.
3513
+ */
3514
+ load: (state: Uint8Array) => IHasher;
3515
+ /**
3516
+ * Block size in bytes
3517
+ */
3518
+ blockSize: number;
3519
+ /**
3520
+ * Digest size in bytes
3521
+ */
3522
+ digestSize: number;
3523
+ };
3524
+
3464
3525
  /**
3465
3526
  * Size of the output of the hash functions.
3466
3527
  *
@@ -3516,144 +3577,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
3516
3577
  }
3517
3578
  }
3518
3579
 
3519
- /** Allocator interface - returns an empty bytes vector that can be filled with the hash. */
3520
- interface HashAllocator {
3521
- /** Return a new hash destination. */
3522
- emptyHash(): OpaqueHash;
3523
- }
3524
-
3525
- /** The simplest allocator returning just a fresh copy of bytes each time. */
3526
- declare class SimpleAllocator implements HashAllocator {
3527
- emptyHash(): OpaqueHash {
3528
- return Bytes.zero(HASH_SIZE);
3529
- }
3530
- }
3531
-
3532
- /** An allocator that works by allocating larger (continuous) pages of memory. */
3533
- declare class PageAllocator implements HashAllocator {
3534
- private page: Uint8Array = new Uint8Array(0);
3535
- private currentHash = 0;
3536
-
3537
- // TODO [ToDr] Benchmark the performance!
3538
- constructor(private readonly hashesPerPage: number) {
3539
- check`${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
3540
- this.resetPage();
3541
- }
3580
+ declare const zero$1 = Bytes.zero(HASH_SIZE);
3542
3581
 
3543
- private resetPage() {
3544
- const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
3545
- this.currentHash = 0;
3546
- this.page = new Uint8Array(pageSizeBytes);
3582
+ declare class Blake2b {
3583
+ static async createHasher() {
3584
+ return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
3547
3585
  }
3548
3586
 
3549
- emptyHash(): OpaqueHash {
3550
- const startIdx = this.currentHash * HASH_SIZE;
3551
- const endIdx = startIdx + HASH_SIZE;
3587
+ private constructor(private readonly hasher: IHasher) {}
3552
3588
 
3553
- this.currentHash += 1;
3554
- if (this.currentHash >= this.hashesPerPage) {
3555
- this.resetPage();
3589
+ /**
3590
+ * Hash given collection of blobs.
3591
+ *
3592
+ * If empty array is given a zero-hash is returned.
3593
+ */
3594
+ hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
3595
+ if (r.length === 0) {
3596
+ return zero.asOpaque();
3556
3597
  }
3557
3598
 
3558
- return Bytes.fromBlob(this.page.subarray(startIdx, endIdx), HASH_SIZE);
3599
+ const hasher = this.hasher.init();
3600
+ for (const v of r) {
3601
+ hasher.update(v instanceof BytesBlob ? v.raw : v);
3602
+ }
3603
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
3559
3604
  }
3560
- }
3561
-
3562
- declare const defaultAllocator = new SimpleAllocator();
3563
3605
 
3564
- /**
3565
- * Hash given collection of blobs.
3566
- *
3567
- * If empty array is given a zero-hash is returned.
3568
- */
3569
- declare function hashBlobs$1<H extends Blake2bHash>(
3570
- r: (BytesBlob | Uint8Array)[],
3571
- allocator: HashAllocator = defaultAllocator,
3572
- ): H {
3573
- const out = allocator.emptyHash();
3574
- if (r.length === 0) {
3575
- return out.asOpaque();
3606
+ /** Hash given blob of bytes. */
3607
+ hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
3608
+ const hasher = this.hasher.init();
3609
+ const bytes = blob instanceof BytesBlob ? blob.raw : blob;
3610
+ hasher.update(bytes);
3611
+ return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
3576
3612
  }
3577
3613
 
3578
- const hasher = blake2b(HASH_SIZE);
3579
- for (const v of r) {
3580
- hasher?.update(v instanceof BytesBlob ? v.raw : v);
3614
+ /** Convert given string into bytes and hash it. */
3615
+ hashString(str: string) {
3616
+ return this.hashBytes(BytesBlob.blobFromString(str));
3581
3617
  }
3582
- hasher?.digest(out.raw);
3583
- return out.asOpaque();
3584
3618
  }
3585
3619
 
3586
- /** Hash given blob of bytes. */
3587
- declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
3588
- const hasher = blake2b(HASH_SIZE);
3589
- const bytes = blob instanceof BytesBlob ? blob.raw : blob;
3590
- hasher?.update(bytes);
3591
- const out = allocator.emptyHash();
3592
- hasher?.digest(out.raw);
3593
- return out;
3594
- }
3595
-
3596
- /** Convert given string into bytes and hash it. */
3597
- declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
3598
- return hashBytes(BytesBlob.blobFromString(str), allocator);
3599
- }
3600
-
3601
- declare const blake2b_hashBytes: typeof hashBytes;
3602
- declare const blake2b_hashString: typeof hashString;
3603
- declare namespace blake2b {
3604
- export {
3605
- hashBlobs$1 as hashBlobs,
3606
- blake2b_hashBytes as hashBytes,
3607
- blake2b_hashString as hashString,
3608
- };
3609
- }
3610
-
3611
- type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
3612
- type IDataType = string | Buffer | ITypedArray;
3613
-
3614
- type IHasher = {
3615
- /**
3616
- * Initializes hash state to default value
3617
- */
3618
- init: () => IHasher;
3619
- /**
3620
- * Updates the hash content with the given data
3621
- */
3622
- update: (data: IDataType) => IHasher;
3623
- /**
3624
- * Calculates the hash of all of the data passed to be hashed with hash.update().
3625
- * Defaults to hexadecimal string
3626
- * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
3627
- * returns hexadecimal string
3628
- */
3629
- digest: {
3630
- (outputType: "binary"): Uint8Array;
3631
- (outputType?: "hex"): string;
3632
- };
3633
- /**
3634
- * Save the current internal state of the hasher for later resumption with load().
3635
- * Cannot be called before .init() or after .digest()
3636
- *
3637
- * Note that this state can include arbitrary information about the value being hashed (e.g.
3638
- * could include N plaintext bytes from the value), so needs to be treated as being as
3639
- * sensitive as the input value itself.
3640
- */
3641
- save: () => Uint8Array;
3642
- /**
3643
- * Resume a state that was created by save(). If this state was not created by a
3644
- * compatible build of hash-wasm, an exception will be thrown.
3645
- */
3646
- load: (state: Uint8Array) => IHasher;
3647
- /**
3648
- * Block size in bytes
3649
- */
3650
- blockSize: number;
3651
- /**
3652
- * Digest size in bytes
3653
- */
3654
- digestSize: number;
3655
- };
3656
-
3657
3620
  declare class KeccakHasher {
3658
3621
  static async create(): Promise<KeccakHasher> {
3659
3622
  return new KeccakHasher(await createKeccak(256));
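The allocator-based blake2b module (SimpleAllocator, PageAllocator, defaultAllocator and the free hashBlobs/hashBytes/hashString functions) is replaced by an instance-based Blake2b class built on the hash-wasm IHasher interface shown earlier. A rough migration sketch, assuming Blake2b and BytesBlob are reachable from the package root:

  import { Blake2b, BytesBlob } from "@typeberry/lib";

  // One-time async setup of the WASM-backed hasher.
  const blake2b = await Blake2b.createHasher();

  const blob = BytesBlob.blobFromString("hello");
  const h1 = blake2b.hashBytes(blob);          // was: blake2b.hashBytes(blob, allocator?)
  const h2 = blake2b.hashBlobs([blob, blob]);  // hash of the concatenation; zero-hash for []
  const h3 = blake2b.hashString("hello");      // same digest as h1

Helpers that previously accepted an optional allocator, such as deriveEd25519SecretKey and deriveBandersnatchSecretKey further down, now take the Blake2b instance as an explicit argument instead.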
@@ -3681,15 +3644,15 @@ declare namespace keccak {
3681
3644
  };
3682
3645
  }
3683
3646
 
3647
+ // TODO [ToDr] (#213) this should most likely be moved to a separate
3648
+ // package to avoid pulling in unnecessary deps.
3649
+
3650
+ type index$p_Blake2b = Blake2b;
3651
+ declare const index$p_Blake2b: typeof Blake2b;
3684
3652
  type index$p_Blake2bHash = Blake2bHash;
3685
3653
  type index$p_HASH_SIZE = HASH_SIZE;
3686
- type index$p_HashAllocator = HashAllocator;
3687
3654
  type index$p_KeccakHash = KeccakHash;
3688
3655
  type index$p_OpaqueHash = OpaqueHash;
3689
- type index$p_PageAllocator = PageAllocator;
3690
- declare const index$p_PageAllocator: typeof PageAllocator;
3691
- type index$p_SimpleAllocator = SimpleAllocator;
3692
- declare const index$p_SimpleAllocator: typeof SimpleAllocator;
3693
3656
  type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
3694
3657
  type index$p_TruncatedHash = TruncatedHash;
3695
3658
  type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3697,12 +3660,10 @@ declare const index$p_WithHash: typeof WithHash;
3697
3660
  type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
3698
3661
  declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
3699
3662
  declare const index$p_ZERO_HASH: typeof ZERO_HASH;
3700
- declare const index$p_blake2b: typeof blake2b;
3701
- declare const index$p_defaultAllocator: typeof defaultAllocator;
3702
3663
  declare const index$p_keccak: typeof keccak;
3703
3664
  declare namespace index$p {
3704
- export { index$p_PageAllocator as PageAllocator, index$p_SimpleAllocator as SimpleAllocator, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_blake2b as blake2b, index$p_defaultAllocator as defaultAllocator, index$p_keccak as keccak };
3705
- export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_HashAllocator as HashAllocator, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
3665
+ export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
3666
+ export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
3706
3667
  }
3707
3668
 
3708
3669
  /** Immutable view of the `HashDictionary`. */
@@ -4735,7 +4696,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
4735
4696
  (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
4736
4697
  0,
4737
4698
  );
4738
- const data = new Uint8Array(dataLength);
4699
+ const data = safeAllocUint8Array(dataLength);
4739
4700
 
4740
4701
  let offset = 0;
4741
4702
 
@@ -4825,22 +4786,16 @@ declare function trivialSeed(s: U32): KeySeed {
4825
4786
  * Derives a Ed25519 secret key from a seed.
4826
4787
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
4827
4788
  */
4828
- declare function deriveEd25519SecretKey(
4829
- seed: KeySeed,
4830
- allocator: SimpleAllocator = new SimpleAllocator(),
4831
- ): Ed25519SecretSeed {
4832
- return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
4789
+ declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
4790
+ return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
4833
4791
  }
4834
4792
 
4835
4793
  /**
4836
4794
  * Derives a Bandersnatch secret key from a seed.
4837
4795
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
4838
4796
  */
4839
- declare function deriveBandersnatchSecretKey(
4840
- seed: KeySeed,
4841
- allocator: SimpleAllocator = new SimpleAllocator(),
4842
- ): BandersnatchSecretSeed {
4843
- return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
4797
+ declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
4798
+ return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
4844
4799
  }
4845
4800
 
4846
4801
  /**
@@ -8373,7 +8328,7 @@ declare enum NodeType {
8373
8328
  declare class TrieNode {
8374
8329
  constructor(
8375
8330
  /** Exactly 512 bits / 64 bytes */
8376
- public readonly raw: Uint8Array = new Uint8Array(TRIE_NODE_BYTES),
8331
+ public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
8377
8332
  ) {}
8378
8333
 
8379
8334
  /** Returns the type of the node */
@@ -9111,21 +9066,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
9111
9066
  return Ordering.Equal;
9112
9067
  }
9113
9068
 
9114
- declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
9115
- Descriptor.withView(
9116
- val.name,
9117
- val.sizeHint,
9118
- (e, elem) => val.encode(e, elem.data),
9119
- (d): WithHash<H, T> => {
9120
- const decoder2 = d.clone();
9121
- const encoded = val.skipEncoded(decoder2);
9122
- const hash = blake2b.hashBytes(encoded);
9123
- return new WithHash(hash.asOpaque(), val.decode(d));
9124
- },
9125
- val.skip,
9126
- val.View,
9127
- );
9128
-
9129
9069
  /**
9130
9070
  * Assignment of particular work report to a core.
9131
9071
  *
@@ -9136,7 +9076,7 @@ declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>
9136
9076
  */
9137
9077
  declare class AvailabilityAssignment extends WithDebug {
9138
9078
  static Codec = codec.Class(AvailabilityAssignment, {
9139
- workReport: codecWithHash(WorkReport.Codec),
9079
+ workReport: WorkReport.Codec,
9140
9080
  timeout: codec.u32.asOpaque<TimeSlot>(),
9141
9081
  });
9142
9082
 
@@ -9146,7 +9086,7 @@ declare class AvailabilityAssignment extends WithDebug {
9146
9086
 
9147
9087
  private constructor(
9148
9088
  /** Work report assigned to a core. */
9149
- public readonly workReport: WithHash<WorkReportHash, WorkReport>,
9089
+ public readonly workReport: WorkReport,
9150
9090
  /** Time slot at which the report becomes obsolete. */
9151
9091
  public readonly timeout: TimeSlot,
9152
9092
  ) {
@@ -11293,7 +11233,6 @@ declare const index$e_codecPerCore: typeof codecPerCore;
11293
11233
  declare const index$e_codecServiceId: typeof codecServiceId;
11294
11234
  declare const index$e_codecVarGas: typeof codecVarGas;
11295
11235
  declare const index$e_codecVarU16: typeof codecVarU16;
11296
- declare const index$e_codecWithHash: typeof codecWithHash;
11297
11236
  declare const index$e_hashComparator: typeof hashComparator;
11298
11237
  declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
11299
11238
  declare const index$e_serviceDataCodec: typeof serviceDataCodec;
@@ -11304,7 +11243,7 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
11304
11243
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
11305
11244
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
11306
11245
  declare namespace index$e {
11307
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11246
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
11308
11247
  export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
11309
11248
  }
11310
11249
 
@@ -11372,7 +11311,7 @@ declare namespace stateKeys {
11372
11311
  }
11373
11312
 
11374
11313
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
11375
- export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
11314
+ export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
11376
11315
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11377
11316
  const out = Bytes.zero(HASH_SIZE);
11378
11317
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11380,11 +11319,11 @@ declare namespace stateKeys {
11380
11319
  return legacyServiceNested(serviceId, out);
11381
11320
  }
11382
11321
 
11383
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
11322
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
11384
11323
  }
11385
11324
 
11386
11325
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
11387
- export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
11326
+ export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
11388
11327
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11389
11328
  const out = Bytes.zero(HASH_SIZE);
11390
11329
  out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11392,11 +11331,16 @@ declare namespace stateKeys {
11392
11331
  return legacyServiceNested(serviceId, out);
11393
11332
  }
11394
11333
 
11395
- return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
11334
+ return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
11396
11335
  }
11397
11336
 
11398
11337
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
11399
- export function serviceLookupHistory(serviceId: ServiceId, hash: PreimageHash, preimageLength: U32): StateKey {
11338
+ export function serviceLookupHistory(
11339
+ blake2b: Blake2b,
11340
+ serviceId: ServiceId,
11341
+ hash: PreimageHash,
11342
+ preimageLength: U32,
11343
+ ): StateKey {
11400
11344
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
11401
11345
  const doubleHash = blake2b.hashBytes(hash);
11402
11346
  const out = Bytes.zero(HASH_SIZE);
@@ -11405,11 +11349,11 @@ declare namespace stateKeys {
11405
11349
  return legacyServiceNested(serviceId, out);
11406
11350
  }
11407
11351
 
11408
- return serviceNested(serviceId, preimageLength, hash);
11352
+ return serviceNested(blake2b, serviceId, preimageLength, hash);
11409
11353
  }
11410
11354
 
11411
11355
  /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
11412
- export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
11356
+ export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
11413
11357
  const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
11414
11358
  const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
11415
11359
  const key = Bytes.zero(HASH_SIZE);
@@ -11593,20 +11537,20 @@ declare namespace serialize {
11593
11537
  });
11594
11538
 
11595
11539
  /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
11596
- export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
11597
- key: stateKeys.serviceStorage(serviceId, key),
11540
+ export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
11541
+ key: stateKeys.serviceStorage(blake2b, serviceId, key),
11598
11542
  Codec: dumpCodec,
11599
11543
  });
11600
11544
 
11601
11545
  /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
11602
- export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
11603
- key: stateKeys.servicePreimage(serviceId, hash),
11546
+ export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
11547
+ key: stateKeys.servicePreimage(blake2b, serviceId, hash),
11604
11548
  Codec: dumpCodec,
11605
11549
  });
11606
11550
 
11607
11551
  /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
11608
- export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
11609
- key: stateKeys.serviceLookupHistory(serviceId, hash, len),
11552
+ export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
11553
+ key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
11610
11554
  Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
11611
11555
  });
11612
11556
  }
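The service-related state-key and serialization helpers (stateKeys.serviceStorage / servicePreimage / serviceLookupHistory / serviceNested and the matching serialize.* entries above) now take a Blake2b instance as their first parameter instead of relying on a module-level hasher. A sketch of the new calling convention; ServiceId, StorageKey, StateKey and the stateKeys namespace are the declarations shown in this file, and whether they are importable from the package root is an assumption:

  import { Blake2b } from "@typeberry/lib";

  // Hypothetical helper: builds the state key for a service storage entry.
  function storageStateKey(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
    // pre-change signature: stateKeys.serviceStorage(serviceId, key)
    return stateKeys.serviceStorage(blake2b, serviceId, key);
  }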
@@ -11641,6 +11585,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
11641
11585
  /** Serialize given state update into a series of key-value pairs. */
11642
11586
  declare function* serializeStateUpdate(
11643
11587
  spec: ChainSpec,
11588
+ blake2b: Blake2b,
11644
11589
  update: Partial<State & ServicesUpdate>,
11645
11590
  ): Generator<StateEntryUpdate> {
11646
11591
  // first let's serialize all of the simple entries (if present!)
@@ -11649,9 +11594,9 @@ declare function* serializeStateUpdate(
11649
11594
  const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
11650
11595
 
11651
11596
  // then let's proceed with service updates
11652
- yield* serializeServiceUpdates(update.servicesUpdates, encode);
11653
- yield* serializePreimages(update.preimages, encode);
11654
- yield* serializeStorage(update.storage);
11597
+ yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
11598
+ yield* serializePreimages(update.preimages, encode, blake2b);
11599
+ yield* serializeStorage(update.storage, blake2b);
11655
11600
  yield* serializeRemovedServices(update.servicesRemoved);
11656
11601
  }
11657
11602
 
@@ -11663,18 +11608,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
11663
11608
  }
11664
11609
  }
11665
11610
 
11666
- declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
11611
+ declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
11667
11612
  for (const { action, serviceId } of storage ?? []) {
11668
11613
  switch (action.kind) {
11669
11614
  case UpdateStorageKind.Set: {
11670
11615
  const key = action.storage.key;
11671
- const codec = serialize.serviceStorage(serviceId, key);
11616
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
11672
11617
  yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
11673
11618
  break;
11674
11619
  }
11675
11620
  case UpdateStorageKind.Remove: {
11676
11621
  const key = action.key;
11677
- const codec = serialize.serviceStorage(serviceId, key);
11622
+ const codec = serialize.serviceStorage(blake2b, serviceId, key);
11678
11623
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11679
11624
  break;
11680
11625
  }
@@ -11684,16 +11629,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
11684
11629
  }
11685
11630
  }
11686
11631
 
11687
- declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, encode: EncodeFun): Generator<StateEntryUpdate> {
11632
+ declare function* serializePreimages(
11633
+ preimages: UpdatePreimage[] | undefined,
11634
+ encode: EncodeFun,
11635
+ blake2b: Blake2b,
11636
+ ): Generator<StateEntryUpdate> {
11688
11637
  for (const { action, serviceId } of preimages ?? []) {
11689
11638
  switch (action.kind) {
11690
11639
  case UpdatePreimageKind.Provide: {
11691
11640
  const { hash, blob } = action.preimage;
11692
- const codec = serialize.servicePreimages(serviceId, hash);
11641
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11693
11642
  yield [StateEntryUpdateAction.Insert, codec.key, blob];
11694
11643
 
11695
11644
  if (action.slot !== null) {
11696
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
11645
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
11697
11646
  yield [
11698
11647
  StateEntryUpdateAction.Insert,
11699
11648
  codec2.key,
@@ -11704,16 +11653,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
11704
11653
  }
11705
11654
  case UpdatePreimageKind.UpdateOrAdd: {
11706
11655
  const { hash, length, slots } = action.item;
11707
- const codec = serialize.serviceLookupHistory(serviceId, hash, length);
11656
+ const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11708
11657
  yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
11709
11658
  break;
11710
11659
  }
11711
11660
  case UpdatePreimageKind.Remove: {
11712
11661
  const { hash, length } = action;
11713
- const codec = serialize.servicePreimages(serviceId, hash);
11662
+ const codec = serialize.servicePreimages(blake2b, serviceId, hash);
11714
11663
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
11715
11664
 
11716
- const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
11665
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
11717
11666
  yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
11718
11667
  break;
11719
11668
  }
@@ -11725,6 +11674,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
11725
11674
  declare function* serializeServiceUpdates(
11726
11675
  servicesUpdates: UpdateService[] | undefined,
11727
11676
  encode: EncodeFun,
11677
+ blake2b: Blake2b,
11728
11678
  ): Generator<StateEntryUpdate> {
11729
11679
  for (const { action, serviceId } of servicesUpdates ?? []) {
11730
11680
  // new service being created or updated
@@ -11734,7 +11684,7 @@ declare function* serializeServiceUpdates(
11734
11684
  // additional lookup history update
11735
11685
  if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
11736
11686
  const { lookupHistory } = action;
11737
- const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
11687
+ const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
11738
11688
  yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
11739
11689
  }
11740
11690
  }
@@ -11868,8 +11818,8 @@ declare class StateEntries {
11868
11818
  );
11869
11819
 
11870
11820
  /** Turn in-memory state into it's serialized form. */
11871
- static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
11872
- return new StateEntries(convertInMemoryStateToDictionary(spec, state));
11821
+ static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
11822
+ return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
11873
11823
  }
11874
11824
 
11875
11825
  /**
@@ -11924,7 +11874,8 @@ declare class StateEntries {
11924
11874
  }
11925
11875
 
11926
11876
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
11927
- getRootHash(): StateRootHash {
11877
+ getRootHash(blake2b: Blake2b): StateRootHash {
11878
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
11928
11879
  const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
11929
11880
  for (const [key, value] of this) {
11930
11881
  leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
@@ -11937,6 +11888,7 @@ declare class StateEntries {
11937
11888
  /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
11938
11889
  declare function convertInMemoryStateToDictionary(
11939
11890
  spec: ChainSpec,
11891
+ blake2b: Blake2b,
11940
11892
  state: InMemoryState,
11941
11893
  ): TruncatedHashDictionary<StateKey, BytesBlob> {
11942
11894
  const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11969,20 +11921,25 @@ declare function convertInMemoryStateToDictionary(
11969
11921
 
11970
11922
  // preimages
11971
11923
  for (const preimage of service.data.preimages.values()) {
11972
- const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
11924
+ const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
11973
11925
  serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
11974
11926
  }
11975
11927
 
11976
11928
  // storage
11977
11929
  for (const storage of service.data.storage.values()) {
11978
- const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
11930
+ const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
11979
11931
  serialized.set(key, Encoder.encodeObject(Codec, storage.value));
11980
11932
  }
11981
11933
 
11982
11934
  // lookup history
11983
11935
  for (const lookupHistoryList of service.data.lookupHistory.values()) {
11984
11936
  for (const lookupHistory of lookupHistoryList) {
11985
- const { key, Codec } = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
11937
+ const { key, Codec } = serialize.serviceLookupHistory(
11938
+ blake2b,
11939
+ serviceId,
11940
+ lookupHistory.hash,
11941
+ lookupHistory.length,
11942
+ );
11986
11943
  serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
11987
11944
  }
11988
11945
  }
@@ -12013,21 +11970,23 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12013
11970
  implements State, EnumerableState
12014
11971
  {
12015
11972
  /** Create a state-like object from collection of serialized entries. */
12016
- static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
12017
- return new SerializedState(spec, state, recentServices);
11973
+ static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
11974
+ return new SerializedState(spec, blake2b, state, recentServices);
12018
11975
  }
12019
11976
 
12020
11977
  /** Create a state-like object backed by some DB. */
12021
11978
  static new<T extends SerializedStateBackend>(
12022
11979
  spec: ChainSpec,
11980
+ blake2b: Blake2b,
12023
11981
  db: T,
12024
11982
  recentServices: ServiceId[] = [],
12025
11983
  ): SerializedState<T> {
12026
- return new SerializedState(spec, db, recentServices);
11984
+ return new SerializedState(spec, blake2b, db, recentServices);
12027
11985
  }
12028
11986
 
12029
11987
  private constructor(
12030
11988
  private readonly spec: ChainSpec,
11989
+ private readonly blake2b: Blake2b,
12031
11990
  public backend: T,
12032
11991
  /** Best-effort list of recently active services. */
12033
11992
  private readonly _recentServiceIds: ServiceId[],
@@ -12058,7 +12017,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12058
12017
  this._recentServiceIds.push(id);
12059
12018
  }
12060
12019
 
12061
- return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
12020
+ return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
12062
12021
  }
12063
12022
 
12064
12023
  private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
@@ -12157,6 +12116,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
12157
12116
  /** Service data representation on a serialized state. */
12158
12117
  declare class SerializedService implements Service {
12159
12118
  constructor(
12119
+ public readonly blake2b: Blake2b,
12160
12120
  /** Service id */
12161
12121
  public readonly serviceId: ServiceId,
12162
12122
  private readonly accountInfo: ServiceAccountInfo,
@@ -12172,14 +12132,14 @@ declare class SerializedService implements Service {
12172
12132
  getStorage(rawKey: StorageKey): BytesBlob | null {
12173
12133
  if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
12174
12134
  const SERVICE_ID_BYTES = 4;
12175
- const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
12135
+ const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
12176
12136
  serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
12177
12137
  serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
12178
- const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
12179
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
12138
+ const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
12139
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
12180
12140
  }
12181
12141
 
12182
- return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
12142
+ return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
12183
12143
  }
12184
12144
 
12185
12145
  /**
@@ -12189,17 +12149,17 @@ declare class SerializedService implements Service {
12189
12149
  */
12190
12150
  hasPreimage(hash: PreimageHash): boolean {
12191
12151
  // TODO [ToDr] consider optimizing to avoid fetching the whole data.
12192
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
12152
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
12193
12153
  }
12194
12154
 
12195
12155
  /** Retrieve preimage from the DB. */
12196
12156
  getPreimage(hash: PreimageHash): BytesBlob | null {
12197
- return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
12157
+ return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
12198
12158
  }
12199
12159
 
12200
12160
  /** Retrieve preimage lookup history. */
12201
12161
  getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
12202
- const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
12162
+ const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
12203
12163
  if (rawSlots === undefined) {
12204
12164
  return null;
12205
12165
  }
@@ -12212,9 +12172,9 @@ type KeyAndCodec<T> = {
12212
12172
  Codec: Decode<T>;
12213
12173
  };
12214
12174
 
12215
- declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
12175
+ declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
12216
12176
  const stateEntries = StateEntries.fromEntriesUnsafe(entries);
12217
- return SerializedState.fromStateEntries(spec, stateEntries);
12177
+ return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
12218
12178
  }
12219
12179
 
12220
12180
  /**
@@ -12370,7 +12330,8 @@ declare class LeafDb implements SerializedStateBackend {
12370
12330
  assertNever(val);
12371
12331
  }
12372
12332
 
12373
- getStateRoot(): StateRootHash {
12333
+ getStateRoot(blake2b: Blake2b): StateRootHash {
12334
+ const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
12374
12335
  return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
12375
12336
  }
12376
12337
 
@@ -12468,7 +12429,8 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
12468
12429
  }
12469
12430
 
12470
12431
  async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
12471
- return StateEntries.serializeInMemory(this.spec, state).getRootHash();
12432
+ const blake2b = await Blake2b.createHasher();
12433
+ return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
12472
12434
  }
12473
12435
 
12474
12436
  /** Insert a full state into the database. */
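Computing a state root now requires an explicit hasher, as the getStateRoot change above shows. A condensed sketch of the same flow for an in-memory state; the ChainSpec and InMemoryState values are assumed to come from elsewhere:

  import { Blake2b } from "@typeberry/lib";

  async function stateRootOf(spec: ChainSpec, state: InMemoryState): Promise<StateRootHash> {
    const blake2b = await Blake2b.createHasher();
    return StateEntries.serializeInMemory(spec, blake2b, state).getRootHash(blake2b);
  }

The same pattern applies to loadState(spec, blake2b, entries) and SerializedState.fromStateEntries(spec, blake2b, stateEntries), which now take the hasher as their second argument.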
@@ -12573,7 +12535,7 @@ declare function padAndEncodeData(input: BytesBlob) {
12573
12535
  const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
12574
12536
  let padded = input;
12575
12537
  if (input.length !== paddedLength) {
12576
- padded = BytesBlob.blobFrom(new Uint8Array(paddedLength));
12538
+ padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
12577
12539
  padded.raw.set(input.raw, 0);
12578
12540
  }
12579
12541
  return chunkingFunction(padded);
@@ -12629,7 +12591,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
12629
12591
  */
12630
12592
  declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
12631
12593
  const result: Bytes<POINT_LENGTH>[] = [];
12632
- const data = new Uint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
12594
+ const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
12633
12595
 
12634
12596
  // add original shards to the result
12635
12597
  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12649,7 +12611,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
12649
12611
  for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
12650
12612
  const pointIndex = i * POINT_ALIGNMENT;
12651
12613
 
12652
- const redundancyPoint = new Uint8Array(POINT_LENGTH);
12614
+ const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
12653
12615
  for (let j = 0; j < POINT_LENGTH; j++) {
12654
12616
  redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
12655
12617
  }
@@ -12669,7 +12631,7 @@ declare function decodePiece(
12669
12631
  ): Bytes<PIECE_SIZE> {
12670
12632
  const result = Bytes.zero(PIECE_SIZE);
12671
12633
 
12672
- const data = new Uint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
12634
+ const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
12673
12635
  const indices = new Uint16Array(input.length);
12674
12636
 
12675
12637
  for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12796,7 +12758,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
12796
12758
  return BytesBlob.empty();
12797
12759
  }
12798
12760
  const n = input[0].length;
12799
- const result = BytesBlob.blobFrom(new Uint8Array(k * n));
12761
+ const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
12800
12762
  for (let i = 0; i < k; i++) {
12801
12763
  const entry = input[i].raw;
12802
12764
  for (let j = 0; j < n; j++) {
@@ -13694,13 +13656,12 @@ interface PartialState {
13694
13656
 
13695
13657
  /**
13696
13658
  * Transfer given `amount` of funds to the `destination`,
13697
- * passing `suppliedGas` to invoke `OnTransfer` entry point
13698
- * and given `memo`.
13659
+ * passing `gas` fee for transfer and given `memo`.
13699
13660
  */
13700
13661
  transfer(
13701
13662
  destination: ServiceId | null,
13702
13663
  amount: U64,
13703
- suppliedGas: ServiceGas,
13664
+ gas: ServiceGas,
13704
13665
  memo: Bytes<TRANSFER_MEMO_BYTES>,
13705
13666
  ): Result$2<OK, TransferError>;
13706
13667
 
@@ -13869,7 +13830,7 @@ declare class Mask {
13869
13830
  }
13870
13831
 
13871
13832
  private buildLookupTableForward(mask: BitVec) {
13872
- const table = new Uint8Array(mask.bitLength);
13833
+ const table = safeAllocUint8Array(mask.bitLength);
13873
13834
  let lastInstructionOffset = 0;
13874
13835
  for (let i = mask.bitLength - 1; i >= 0; i--) {
13875
13836
  if (mask.isSet(i)) {
@@ -14013,7 +13974,7 @@ declare class Registers {
14013
13974
  private asSigned: BigInt64Array;
14014
13975
  private asUnsigned: BigUint64Array;
14015
13976
 
14016
- constructor(private readonly bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
13977
+ constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
14017
13978
  check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
14018
13979
  this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
14019
13980
  this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
@@ -18071,9 +18032,15 @@ type HostCallIndex = Opaque<U32, "HostCallIndex[U32]">;
18071
18032
  /** Attempt to convert a number into `HostCallIndex`. */
18072
18033
  declare const tryAsHostCallIndex = (v: number): HostCallIndex => asOpaqueType(tryAsU32(v));
18073
18034
 
18035
+ /**
18036
+ * Host-call exit reason.
18037
+ *
18038
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/24a30124a501?v=0.7.2
18039
+ */
18074
18040
  declare enum PvmExecution {
18075
18041
  Halt = 0,
18076
18042
  Panic = 1,
18043
+ OOG = 2, // out-of-gas
18077
18044
  }
18078
18045
 
18079
18046
  /** A utility function to easily trace a bunch of registers. */
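PvmExecution gains an OOG (out-of-gas) variant alongside Halt and Panic, which the HostCalls dispatcher further down maps to Status.OOG; the per-call gas field on HostCallHandler is also renamed from gasCost to basicGasCost. A sketch of how a host-call implementation might signal out-of-gas, assuming handlers return PvmExecution | undefined (undefined meaning "continue") as the surrounding dispatcher code suggests; the gas counter shape is also an assumption based on the gas.sub(...) usage below:

  // Hypothetical handler fragment: charge some extra gas, bail out with OOG on underflow.
  function chargeExtraGas(gas: { sub(amount: number): boolean }): PvmExecution | undefined {
    const underflow = gas.sub(1_000);            // gas.sub(...) reports underflow, as in HostCalls
    return underflow ? PvmExecution.OOG : undefined;
  }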
@@ -18086,8 +18053,12 @@ interface HostCallHandler {
18086
18053
  /** Index of that host call (i.e. what PVM invokes via `ecalli`) */
18087
18054
  readonly index: HostCallIndex;
18088
18055
 
18089
- /** The gas cost of invocation of that host call. */
18090
- readonly gasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
18056
+ /**
18057
+ * The gas cost of invocation of that host call.
18058
+ *
18059
+ * NOTE: `((reg: IHostCallRegisters) => Gas)` function is for compatibility reasons: pre GP 0.7.2
18060
+ */
18061
+ readonly basicGasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
18091
18062
 
18092
18063
  /** Currently executing service id. */
18093
18064
  readonly currentServiceId: U32;
@@ -18230,7 +18201,7 @@ declare class HostCalls {
18230
18201
  const maybeAddress = regs.getLowerU32(7);
18231
18202
  const maybeLength = regs.getLowerU32(8);
18232
18203
 
18233
- const result = new Uint8Array(maybeLength);
18204
+ const result = safeAllocUint8Array(maybeLength);
18234
18205
  const startAddress = tryAsMemoryIndex(maybeAddress);
18235
18206
  const loadResult = memory.loadInto(result, startAddress);
18236
18207
 
@@ -18263,8 +18234,10 @@ declare class HostCalls {
18263
18234
 
18264
18235
  const hostCall = this.hostCalls.get(index);
18265
18236
  const gasBefore = gas.get();
18266
- const gasCost = typeof hostCall.gasCost === "number" ? hostCall.gasCost : hostCall.gasCost(regs);
18267
- const underflow = gas.sub(gasCost);
18237
+ // NOTE: `basicGasCost(regs)` function is for compatibility reasons: pre GP 0.7.2
18238
+ const basicGasCost =
18239
+ typeof hostCall.basicGasCost === "number" ? hostCall.basicGasCost : hostCall.basicGasCost(regs);
18240
+ const underflow = gas.sub(basicGasCost);
18268
18241
 
18269
18242
  const pcLog = `[PC: ${pvmInstance.getPC()}]`;
18270
18243
  if (underflow) {
@@ -18291,6 +18264,11 @@ declare class HostCalls {
18291
18264
  return this.getReturnValue(status, pvmInstance);
18292
18265
  }
18293
18266
 
18267
+ if (result === PvmExecution.OOG) {
18268
+ status = Status.OOG;
18269
+ return this.getReturnValue(status, pvmInstance);
18270
+ }
18271
+
18294
18272
  if (result === undefined) {
18295
18273
  pvmInstance.runProgram();
18296
18274
  status = pvmInstance.getStatus();
@@ -18662,7 +18640,7 @@ declare class DebuggerAdapter {
18662
18640
 
18663
18641
  if (page === null) {
18664
18642
  // page wasn't allocated so we return an empty page
18665
- return new Uint8Array(PAGE_SIZE);
18643
+ return safeAllocUint8Array(PAGE_SIZE);
18666
18644
  }
18667
18645
 
18668
18646
  if (page.length === PAGE_SIZE) {
@@ -18671,7 +18649,7 @@ declare class DebuggerAdapter {
18671
18649
  }
18672
18650
 
18673
18651
  // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
18674
- const fullPage = new Uint8Array(PAGE_SIZE);
18652
+ const fullPage = safeAllocUint8Array(PAGE_SIZE);
18675
18653
  fullPage.set(page);
18676
18654
  return fullPage;
18677
18655
  }
@@ -18864,10 +18842,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
18864
18842
  *
18865
18843
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
18866
18844
  */
18867
- declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
18845
+ declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
18868
18846
  check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
18869
18847
  const n = arr.length;
18870
- const randomNumbers = hashToNumberSequence(entropy, arr.length);
18848
+ const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
18871
18849
  const result: T[] = new Array<T>(n);
18872
18850
 
18873
18851
  let itemsLeft = n;
@@ -19039,8 +19017,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
19039
19017
  timeout: "number",
19040
19018
  },
19041
19019
  ({ report, timeout }) => {
19042
- const workReportHash = blake2b.hashBytes(Encoder.encodeObject(WorkReport.Codec, report)).asOpaque();
19043
- return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
19020
+ return AvailabilityAssignment.create({ workReport: report, timeout });
19044
19021
  },
19045
19022
  );
19046
19023
 
@@ -19541,7 +19518,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19541
19518
  constructor(
19542
19519
  private readonly context: ChainSpec,
19543
19520
  private readonly keccakHasher: KeccakHasher,
19544
- private readonly allocator: HashAllocator,
19521
+ public readonly blake2b: Blake2b,
19545
19522
  ) {}
19546
19523
 
19547
19524
  /** Concatenates two hashes and hashes the concatenation */
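`TransitionHasher` follows the same pattern: the `HashAllocator` dependency is dropped and a `Blake2b` instance is injected (and exposed) instead, which is why every `hashBytes` call in the hunks below goes through `this.blake2b`. A hedged construction sketch; the hasher factories and input values are assumptions:

```ts
// Hypothetical wiring; only the constructor parameter order comes from the diff.
declare const chainSpec: ChainSpec;
declare const headerView: HeaderView;
declare function createKeccakHasher(): Promise<KeccakHasher>; // placeholder factory
declare function createBlake2b(): Promise<Blake2b>;           // placeholder factory

const hasher = new TransitionHasher(chainSpec, await createKeccakHasher(), await createBlake2b());
const hashedHeader = hasher.header(headerView); // WithHash<HeaderHash, HeaderView>
```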
@@ -19555,7 +19532,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19555
19532
 
19556
19533
  /** Creates hash from the block header view */
19557
19534
  header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
19558
- return new WithHash(blake2b.hashBytes(header.encoded(), this.allocator).asOpaque(), header);
19535
+ return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
19559
19536
  }
19560
19537
 
19561
19538
  /**
@@ -19569,7 +19546,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19569
19546
  .view()
19570
19547
  .map((g) => g.view())
19571
19548
  .map((guarantee) => {
19572
- const reportHash = blake2b.hashBytes(guarantee.report.encoded(), this.allocator).asOpaque<WorkReportHash>();
19549
+ const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
19573
19550
  return BytesBlob.blobFromParts([
19574
19551
  reportHash.raw,
19575
19552
  guarantee.slot.encoded().raw,
@@ -19579,15 +19556,15 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19579
19556
 
19580
19557
  const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);
19581
19558
 
19582
- const et = blake2b.hashBytes(extrinsicView.tickets.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19583
- const ep = blake2b.hashBytes(extrinsicView.preimages.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19584
- const eg = blake2b.hashBytes(guaranteeBlob, this.allocator).asOpaque<ExtrinsicHash>();
19585
- const ea = blake2b.hashBytes(extrinsicView.assurances.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19586
- const ed = blake2b.hashBytes(extrinsicView.disputes.encoded(), this.allocator).asOpaque<ExtrinsicHash>();
19559
+ const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
19560
+ const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
19561
+ const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
19562
+ const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
19563
+ const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
19587
19564
 
19588
19565
  const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);
19589
19566
 
19590
- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), extrinsicView, encoded);
19567
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
19591
19568
  }
19592
19569
 
19593
19570
  /** Creates hash for given WorkPackage */
@@ -19598,7 +19575,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
19598
19575
  private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
19599
19576
  // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
19600
19577
  const encoded = Encoder.encodeObject(codec, data, this.context);
19601
- return new WithHashAndBytes(blake2b.hashBytes(encoded, this.allocator).asOpaque(), data, encoded);
19578
+ return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
19602
19579
  }
19603
19580
  }
19604
19581
 
@@ -19619,7 +19596,10 @@ declare enum PreimagesErrorCode {
19619
19596
 
19620
19597
  // TODO [SeKo] consider whether this module is the right place to remove expired preimages
19621
19598
  declare class Preimages {
19622
- constructor(public readonly state: PreimagesState) {}
19599
+ constructor(
19600
+ public readonly state: PreimagesState,
19601
+ public readonly blake2b: Blake2b,
19602
+ ) {}
19623
19603
 
19624
19604
  integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
19625
19605
  // make sure lookup extrinsics are sorted and unique
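`Preimages` gets the same treatment: the `Blake2b` instance becomes a constructor dependency rather than an ambient import, which is what lets `integrate` hash each preimage blob via `this.blake2b` in the hunk below. A hedged usage sketch with placeholder inputs:

```ts
// Hypothetical wiring; only the constructor shape and the `integrate` signature
// come from the diff, the inputs are placeholders.
declare const preimagesState: PreimagesState;
declare const input: PreimagesInput;
declare const blake2b: Blake2b;

const preimages = new Preimages(preimagesState, blake2b);
const update = preimages.integrate(input);
if (update.isError) {
  // handle PreimagesErrorCode here
}
```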
@@ -19648,7 +19628,7 @@ declare class Preimages {
19648
19628
  // select preimages for integration
19649
19629
  for (const preimage of preimages) {
19650
19630
  const { requester, blob } = preimage;
19651
- const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
19631
+ const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();
19652
19632
 
19653
19633
  const service = this.state.getService(requester);
19654
19634
  if (service === null) {
@@ -19679,156 +19659,6 @@ declare class Preimages {
19679
19659
  }
19680
19660
  }
19681
19661
 
19682
- declare enum ServiceExecutorError {
19683
- NoLookup = 0,
19684
- NoState = 1,
19685
- NoServiceCode = 2,
19686
- ServiceCodeMismatch = 3,
19687
- }
19688
-
19689
- declare class WorkPackageExecutor {
19690
- constructor(
19691
- private readonly blocks: BlocksDb,
19692
- private readonly state: StatesDb,
19693
- private readonly hasher: TransitionHasher,
19694
- ) {}
19695
-
19696
- // TODO [ToDr] this whole thing should be triple-checked with the GP.
19697
- // I'm currently implementing some dirty version for the demo.
19698
- async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
19699
- const headerHash = pack.context.lookupAnchor;
19700
- // execute authorisation first or is it already executed and we just need to check it?
19701
- const authExec = this.getServiceExecutor(
19702
- // TODO [ToDr] should this be anchor or lookupAnchor?
19703
- headerHash,
19704
- pack.authCodeHost,
19705
- pack.authCodeHash,
19706
- );
19707
-
19708
- if (authExec.isError) {
19709
- // TODO [ToDr] most likely shouldn't be a throw.
19710
- throw new Error(`Could not get authorization executor: ${authExec.error}`);
19711
- }
19712
-
19713
- const pvm = authExec.ok;
19714
- const authGas = tryAsGas(15_000n);
19715
- const result = await pvm.run(pack.parametrization, authGas);
19716
-
19717
- if (!result.isEqualTo(pack.authorization)) {
19718
- throw new Error("Authorization is invalid.");
19719
- }
19720
-
19721
- const results: WorkResult[] = [];
19722
- for (const item of pack.items) {
19723
- const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
19724
- if (exec.isError) {
19725
- throw new Error(`Could not get item executor: ${exec.error}`);
19726
- }
19727
- const pvm = exec.ok;
19728
-
19729
- const gasRatio = tryAsServiceGas(3_000n);
19730
- const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
19731
- results.push(
19732
- WorkResult.create({
19733
- serviceId: item.service,
19734
- codeHash: item.codeHash,
19735
- payloadHash: blake2b.hashBytes(item.payload),
19736
- gas: gasRatio,
19737
- result: new WorkExecResult(WorkExecResultKind.ok, ret),
19738
- load: WorkRefineLoad.create({
19739
- gasUsed: tryAsServiceGas(5),
19740
- importedSegments: tryAsU32(0),
19741
- exportedSegments: tryAsU32(0),
19742
- extrinsicSize: tryAsU32(0),
19743
- extrinsicCount: tryAsU32(0),
19744
- }),
19745
- }),
19746
- );
19747
- }
19748
-
19749
- const workPackage = this.hasher.workPackage(pack);
19750
- const workPackageSpec = WorkPackageSpec.create({
19751
- hash: workPackage.hash,
19752
- length: tryAsU32(workPackage.encoded.length),
19753
- erasureRoot: Bytes.zero(HASH_SIZE),
19754
- exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
19755
- exportsCount: tryAsU16(0),
19756
- });
19757
- const coreIndex = tryAsCoreIndex(0);
19758
- const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
19759
-
19760
- const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
19761
-
19762
- return Promise.resolve(
19763
- WorkReport.create({
19764
- workPackageSpec,
19765
- context: pack.context,
19766
- coreIndex,
19767
- authorizerHash,
19768
- authorizationOutput: pack.authorization,
19769
- segmentRootLookup: [],
19770
- results: workResults,
19771
- authorizationGasUsed: tryAsServiceGas(0),
19772
- }),
19773
- );
19774
- }
19775
-
19776
- getServiceExecutor(
19777
- lookupAnchor: HeaderHash,
19778
- serviceId: ServiceId,
19779
- expectedCodeHash: CodeHash,
19780
- ): Result$2<PvmExecutor, ServiceExecutorError> {
19781
- const header = this.blocks.getHeader(lookupAnchor);
19782
- if (header === null) {
19783
- return Result.error(ServiceExecutorError.NoLookup);
19784
- }
19785
-
19786
- const state = this.state.getState(lookupAnchor);
19787
- if (state === null) {
19788
- return Result.error(ServiceExecutorError.NoState);
19789
- }
19790
-
19791
- const service = state.getService(serviceId);
19792
- const serviceCodeHash = service?.getInfo().codeHash ?? null;
19793
- if (serviceCodeHash === null) {
19794
- return Result.error(ServiceExecutorError.NoServiceCode);
19795
- }
19796
-
19797
- if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
19798
- return Result.error(ServiceExecutorError.ServiceCodeMismatch);
19799
- }
19800
-
19801
- const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
19802
- if (serviceCode === null) {
19803
- return Result.error(ServiceExecutorError.NoServiceCode);
19804
- }
19805
-
19806
- return Result.ok(new PvmExecutor(serviceCode));
19807
- }
19808
- }
19809
-
19810
- declare class PvmExecutor {
19811
- private readonly pvm: HostCalls;
19812
- private hostCalls = new HostCallsManager({ missing: new Missing() });
19813
- private pvmInstanceManager = new PvmInstanceManager(4);
19814
-
19815
- constructor(private serviceCode: BytesBlob) {
19816
- this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
19817
- }
19818
-
19819
- async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
19820
- const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
19821
-
19822
- const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
19823
-
19824
- if (result.hasMemorySlice()) {
19825
- return BytesBlob.blobFrom(result.memorySlice);
19826
- }
19827
-
19828
- return BytesBlob.empty();
19829
- }
19830
- }
19831
-
19832
19662
  type index_Preimages = Preimages;
19833
19663
  declare const index_Preimages: typeof Preimages;
19834
19664
  type index_PreimagesErrorCode = PreimagesErrorCode;
@@ -19838,10 +19668,8 @@ type index_PreimagesState = PreimagesState;
19838
19668
  type index_PreimagesStateUpdate = PreimagesStateUpdate;
19839
19669
  type index_TransitionHasher = TransitionHasher;
19840
19670
  declare const index_TransitionHasher: typeof TransitionHasher;
19841
- type index_WorkPackageExecutor = WorkPackageExecutor;
19842
- declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
19843
19671
  declare namespace index {
19844
- export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher, index_WorkPackageExecutor as WorkPackageExecutor };
19672
+ export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
19845
19673
  export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
19846
19674
  }
19847
19675