@typeberry/lib 0.1.3-462ca77 → 0.1.3-6759174
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +335 -1099
- package/index.d.ts +225 -379
- package/index.js +334 -1098
- package/package.json +1 -1
package/index.d.ts
CHANGED
@@ -3476,6 +3476,99 @@ declare namespace index$q {
   export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
 }
 
+/**
+ * A utility class providing a readonly view over a portion of an array without copying it.
+ */
+declare class ArrayView<T> implements Iterable<T> {
+  private readonly source: T[];
+  public readonly length: number;
+
+  private constructor(
+    source: T[],
+    private readonly start: number,
+    private readonly end: number,
+  ) {
+    this.source = source;
+    this.length = end - start;
+  }
+
+  static from<T>(source: T[], start = 0, end = source.length): ArrayView<T> {
+    check`
+      ${start >= 0 && end <= source.length && start <= end}
+      Invalid start (${start})/end (${end}) for ArrayView
+    `;
+    return new ArrayView(source, start, end);
+  }
+
+  get(i: number): T {
+    check`
+      ${i >= 0 && i < this.length}
+      Index out of bounds: ${i} < ${this.length}
+    `;
+    return this.source[this.start + i];
+  }
+
+  subview(from: number, to: number = this.length): ArrayView<T> {
+    return ArrayView.from(this.source, this.start + from, this.start + to);
+  }
+
+  toArray(): T[] {
+    return this.source.slice(this.start, this.end);
+  }
+
+  *[Symbol.iterator](): Iterator<T> {
+    for (let i = this.start; i < this.end; i++) {
+      yield this.source[i];
+    }
+  }
+}
+
+type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
+type IDataType = string | Buffer | ITypedArray;
+
+type IHasher = {
+  /**
+   * Initializes hash state to default value
+   */
+  init: () => IHasher;
+  /**
+   * Updates the hash content with the given data
+   */
+  update: (data: IDataType) => IHasher;
+  /**
+   * Calculates the hash of all of the data passed to be hashed with hash.update().
+   * Defaults to hexadecimal string
+   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
+   * returns hexadecimal string
+   */
+  digest: {
+    (outputType: "binary"): Uint8Array;
+    (outputType?: "hex"): string;
+  };
+  /**
+   * Save the current internal state of the hasher for later resumption with load().
+   * Cannot be called before .init() or after .digest()
+   *
+   * Note that this state can include arbitrary information about the value being hashed (e.g.
+   * could include N plaintext bytes from the value), so needs to be treated as being as
+   * sensitive as the input value itself.
+   */
+  save: () => Uint8Array;
+  /**
+   * Resume a state that was created by save(). If this state was not created by a
+   * compatible build of hash-wasm, an exception will be thrown.
+   */
+  load: (state: Uint8Array) => IHasher;
+  /**
+   * Block size in bytes
+   */
+  blockSize: number;
+  /**
+   * Digest size in bytes
+   */
+  digestSize: number;
+};
+
 /**
  * Size of the output of the hash functions.
  *
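Note: a minimal usage sketch of the newly added ArrayView (hypothetical values, not part of the package). A view shares the backing array instead of copying it:

    const view = ArrayView.from([10, 20, 30, 40, 50], 1, 4); // window over 20, 30, 40
    view.length;               // 3
    view.get(0);               // 20
    view.subview(1).toArray(); // [30, 40] (copies only on toArray)
    [...view];                 // [20, 30, 40] via Symbol.iterator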
@@ -3531,144 +3624,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
   }
 }
 
-
-interface HashAllocator {
-  /** Return a new hash destination. */
-  emptyHash(): OpaqueHash;
-}
-
-/** The simplest allocator returning just a fresh copy of bytes each time. */
-declare class SimpleAllocator implements HashAllocator {
-  emptyHash(): OpaqueHash {
-    return Bytes.zero(HASH_SIZE);
-  }
-}
-
-/** An allocator that works by allocating larger (continuous) pages of memory. */
-declare class PageAllocator implements HashAllocator {
-  private page: Uint8Array = safeAllocUint8Array(0);
-  private currentHash = 0;
+declare const zero$1 = Bytes.zero(HASH_SIZE);
 
-
-
-
-    this.resetPage();
+declare class Blake2b {
+  static async createHasher() {
+    return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
   }
 
-  private
-    const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
-    this.currentHash = 0;
-    this.page = safeAllocUint8Array(pageSizeBytes);
-  }
-
-  emptyHash(): OpaqueHash {
-    const startIdx = this.currentHash * HASH_SIZE;
-    const endIdx = startIdx + HASH_SIZE;
+  private constructor(private readonly hasher: IHasher) {}
 
-
-
-
+  /**
+   * Hash given collection of blobs.
+   *
+   * If empty array is given a zero-hash is returned.
+   */
+  hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
+    if (r.length === 0) {
+      return zero.asOpaque();
     }
 
-
+    const hasher = this.hasher.init();
+    for (const v of r) {
+      hasher.update(v instanceof BytesBlob ? v.raw : v);
+    }
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
-}
 
-
-
-
-
-
-
- */
-declare function hashBlobs$1<H extends Blake2bHash>(
-  r: (BytesBlob | Uint8Array)[],
-  allocator: HashAllocator = defaultAllocator,
-): H {
-  const out = allocator.emptyHash();
-  if (r.length === 0) {
-    return out.asOpaque();
+  /** Hash given blob of bytes. */
+  hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
+    const hasher = this.hasher.init();
+    const bytes = blob instanceof BytesBlob ? blob.raw : blob;
+    hasher.update(bytes);
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
 
-
-
-
+  /** Convert given string into bytes and hash it. */
+  hashString(str: string) {
+    return this.hashBytes(BytesBlob.blobFromString(str));
   }
-  hasher?.digest(out.raw);
-  return out.asOpaque();
-}
-
-/** Hash given blob of bytes. */
-declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
-  const hasher = blake2b(HASH_SIZE);
-  const bytes = blob instanceof BytesBlob ? blob.raw : blob;
-  hasher?.update(bytes);
-  const out = allocator.emptyHash();
-  hasher?.digest(out.raw);
-  return out;
 }
 
-/** Convert given string into bytes and hash it. */
-declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
-  return hashBytes(BytesBlob.blobFromString(str), allocator);
-}
-
-declare const blake2b_hashBytes: typeof hashBytes;
-declare const blake2b_hashString: typeof hashString;
-declare namespace blake2b {
-  export {
-    hashBlobs$1 as hashBlobs,
-    blake2b_hashBytes as hashBytes,
-    blake2b_hashString as hashString,
-  };
-}
-
-type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
-type IDataType = string | Buffer | ITypedArray;
-
-type IHasher = {
-  /**
-   * Initializes hash state to default value
-   */
-  init: () => IHasher;
-  /**
-   * Updates the hash content with the given data
-   */
-  update: (data: IDataType) => IHasher;
-  /**
-   * Calculates the hash of all of the data passed to be hashed with hash.update().
-   * Defaults to hexadecimal string
-   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
-   * returns hexadecimal string
-   */
-  digest: {
-    (outputType: "binary"): Uint8Array;
-    (outputType?: "hex"): string;
-  };
-  /**
-   * Save the current internal state of the hasher for later resumption with load().
-   * Cannot be called before .init() or after .digest()
-   *
-   * Note that this state can include arbitrary information about the value being hashed (e.g.
-   * could include N plaintext bytes from the value), so needs to be treated as being as
-   * sensitive as the input value itself.
-   */
-  save: () => Uint8Array;
-  /**
-   * Resume a state that was created by save(). If this state was not created by a
-   * compatible build of hash-wasm, an exception will be thrown.
-   */
-  load: (state: Uint8Array) => IHasher;
-  /**
-   * Block size in bytes
-   */
-  blockSize: number;
-  /**
-   * Digest size in bytes
-   */
-  digestSize: number;
-};
-
 declare class KeccakHasher {
   static async create(): Promise<KeccakHasher> {
     return new KeccakHasher(await createKeccak(256));
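Note: the module-level blake2b namespace (hashBytes/hashString/hashBlobs free functions with an optional HashAllocator) is replaced by an instance-based Blake2b class backed by hash-wasm. A before/after sketch, assuming the symbols are re-exported from the package root:

    // before
    const h1 = blake2b.hashBytes(BytesBlob.blobFromString("hello"));
    // after: create the (async, WASM-backed) hasher once and pass it around
    const hasher = await Blake2b.createHasher();
    const h2 = hasher.hashBytes(BytesBlob.blobFromString("hello"));
    const h3 = hasher.hashString("hello"); // same digest as h2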
@@ -3696,15 +3691,15 @@ declare namespace keccak {
   };
 }
 
+// TODO [ToDr] (#213) this should most likely be moved to a separate
+// package to avoid pulling in unnecessary deps.
+
+type index$p_Blake2b = Blake2b;
+declare const index$p_Blake2b: typeof Blake2b;
 type index$p_Blake2bHash = Blake2bHash;
 type index$p_HASH_SIZE = HASH_SIZE;
-type index$p_HashAllocator = HashAllocator;
 type index$p_KeccakHash = KeccakHash;
 type index$p_OpaqueHash = OpaqueHash;
-type index$p_PageAllocator = PageAllocator;
-declare const index$p_PageAllocator: typeof PageAllocator;
-type index$p_SimpleAllocator = SimpleAllocator;
-declare const index$p_SimpleAllocator: typeof SimpleAllocator;
 type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
 type index$p_TruncatedHash = TruncatedHash;
 type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3712,12 +3707,10 @@ declare const index$p_WithHash: typeof WithHash;
 type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
 declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
 declare const index$p_ZERO_HASH: typeof ZERO_HASH;
-declare const index$p_blake2b: typeof blake2b;
-declare const index$p_defaultAllocator: typeof defaultAllocator;
 declare const index$p_keccak: typeof keccak;
 declare namespace index$p {
-  export { index$
-  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$
+  export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
+  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
 }
 
 /** Immutable view of the `HashDictionary`. */
@@ -4494,6 +4487,8 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
   }
 }
 
+type index$o_ArrayView<T> = ArrayView<T>;
+declare const index$o_ArrayView: typeof ArrayView;
 type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
 declare const index$o_FixedSizeArray: typeof FixedSizeArray;
 type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
@@ -4521,7 +4516,7 @@ type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDic
 declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
 declare const index$o_asKnownSize: typeof asKnownSize;
 declare namespace index$o {
-  export { index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
+  export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
   export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
 }
 
@@ -4840,22 +4835,16 @@ declare function trivialSeed(s: U32): KeySeed {
  * Derives a Ed25519 secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveEd25519SecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): Ed25519SecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
  * Derives a Bandersnatch secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveBandersnatchSecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): BandersnatchSecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
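Note: both derivation functions now take the Blake2b instance explicitly instead of a SimpleAllocator. A sketch, assuming trivialSeed and tryAsU32 are in scope (both appear elsewhere in this file):

    const hasher = await Blake2b.createHasher();
    const seed = trivialSeed(tryAsU32(0));
    const ed25519Secret = deriveEd25519SecretKey(seed, hasher);
    const bandersnatchSecret = deriveBandersnatchSecretKey(seed, hasher);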
@@ -9126,21 +9115,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
   return Ordering.Equal;
 }
 
-declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
-  Descriptor.withView(
-    val.name,
-    val.sizeHint,
-    (e, elem) => val.encode(e, elem.data),
-    (d): WithHash<H, T> => {
-      const decoder2 = d.clone();
-      const encoded = val.skipEncoded(decoder2);
-      const hash = blake2b.hashBytes(encoded);
-      return new WithHash(hash.asOpaque(), val.decode(d));
-    },
-    val.skip,
-    val.View,
-  );
-
 /**
  * Assignment of particular work report to a core.
  *
@@ -9151,7 +9125,7 @@ declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>
  */
 declare class AvailabilityAssignment extends WithDebug {
   static Codec = codec.Class(AvailabilityAssignment, {
-    workReport:
+    workReport: WorkReport.Codec,
     timeout: codec.u32.asOpaque<TimeSlot>(),
   });
 
@@ -9161,7 +9135,7 @@ declare class AvailabilityAssignment extends WithDebug {
 
   private constructor(
     /** Work report assigned to a core. */
-    public readonly workReport:
+    public readonly workReport: WorkReport,
    /** Time slot at which the report becomes obsolete. */
     public readonly timeout: TimeSlot,
   ) {
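Note: with codecWithHash removed, AvailabilityAssignment now stores the WorkReport directly instead of a WithHash wrapper. A construction sketch (report and timeout assumed in scope), matching the JSON parser change further down this diff:

    const assignment = AvailabilityAssignment.create({ workReport: report, timeout });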
@@ -11308,7 +11282,6 @@ declare const index$e_codecPerCore: typeof codecPerCore;
 declare const index$e_codecServiceId: typeof codecServiceId;
 declare const index$e_codecVarGas: typeof codecVarGas;
 declare const index$e_codecVarU16: typeof codecVarU16;
-declare const index$e_codecWithHash: typeof codecWithHash;
 declare const index$e_hashComparator: typeof hashComparator;
 declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
 declare const index$e_serviceDataCodec: typeof serviceDataCodec;
@@ -11319,7 +11292,7 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
 declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
 declare const index$e_zeroSizeHint: typeof zeroSizeHint;
 declare namespace index$e {
-  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$
+  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
   export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
 }
 
@@ -11387,7 +11360,7 @@ declare namespace stateKeys {
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
-  export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
+  export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11395,11 +11368,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
-  export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
+  export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11407,11 +11380,16 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
-  export function serviceLookupHistory(
+  export function serviceLookupHistory(
+    blake2b: Blake2b,
+    serviceId: ServiceId,
+    hash: PreimageHash,
+    preimageLength: U32,
+  ): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const doubleHash = blake2b.hashBytes(hash);
       const out = Bytes.zero(HASH_SIZE);
@@ -11420,11 +11398,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, preimageLength, hash);
+    return serviceNested(blake2b, serviceId, preimageLength, hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
-  export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
+  export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
     const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
     const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
     const key = Bytes.zero(HASH_SIZE);
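Note: every stateKeys helper now threads the Blake2b instance down to serviceNested, which performs the actual hashing. A sketch of computing a storage key (serviceId and key assumed in scope):

    const hasher = await Blake2b.createHasher();
    const storageKey = stateKeys.serviceStorage(hasher, serviceId, key);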
@@ -11608,20 +11586,20 @@ declare namespace serialize {
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
-  export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
-    key: stateKeys.serviceStorage(serviceId, key),
+  export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
+    key: stateKeys.serviceStorage(blake2b, serviceId, key),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
-  export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
-    key: stateKeys.servicePreimage(serviceId, hash),
+  export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
+    key: stateKeys.servicePreimage(blake2b, serviceId, hash),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
-  export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
-    key: stateKeys.serviceLookupHistory(serviceId, hash, len),
+  export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
+    key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
     Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
   });
 }
@@ -11656,6 +11634,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
 /** Serialize given state update into a series of key-value pairs. */
 declare function* serializeStateUpdate(
   spec: ChainSpec,
+  blake2b: Blake2b,
   update: Partial<State & ServicesUpdate>,
 ): Generator<StateEntryUpdate> {
   // first let's serialize all of the simple entries (if present!)
@@ -11664,9 +11643,9 @@ declare function* serializeStateUpdate(
   const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
 
   // then let's proceed with service updates
-  yield* serializeServiceUpdates(update.servicesUpdates, encode);
-  yield* serializePreimages(update.preimages, encode);
-  yield* serializeStorage(update.storage);
+  yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
+  yield* serializePreimages(update.preimages, encode, blake2b);
+  yield* serializeStorage(update.storage, blake2b);
   yield* serializeRemovedServices(update.servicesRemoved);
 }
 
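Note: state serialization no longer reaches for a module-level hasher; the caller supplies one. A sketch (spec and update assumed in scope):

    const hasher = await Blake2b.createHasher();
    for (const [action, key, value] of serializeStateUpdate(spec, hasher, update)) {
      // apply each StateEntryUpdate to the backing store
    }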
@@ -11678,18 +11657,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
   }
 }
 
-declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
+declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of storage ?? []) {
     switch (action.kind) {
       case UpdateStorageKind.Set: {
         const key = action.storage.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
         break;
       }
       case UpdateStorageKind.Remove: {
         const key = action.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
         break;
       }
@@ -11699,16 +11678,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
   }
 }
 
-declare function* serializePreimages(
+declare function* serializePreimages(
+  preimages: UpdatePreimage[] | undefined,
+  encode: EncodeFun,
+  blake2b: Blake2b,
+): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of preimages ?? []) {
     switch (action.kind) {
       case UpdatePreimageKind.Provide: {
         const { hash, blob } = action.preimage;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Insert, codec.key, blob];
 
         if (action.slot !== null) {
-          const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
+          const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
           yield [
             StateEntryUpdateAction.Insert,
             codec2.key,
@@ -11719,16 +11702,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
       }
       case UpdatePreimageKind.UpdateOrAdd: {
         const { hash, length, slots } = action.item;
-        const codec = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
         break;
       }
       case UpdatePreimageKind.Remove: {
         const { hash, length } = action;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
 
-        const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
         break;
       }
@@ -11740,6 +11723,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
 declare function* serializeServiceUpdates(
   servicesUpdates: UpdateService[] | undefined,
   encode: EncodeFun,
+  blake2b: Blake2b,
 ): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of servicesUpdates ?? []) {
     // new service being created or updated
@@ -11749,7 +11733,7 @@ declare function* serializeServiceUpdates(
     // additional lookup history update
     if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
       const { lookupHistory } = action;
-      const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
+      const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
       yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
     }
   }
@@ -11883,8 +11867,8 @@ declare class StateEntries {
   );
 
   /** Turn in-memory state into it's serialized form. */
-  static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
-    return new StateEntries(convertInMemoryStateToDictionary(spec, state));
+  static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
+    return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
   }
 
   /**
@@ -11939,7 +11923,8 @@ declare class StateEntries {
   }
 
   /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
-  getRootHash(): StateRootHash {
+  getRootHash(blake2b: Blake2b): StateRootHash {
+    const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
     const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
     for (const [key, value] of this) {
       leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
@@ -11952,6 +11937,7 @@ declare class StateEntries {
 /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
 declare function convertInMemoryStateToDictionary(
   spec: ChainSpec,
+  blake2b: Blake2b,
   state: InMemoryState,
 ): TruncatedHashDictionary<StateKey, BytesBlob> {
   const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11984,20 +11970,25 @@ declare function convertInMemoryStateToDictionary(
 
     // preimages
     for (const preimage of service.data.preimages.values()) {
-      const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
+      const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
       serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
     }
 
     // storage
     for (const storage of service.data.storage.values()) {
-      const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
+      const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
       serialized.set(key, Encoder.encodeObject(Codec, storage.value));
     }
 
     // lookup history
     for (const lookupHistoryList of service.data.lookupHistory.values()) {
      for (const lookupHistory of lookupHistoryList) {
-        const { key, Codec } = serialize.serviceLookupHistory(
+        const { key, Codec } = serialize.serviceLookupHistory(
+          blake2b,
+          serviceId,
+          lookupHistory.hash,
+          lookupHistory.length,
+        );
         serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
       }
     }
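Note: computing a state root now needs the hasher twice, once for serialization and once for the trie. A sketch mirroring InMemoryStates.getStateRoot further down this diff (spec and state assumed in scope):

    const hasher = await Blake2b.createHasher();
    const root = StateEntries.serializeInMemory(spec, hasher, state).getRootHash(hasher);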
@@ -12028,21 +12019,23 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
   implements State, EnumerableState
 {
   /** Create a state-like object from collection of serialized entries. */
-  static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
-    return new SerializedState(spec, state, recentServices);
+  static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
+    return new SerializedState(spec, blake2b, state, recentServices);
   }
 
   /** Create a state-like object backed by some DB. */
   static new<T extends SerializedStateBackend>(
     spec: ChainSpec,
+    blake2b: Blake2b,
     db: T,
     recentServices: ServiceId[] = [],
   ): SerializedState<T> {
-    return new SerializedState(spec, db, recentServices);
+    return new SerializedState(spec, blake2b, db, recentServices);
   }
 
   private constructor(
     private readonly spec: ChainSpec,
+    private readonly blake2b: Blake2b,
     public backend: T,
     /** Best-effort list of recently active services. */
     private readonly _recentServiceIds: ServiceId[],
@@ -12073,7 +12066,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
       this._recentServiceIds.push(id);
     }
 
-    return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
+    return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
   }
 
   private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
@@ -12172,6 +12165,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
 /** Service data representation on a serialized state. */
 declare class SerializedService implements Service {
   constructor(
+    public readonly blake2b: Blake2b,
     /** Service id */
     public readonly serviceId: ServiceId,
     private readonly accountInfo: ServiceAccountInfo,
@@ -12190,11 +12184,11 @@ declare class SerializedService implements Service {
       const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
       serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
       serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
-      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
-      return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
+      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
+      return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
     }
 
-    return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
+    return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
   }
 
   /**
@@ -12204,17 +12198,17 @@ declare class SerializedService implements Service {
    */
   hasPreimage(hash: PreimageHash): boolean {
     // TODO [ToDr] consider optimizing to avoid fetching the whole data.
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
   }
 
   /** Retrieve preimage from the DB. */
   getPreimage(hash: PreimageHash): BytesBlob | null {
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
   }
 
   /** Retrieve preimage lookup history. */
   getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
-    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
+    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
     if (rawSlots === undefined) {
       return null;
     }
@@ -12227,9 +12221,9 @@ type KeyAndCodec<T> = {
   Codec: Decode<T>;
 };
 
-declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
+declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
   const stateEntries = StateEntries.fromEntriesUnsafe(entries);
-  return SerializedState.fromStateEntries(spec, stateEntries);
+  return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
 }
 
 /**
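Note: loadState forwards the hasher into SerializedState, which in turn hands it to every SerializedService it creates. A sketch (spec and entries assumed in scope):

    const hasher = await Blake2b.createHasher();
    const state = loadState(spec, hasher, entries);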
@@ -12385,7 +12379,8 @@ declare class LeafDb implements SerializedStateBackend {
     assertNever(val);
   }
 
-  getStateRoot(): StateRootHash {
+  getStateRoot(blake2b: Blake2b): StateRootHash {
+    const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
     return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
   }
 
@@ -12483,7 +12478,8 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
   }
 
   async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
-
+    const blake2b = await Blake2b.createHasher();
+    return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
   }
 
   /** Insert a full state into the database. */
@@ -18895,10 +18891,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
  *
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
-declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
+declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
   check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
   const n = arr.length;
-  const randomNumbers = hashToNumberSequence(entropy, arr.length);
+  const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
   const result: T[] = new Array<T>(n);
 
   let itemsLeft = n;
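Note: the shuffle derives its random sequence by hashing the entropy, so it now takes the hasher first. A sketch (arr and entropy assumed in scope; entropy must be exactly ENTROPY_BYTES long):

    const hasher = await Blake2b.createHasher();
    const shuffled = fisherYatesShuffle(hasher, arr, entropy);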
@@ -19070,8 +19066,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
     timeout: "number",
   },
   ({ report, timeout }) => {
-
-    return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
+    return AvailabilityAssignment.create({ workReport: report, timeout });
   },
 );
 
@@ -19572,7 +19567,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   constructor(
     private readonly context: ChainSpec,
     private readonly keccakHasher: KeccakHasher,
-
+    public readonly blake2b: Blake2b,
   ) {}
 
   /** Concatenates two hashes and hash this concatenation */
@@ -19586,7 +19581,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
 
   /** Creates hash from the block header view */
   header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
-    return new WithHash(blake2b.hashBytes(header.encoded()
+    return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
   }
 
   /**
@@ -19600,7 +19595,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
       .view()
       .map((g) => g.view())
       .map((guarantee) => {
-        const reportHash = blake2b.hashBytes(guarantee.report.encoded()
+        const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
         return BytesBlob.blobFromParts([
           reportHash.raw,
           guarantee.slot.encoded().raw,
@@ -19610,15 +19605,15 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
 
     const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);
 
-    const et = blake2b.hashBytes(extrinsicView.tickets.encoded()
-    const ep = blake2b.hashBytes(extrinsicView.preimages.encoded()
-    const eg = blake2b.hashBytes(guaranteeBlob
-    const ea = blake2b.hashBytes(extrinsicView.assurances.encoded()
-    const ed = blake2b.hashBytes(extrinsicView.disputes.encoded()
+    const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
+    const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
+    const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
+    const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
+    const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
 
     const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);
 
-    return new WithHashAndBytes(blake2b.hashBytes(encoded
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
   }
 
   /** Creates hash for given WorkPackage */
@@ -19629,7 +19624,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
     // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
     const encoded = Encoder.encodeObject(codec, data, this.context);
-    return new WithHashAndBytes(blake2b.hashBytes(encoded
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
   }
 }
 
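Note: TransitionHasher now receives the Blake2b instance as a third constructor argument and uses this.blake2b wherever it previously called the module-level hasher. A construction sketch (spec assumed in scope):

    const keccak = await KeccakHasher.create();
    const blake2b = await Blake2b.createHasher();
    const transitionHasher = new TransitionHasher(spec, keccak, blake2b);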
@@ -19650,7 +19645,10 @@ declare enum PreimagesErrorCode {
 
 // TODO [SeKo] consider whether this module is the right place to remove expired preimages
 declare class Preimages {
-  constructor(
+  constructor(
+    public readonly state: PreimagesState,
+    public readonly blake2b: Blake2b,
+  ) {}
 
   integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
     // make sure lookup extrinsics are sorted and unique
@@ -19679,7 +19677,7 @@ declare class Preimages {
     // select preimages for integration
     for (const preimage of preimages) {
       const { requester, blob } = preimage;
-      const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
+      const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();
 
       const service = this.state.getService(requester);
       if (service === null) {
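Note: Preimages gains the hasher as a second constructor argument, used to derive each provided preimage's hash during integration. A sketch (state and input assumed in scope):

    const hasher = await Blake2b.createHasher();
    const preimages = new Preimages(state, hasher);
    const result = preimages.integrate(input);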
@@ -19710,156 +19708,6 @@ declare class Preimages {
   }
 }
 
-declare enum ServiceExecutorError {
-  NoLookup = 0,
-  NoState = 1,
-  NoServiceCode = 2,
-  ServiceCodeMismatch = 3,
-}
-
-declare class WorkPackageExecutor {
-  constructor(
-    private readonly blocks: BlocksDb,
-    private readonly state: StatesDb,
-    private readonly hasher: TransitionHasher,
-  ) {}
-
-  // TODO [ToDr] this while thing should be triple-checked with the GP.
-  // I'm currently implementing some dirty version for the demo.
-  async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
-    const headerHash = pack.context.lookupAnchor;
-    // execute authorisation first or is it already executed and we just need to check it?
-    const authExec = this.getServiceExecutor(
-      // TODO [ToDr] should this be anchor or lookupAnchor?
-      headerHash,
-      pack.authCodeHost,
-      pack.authCodeHash,
-    );
-
-    if (authExec.isError) {
-      // TODO [ToDr] most likely shouldn't be throw.
-      throw new Error(`Could not get authorization executor: ${authExec.error}`);
-    }
-
-    const pvm = authExec.ok;
-    const authGas = tryAsGas(15_000n);
-    const result = await pvm.run(pack.parametrization, authGas);
-
-    if (!result.isEqualTo(pack.authorization)) {
-      throw new Error("Authorization is invalid.");
-    }
-
-    const results: WorkResult[] = [];
-    for (const item of pack.items) {
-      const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
-      if (exec.isError) {
-        throw new Error(`Could not get item executor: ${exec.error}`);
-      }
-      const pvm = exec.ok;
-
-      const gasRatio = tryAsServiceGas(3_000n);
-      const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
-      results.push(
-        WorkResult.create({
-          serviceId: item.service,
-          codeHash: item.codeHash,
-          payloadHash: blake2b.hashBytes(item.payload),
-          gas: gasRatio,
-          result: new WorkExecResult(WorkExecResultKind.ok, ret),
-          load: WorkRefineLoad.create({
-            gasUsed: tryAsServiceGas(5),
-            importedSegments: tryAsU32(0),
-            exportedSegments: tryAsU32(0),
-            extrinsicSize: tryAsU32(0),
-            extrinsicCount: tryAsU32(0),
-          }),
-        }),
-      );
-    }
-
-    const workPackage = this.hasher.workPackage(pack);
-    const workPackageSpec = WorkPackageSpec.create({
-      hash: workPackage.hash,
-      length: tryAsU32(workPackage.encoded.length),
-      erasureRoot: Bytes.zero(HASH_SIZE),
-      exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
-      exportsCount: tryAsU16(0),
-    });
-    const coreIndex = tryAsCoreIndex(0);
-    const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
-
-    const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
-
-    return Promise.resolve(
-      WorkReport.create({
-        workPackageSpec,
-        context: pack.context,
-        coreIndex,
-        authorizerHash,
-        authorizationOutput: pack.authorization,
-        segmentRootLookup: [],
-        results: workResults,
-        authorizationGasUsed: tryAsServiceGas(0),
-      }),
-    );
-  }
-
-  getServiceExecutor(
-    lookupAnchor: HeaderHash,
-    serviceId: ServiceId,
-    expectedCodeHash: CodeHash,
-  ): Result$2<PvmExecutor, ServiceExecutorError> {
-    const header = this.blocks.getHeader(lookupAnchor);
-    if (header === null) {
-      return Result.error(ServiceExecutorError.NoLookup);
-    }
-
-    const state = this.state.getState(lookupAnchor);
-    if (state === null) {
-      return Result.error(ServiceExecutorError.NoState);
-    }
-
-    const service = state.getService(serviceId);
-    const serviceCodeHash = service?.getInfo().codeHash ?? null;
-    if (serviceCodeHash === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
-      return Result.error(ServiceExecutorError.ServiceCodeMismatch);
-    }
-
-    const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
-    if (serviceCode === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    return Result.ok(new PvmExecutor(serviceCode));
-  }
-}
-
-declare class PvmExecutor {
-  private readonly pvm: HostCalls;
-  private hostCalls = new HostCallsManager({ missing: new Missing() });
-  private pvmInstanceManager = new PvmInstanceManager(4);
-
-  constructor(private serviceCode: BytesBlob) {
-    this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
-  }
-
-  async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
-    const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
-
-    const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
-
-    if (result.hasMemorySlice()) {
-      return BytesBlob.blobFrom(result.memorySlice);
-    }
-
-    return BytesBlob.empty();
-  }
-}
-
 type index_Preimages = Preimages;
 declare const index_Preimages: typeof Preimages;
 type index_PreimagesErrorCode = PreimagesErrorCode;
@@ -19869,10 +19717,8 @@ type index_PreimagesState = PreimagesState;
 type index_PreimagesStateUpdate = PreimagesStateUpdate;
 type index_TransitionHasher = TransitionHasher;
 declare const index_TransitionHasher: typeof TransitionHasher;
-type index_WorkPackageExecutor = WorkPackageExecutor;
-declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
 declare namespace index {
-  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher
+  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
   export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
 }
 