@typeberry/lib 0.1.3-707962d → 0.1.3-8258907
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +423 -1122
- package/index.d.ts +338 -408
- package/index.js +422 -1121
- package/package.json +1 -1
package/index.d.ts CHANGED
@@ -35,7 +35,9 @@ declare function parseCurrentVersion(env?: string): GpVersion | undefined {
 }
 
 declare function parseCurrentSuite(env?: string): TestSuite | undefined {
-  if (env === undefined)
+  if (env === undefined) {
+    return undefined;
+  }
   switch (env) {
     case TestSuite.W3F_DAVXY:
     case TestSuite.JAMDUNA:
@@ -420,6 +422,20 @@ declare const Result$2 = {
   },
 };
 
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+declare const MAX_LENGTH$1 = 2145386496;
+
+declare function safeAllocUint8Array(length: number) {
+  if (length > MAX_LENGTH) {
+    // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+    console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+  }
+  return new Uint8Array(Math.min(MAX_LENGTH, length));
+}
+
 /**
  * Utilities for tests.
  */
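Note on the new allocator: per the declaration above, oversized requests are clamped to MAX_LENGTH rather than rejected, so callers can receive a shorter buffer than requested. A minimal sketch of that behavior (the requested sizes are illustrative, not from the package):

```ts
// Sketch only: demonstrates the clamping semantics of safeAllocUint8Array
// as declared above; the byte counts are made-up examples.
const small = safeAllocUint8Array(1024);
console.log(small.length); // 1024 — under the cap, allocated as requested

const oversized = safeAllocUint8Array(3_000_000_000);
console.log(oversized.length); // 2145386496 — clamped to MAX_LENGTH, warning logged
```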
@@ -573,8 +589,12 @@ declare function deepEqual<T>(
     const aKey = `${a.key}`;
     const bKey = `${b.key}`;
 
-    if (aKey < bKey)
-
+    if (aKey < bKey) {
+      return -1;
+    }
+    if (bKey < aKey) {
+      return 1;
+    }
     return 0;
   });
 };
@@ -755,11 +775,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
 declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
 declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
 declare const index$u_resultToString: typeof resultToString;
+declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
 declare const index$u_seeThrough: typeof seeThrough;
 declare const index$u_trimStack: typeof trimStack;
 declare const index$u_workspacePathFix: typeof workspacePathFix;
 declare namespace index$u {
-  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
+  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
   export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
 }
 
@@ -929,7 +950,7 @@ declare class BytesBlob {
   static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
     const vArr = v instanceof Uint8Array ? [v] : v;
     const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-    const buffer =
+    const buffer = safeAllocUint8Array(totalLength);
     let offset = 0;
     for (const r of vArr) {
       buffer.set(r, offset);
@@ -1012,7 +1033,7 @@ declare class Bytes<T extends number> extends BytesBlob {
 
   /** Create an empty [`Bytes<X>`] of given length. */
   static zero<X extends number>(len: X): Bytes<X> {
-    return new Bytes(
+    return new Bytes(safeAllocUint8Array(len), len);
   }
 
   // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1154,7 @@ declare class BitVec {
    * Create new [`BitVec`] with all values set to `false`.
    */
   static empty(bitLength: number) {
-    const data =
+    const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
     return new BitVec(data, bitLength);
   }
 
@@ -3461,6 +3482,99 @@ declare namespace index$q {
   export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
 }
 
+/**
+ * A utility class providing a readonly view over a portion of an array without copying it.
+ */
+declare class ArrayView<T> implements Iterable<T> {
+  private readonly source: T[];
+  public readonly length: number;
+
+  private constructor(
+    source: T[],
+    private readonly start: number,
+    private readonly end: number,
+  ) {
+    this.source = source;
+    this.length = end - start;
+  }
+
+  static from<T>(source: T[], start = 0, end = source.length): ArrayView<T> {
+    check`
+      ${start >= 0 && end <= source.length && start <= end}
+      Invalid start (${start})/end (${end}) for ArrayView
+    `;
+    return new ArrayView(source, start, end);
+  }
+
+  get(i: number): T {
+    check`
+      ${i >= 0 && i < this.length}
+      Index out of bounds: ${i} < ${this.length}
+    `;
+    return this.source[this.start + i];
+  }
+
+  subview(from: number, to: number = this.length): ArrayView<T> {
+    return ArrayView.from(this.source, this.start + from, this.start + to);
+  }
+
+  toArray(): T[] {
+    return this.source.slice(this.start, this.end);
+  }
+
+  *[Symbol.iterator](): Iterator<T> {
+    for (let i = this.start; i < this.end; i++) {
+      yield this.source[i];
+    }
+  }
+}
+
+type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
+type IDataType = string | Buffer | ITypedArray;
+
+type IHasher = {
+  /**
+   * Initializes hash state to default value
+   */
+  init: () => IHasher;
+  /**
+   * Updates the hash content with the given data
+   */
+  update: (data: IDataType) => IHasher;
+  /**
+   * Calculates the hash of all of the data passed to be hashed with hash.update().
+   * Defaults to hexadecimal string
+   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
+   *                   returns hexadecimal string
+   */
+  digest: {
+    (outputType: "binary"): Uint8Array;
+    (outputType?: "hex"): string;
+  };
+  /**
+   * Save the current internal state of the hasher for later resumption with load().
+   * Cannot be called before .init() or after .digest()
+   *
+   * Note that this state can include arbitrary information about the value being hashed (e.g.
+   * could include N plaintext bytes from the value), so needs to be treated as being as
+   * sensitive as the input value itself.
+   */
+  save: () => Uint8Array;
+  /**
+   * Resume a state that was created by save(). If this state was not created by a
+   * compatible build of hash-wasm, an exception will be thrown.
+   */
+  load: (state: Uint8Array) => IHasher;
+  /**
+   * Block size in bytes
+   */
+  blockSize: number;
+  /**
+   * Digest size in bytes
+   */
+  digestSize: number;
+};
+
 /**
  * Size of the output of the hash functions.
 *
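A possible usage sketch for the newly added ArrayView (the values are illustrative): only toArray() copies, while get, subview, and iteration read through to the underlying array.

```ts
// Sketch only: exercises the ArrayView API declared above.
const data = [10, 20, 30, 40, 50];
const view = ArrayView.from(data, 1, 4); // window over [20, 30, 40], no copy
console.log(view.length);                // 3
console.log(view.get(0));                // 20
const inner = view.subview(1);           // nested window over [30, 40]
console.log([...inner]);                 // [30, 40] — iterator walks the source
console.log(view.toArray());             // [20, 30, 40] — the only copying call
```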
@@ -3516,144 +3630,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
   }
 }
 
-
-interface HashAllocator {
-  /** Return a new hash destination. */
-  emptyHash(): OpaqueHash;
-}
+declare const zero$1 = Bytes.zero(HASH_SIZE);
 
-
-
-
-    return Bytes.zero(HASH_SIZE);
+declare class Blake2b {
+  static async createHasher() {
+    return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
   }
-}
 
-
-declare class PageAllocator implements HashAllocator {
-  private page: Uint8Array = new Uint8Array(0);
-  private currentHash = 0;
-
-  // TODO [ToDr] Benchmark the performance!
-  constructor(private readonly hashesPerPage: number) {
-    check`${hashesPerPage > 0 && hashesPerPage >>> 0 === hashesPerPage} Expected a non-zero integer.`;
-    this.resetPage();
-  }
+  private constructor(private readonly hasher: IHasher) {}
 
-
-
-
-
-
-
-
-    const endIdx = startIdx + HASH_SIZE;
-
-    this.currentHash += 1;
-    if (this.currentHash >= this.hashesPerPage) {
-      this.resetPage();
+  /**
+   * Hash given collection of blobs.
+   *
+   * If empty array is given a zero-hash is returned.
+   */
+  hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
+    if (r.length === 0) {
+      return zero.asOpaque();
     }
 
-
+    const hasher = this.hasher.init();
+    for (const v of r) {
+      hasher.update(v instanceof BytesBlob ? v.raw : v);
+    }
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
-}
-
-declare const defaultAllocator = new SimpleAllocator();
 
-/**
-
-
-
-
-
-  r: (BytesBlob | Uint8Array)[],
-  allocator: HashAllocator = defaultAllocator,
-): H {
-  const out = allocator.emptyHash();
-  if (r.length === 0) {
-    return out.asOpaque();
+  /** Hash given blob of bytes. */
+  hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
+    const hasher = this.hasher.init();
+    const bytes = blob instanceof BytesBlob ? blob.raw : blob;
+    hasher.update(bytes);
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
 
-
-
-
+  /** Convert given string into bytes and hash it. */
+  hashString(str: string) {
+    return this.hashBytes(BytesBlob.blobFromString(str));
   }
-  hasher?.digest(out.raw);
-  return out.asOpaque();
-}
-
-/** Hash given blob of bytes. */
-declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
-  const hasher = blake2b(HASH_SIZE);
-  const bytes = blob instanceof BytesBlob ? blob.raw : blob;
-  hasher?.update(bytes);
-  const out = allocator.emptyHash();
-  hasher?.digest(out.raw);
-  return out;
 }
 
-/** Convert given string into bytes and hash it. */
-declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
-  return hashBytes(BytesBlob.blobFromString(str), allocator);
-}
-
-declare const blake2b_hashBytes: typeof hashBytes;
-declare const blake2b_hashString: typeof hashString;
-declare namespace blake2b {
-  export {
-    hashBlobs$1 as hashBlobs,
-    blake2b_hashBytes as hashBytes,
-    blake2b_hashString as hashString,
-  };
-}
-
-type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
-type IDataType = string | Buffer | ITypedArray;
-
-type IHasher = {
-  /**
-   * Initializes hash state to default value
-   */
-  init: () => IHasher;
-  /**
-   * Updates the hash content with the given data
-   */
-  update: (data: IDataType) => IHasher;
-  /**
-   * Calculates the hash of all of the data passed to be hashed with hash.update().
-   * Defaults to hexadecimal string
-   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
-   *                   returns hexadecimal string
-   */
-  digest: {
-    (outputType: "binary"): Uint8Array;
-    (outputType?: "hex"): string;
-  };
-  /**
-   * Save the current internal state of the hasher for later resumption with load().
-   * Cannot be called before .init() or after .digest()
-   *
-   * Note that this state can include arbitrary information about the value being hashed (e.g.
-   * could include N plaintext bytes from the value), so needs to be treated as being as
-   * sensitive as the input value itself.
-   */
-  save: () => Uint8Array;
-  /**
-   * Resume a state that was created by save(). If this state was not created by a
-   * compatible build of hash-wasm, an exception will be thrown.
-   */
-  load: (state: Uint8Array) => IHasher;
-  /**
-   * Block size in bytes
-   */
-  blockSize: number;
-  /**
-   * Digest size in bytes
-   */
-  digestSize: number;
-};
-
 declare class KeccakHasher {
   static async create(): Promise<KeccakHasher> {
     return new KeccakHasher(await createKeccak(256));
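The net effect of this hunk: the allocator-based free functions (blake2b.hashBytes, blake2b.hashString, plus HashAllocator/PageAllocator/SimpleAllocator) are replaced by a single Blake2b class wrapping a hash-wasm IHasher, created asynchronously. A migration sketch (the `before` line uses the removed API; `blob1`/`blob2` are illustrative):

```ts
// Sketch only: how call sites change with the new instance-based API.
// before: const hash = blake2b.hashBytes(blob1); // free function + optional allocator
const hasher = await Blake2b.createHasher();     // one-time async setup (loads WASM)
const blob1 = BytesBlob.blobFromString("hello");
const blob2 = BytesBlob.blobFromString("world");
const hash = hasher.hashBytes(blob1);            // Blake2bHash (HASH_SIZE bytes)
const textHash = hasher.hashString("hello");
const combined = hasher.hashBlobs([blob1, blob2]); // [] would return the exported zero hash
```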
@@ -3681,15 +3697,15 @@ declare namespace keccak {
   };
 }
 
+// TODO [ToDr] (#213) this should most likely be moved to a separate
+// package to avoid pulling in unnecessary deps.
+
+type index$p_Blake2b = Blake2b;
+declare const index$p_Blake2b: typeof Blake2b;
 type index$p_Blake2bHash = Blake2bHash;
 type index$p_HASH_SIZE = HASH_SIZE;
-type index$p_HashAllocator = HashAllocator;
 type index$p_KeccakHash = KeccakHash;
 type index$p_OpaqueHash = OpaqueHash;
-type index$p_PageAllocator = PageAllocator;
-declare const index$p_PageAllocator: typeof PageAllocator;
-type index$p_SimpleAllocator = SimpleAllocator;
-declare const index$p_SimpleAllocator: typeof SimpleAllocator;
 type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
 type index$p_TruncatedHash = TruncatedHash;
 type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3697,12 +3713,10 @@ declare const index$p_WithHash: typeof WithHash;
 type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
 declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
 declare const index$p_ZERO_HASH: typeof ZERO_HASH;
-declare const index$p_blake2b: typeof blake2b;
-declare const index$p_defaultAllocator: typeof defaultAllocator;
 declare const index$p_keccak: typeof keccak;
 declare namespace index$p {
-  export { index$
-  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$
+  export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
+  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
 }
 
 /** Immutable view of the `HashDictionary`. */
@@ -4479,6 +4493,8 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
   }
 }
 
+type index$o_ArrayView<T> = ArrayView<T>;
+declare const index$o_ArrayView: typeof ArrayView;
 type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
 declare const index$o_FixedSizeArray: typeof FixedSizeArray;
 type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
@@ -4506,7 +4522,7 @@ type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDic
 declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
 declare const index$o_asKnownSize: typeof asKnownSize;
 declare namespace index$o {
-  export { index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
+  export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
   export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
 }
 
@@ -4735,7 +4751,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
     (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
     0,
   );
-  const data =
+  const data = safeAllocUint8Array(dataLength);
 
   let offset = 0;
 
@@ -4825,22 +4841,16 @@ declare function trivialSeed(s: U32): KeySeed {
  * Derives a Ed25519 secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveEd25519SecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): Ed25519SecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
  * Derives a Bandersnatch secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveBandersnatchSecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): BandersnatchSecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
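Both derivation helpers drop the optional SimpleAllocator parameter and instead require the caller's Blake2b instance; a sketch of the updated call shape (trivialSeed and tryAsU32 are from this package, the seed value is illustrative):

```ts
// Sketch only: deriving secret keys with the explicit Blake2b parameter.
const blake2b = await Blake2b.createHasher();
const seed = trivialSeed(tryAsU32(0)); // deterministic test seed
const ed25519Secret = deriveEd25519SecretKey(seed, blake2b);
const bandersnatchSecret = deriveBandersnatchSecretKey(seed, blake2b);
```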
@@ -8373,7 +8383,7 @@ declare enum NodeType {
 declare class TrieNode {
   constructor(
     /** Exactly 512 bits / 64 bytes */
-    public readonly raw: Uint8Array =
+    public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
   ) {}
 
   /** Returns the type of the node */
@@ -9111,21 +9121,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
   return Ordering.Equal;
 }
 
-declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
-  Descriptor.withView(
-    val.name,
-    val.sizeHint,
-    (e, elem) => val.encode(e, elem.data),
-    (d): WithHash<H, T> => {
-      const decoder2 = d.clone();
-      const encoded = val.skipEncoded(decoder2);
-      const hash = blake2b.hashBytes(encoded);
-      return new WithHash(hash.asOpaque(), val.decode(d));
-    },
-    val.skip,
-    val.View,
-  );
-
 /**
  * Assignment of particular work report to a core.
  *
@@ -9136,7 +9131,7 @@ declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>
  */
 declare class AvailabilityAssignment extends WithDebug {
   static Codec = codec.Class(AvailabilityAssignment, {
-    workReport:
+    workReport: WorkReport.Codec,
     timeout: codec.u32.asOpaque<TimeSlot>(),
   });
 
@@ -9146,7 +9141,7 @@ declare class AvailabilityAssignment extends WithDebug {
 
   private constructor(
     /** Work report assigned to a core. */
-    public readonly workReport:
+    public readonly workReport: WorkReport,
     /** Time slot at which the report becomes obsolete. */
     public readonly timeout: TimeSlot,
   ) {
@@ -9196,6 +9191,11 @@ declare class DisputesRecords {
     return new DisputesRecords(goodSet, badSet, wonkySet, punishSet);
   }
 
+  private readonly goodSetDict: ImmutableHashSet<WorkReportHash>;
+  private readonly badSetDict: ImmutableHashSet<WorkReportHash>;
+  private readonly wonkySetDict: ImmutableHashSet<WorkReportHash>;
+  private readonly punishSetDict: ImmutableHashSet<Ed25519Key>;
+
   private constructor(
     /** `goodSet`: all work-reports hashes which were judged to be correct */
     public readonly goodSet: ImmutableSortedSet<WorkReportHash>,
@@ -9205,7 +9205,21 @@ declare class DisputesRecords {
     public readonly wonkySet: ImmutableSortedSet<WorkReportHash>,
     /** `punishSet`: set of Ed25519 keys representing validators which were found to have misjudged a work-report */
     public readonly punishSet: ImmutableSortedSet<Ed25519Key>,
-  ) {
+  ) {
+    this.goodSetDict = HashSet.from(goodSet.array);
+    this.badSetDict = HashSet.from(badSet.array);
+    this.wonkySetDict = HashSet.from(wonkySet.array);
+    this.punishSetDict = HashSet.from(punishSet.array);
+  }
+
+  public asDictionaries() {
+    return {
+      goodSet: this.goodSetDict,
+      badSet: this.badSetDict,
+      wonkySet: this.wonkySetDict,
+      punishSet: this.punishSetDict,
+    };
+  }
 
   static fromSortedArrays({
     goodSet,
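The constructor now eagerly mirrors the four sorted sets into hash-set dictionaries, trading a one-time build cost for dictionary-speed membership checks. A sketch (assuming ImmutableHashSet exposes a has-style lookup, which this diff does not show; `records` and `workReportHash` are illustrative):

```ts
// Sketch only: `records` is a DisputesRecords instance. The `has` call is an
// assumption about ImmutableHashSet's API, not confirmed by this diff.
const { goodSet, badSet, wonkySet, punishSet } = records.asDictionaries();
if (badSet.has(workReportHash)) {
  // the report was judged invalid
}
```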
@@ -9815,6 +9829,31 @@ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
     (_s) => {},
   );
 
+/** Encode and decode object with leading version number. */
+declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
+  Descriptor.new<T>(
+    "withVersion",
+    {
+      bytes: val.sizeHint.bytes + 8,
+      isExact: false,
+    },
+    (e, v) => {
+      e.varU64(0n);
+      val.encode(e, v);
+    },
+    (d) => {
+      const version = d.varU64();
+      if (version !== 0n) {
+        throw new Error("Non-zero version is not supported!");
+      }
+      return val.decode(d);
+    },
+    (s) => {
+      s.varU64();
+      val.skip(s);
+    },
+  );
+
 /**
  * Service account details.
 *
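codecWithVersion wraps any Descriptor so that encodings carry a leading variable-length version number (always 0n for now) and decoding rejects anything else; further down it is applied to ServiceAccountInfo from GP 0.7.1 onward. A sketch of the round-trip shape (`accountInfo` is illustrative):

```ts
// Sketch only: wrapping a codec with the version prefix declared above.
const versioned = codecWithVersion(ServiceAccountInfo.Codec);
const bytes = Encoder.encodeObject(versioned, accountInfo);
// layout: varU64(0) ++ ServiceAccountInfo encoding; the size hint grows by up
// to 8 bytes, and decoding through this descriptor throws on version !== 0n.
```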
@@ -11274,7 +11313,7 @@ declare const index$e_codecPerCore: typeof codecPerCore;
 declare const index$e_codecServiceId: typeof codecServiceId;
 declare const index$e_codecVarGas: typeof codecVarGas;
 declare const index$e_codecVarU16: typeof codecVarU16;
-declare const index$
+declare const index$e_codecWithVersion: typeof codecWithVersion;
 declare const index$e_hashComparator: typeof hashComparator;
 declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
 declare const index$e_serviceDataCodec: typeof serviceDataCodec;
@@ -11285,7 +11324,7 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
 declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
 declare const index$e_zeroSizeHint: typeof zeroSizeHint;
 declare namespace index$e {
-  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$
+  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
   export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
 }
 
@@ -11353,7 +11392,7 @@ declare namespace stateKeys {
 }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
-  export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
+  export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11361,11 +11400,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
-  export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
+  export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11373,11 +11412,16 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
-  export function serviceLookupHistory(
+  export function serviceLookupHistory(
+    blake2b: Blake2b,
+    serviceId: ServiceId,
+    hash: PreimageHash,
+    preimageLength: U32,
+  ): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const doubleHash = blake2b.hashBytes(hash);
       const out = Bytes.zero(HASH_SIZE);
@@ -11386,11 +11430,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, preimageLength, hash);
+    return serviceNested(blake2b, serviceId, preimageLength, hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
-  export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
+  export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
     const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
     const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
     const key = Bytes.zero(HASH_SIZE);
@@ -11570,24 +11614,26 @@ declare namespace serialize {
   /** C(255, s): https://graypaper.fluffylabs.dev/#/85129da/383103383103?v=0.6.3 */
   export const serviceData = (serviceId: ServiceId) => ({
     key: stateKeys.serviceInfo(serviceId),
-    Codec:
+    Codec: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
+      ? codecWithVersion(ServiceAccountInfo.Codec)
+      : ServiceAccountInfo.Codec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
-  export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
-    key: stateKeys.serviceStorage(serviceId, key),
+  export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
+    key: stateKeys.serviceStorage(blake2b, serviceId, key),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
-  export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
-    key: stateKeys.servicePreimage(serviceId, hash),
+  export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
+    key: stateKeys.servicePreimage(blake2b, serviceId, hash),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
-  export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
-    key: stateKeys.serviceLookupHistory(serviceId, hash, len),
+  export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
+    key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
     Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
   });
 }
@@ -11622,6 +11668,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
 /** Serialize given state update into a series of key-value pairs. */
 declare function* serializeStateUpdate(
   spec: ChainSpec,
+  blake2b: Blake2b,
   update: Partial<State & ServicesUpdate>,
 ): Generator<StateEntryUpdate> {
   // first let's serialize all of the simple entries (if present!)
@@ -11630,9 +11677,9 @@ declare function* serializeStateUpdate(
   const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
 
   // then let's proceed with service updates
-  yield* serializeServiceUpdates(update.servicesUpdates, encode);
-  yield* serializePreimages(update.preimages, encode);
-  yield* serializeStorage(update.storage);
+  yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
+  yield* serializePreimages(update.preimages, encode, blake2b);
+  yield* serializeStorage(update.storage, blake2b);
   yield* serializeRemovedServices(update.servicesRemoved);
 }
 
@@ -11644,18 +11691,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
   }
 }
 
-declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
+declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of storage ?? []) {
     switch (action.kind) {
       case UpdateStorageKind.Set: {
         const key = action.storage.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
         break;
       }
       case UpdateStorageKind.Remove: {
         const key = action.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
         break;
       }
@@ -11665,16 +11712,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
   }
 }
 
-declare function* serializePreimages(
+declare function* serializePreimages(
+  preimages: UpdatePreimage[] | undefined,
+  encode: EncodeFun,
+  blake2b: Blake2b,
+): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of preimages ?? []) {
     switch (action.kind) {
       case UpdatePreimageKind.Provide: {
         const { hash, blob } = action.preimage;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Insert, codec.key, blob];
 
         if (action.slot !== null) {
-          const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
+          const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
           yield [
             StateEntryUpdateAction.Insert,
             codec2.key,
@@ -11685,16 +11736,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
       }
       case UpdatePreimageKind.UpdateOrAdd: {
         const { hash, length, slots } = action.item;
-        const codec = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
         break;
       }
       case UpdatePreimageKind.Remove: {
         const { hash, length } = action;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
 
-        const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
         break;
       }
@@ -11706,6 +11757,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
 declare function* serializeServiceUpdates(
   servicesUpdates: UpdateService[] | undefined,
   encode: EncodeFun,
+  blake2b: Blake2b,
 ): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of servicesUpdates ?? []) {
     // new service being created or updated
@@ -11715,7 +11767,7 @@ declare function* serializeServiceUpdates(
     // additional lookup history update
     if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
       const { lookupHistory } = action;
-      const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
+      const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
       yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
     }
   }
@@ -11849,8 +11901,8 @@ declare class StateEntries {
   );
 
   /** Turn in-memory state into it's serialized form. */
-  static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
-    return new StateEntries(convertInMemoryStateToDictionary(spec, state));
+  static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
+    return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
   }
 
   /**
@@ -11905,7 +11957,8 @@ declare class StateEntries {
   }
 
   /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
-  getRootHash(): StateRootHash {
+  getRootHash(blake2b: Blake2b): StateRootHash {
+    const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
     const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
     for (const [key, value] of this) {
       leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
@@ -11918,6 +11971,7 @@ declare class StateEntries {
 /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
 declare function convertInMemoryStateToDictionary(
   spec: ChainSpec,
+  blake2b: Blake2b,
   state: InMemoryState,
 ): TruncatedHashDictionary<StateKey, BytesBlob> {
   const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11950,20 +12004,25 @@ declare function convertInMemoryStateToDictionary(
 
     // preimages
     for (const preimage of service.data.preimages.values()) {
-      const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
+      const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
       serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
     }
 
     // storage
     for (const storage of service.data.storage.values()) {
-      const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
+      const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
       serialized.set(key, Encoder.encodeObject(Codec, storage.value));
     }
 
    // lookup history
    for (const lookupHistoryList of service.data.lookupHistory.values()) {
      for (const lookupHistory of lookupHistoryList) {
-        const { key, Codec } = serialize.serviceLookupHistory(
+        const { key, Codec } = serialize.serviceLookupHistory(
+          blake2b,
+          serviceId,
+          lookupHistory.hash,
+          lookupHistory.length,
+        );
         serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
       }
     }
@@ -11994,21 +12053,23 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
   implements State, EnumerableState
 {
   /** Create a state-like object from collection of serialized entries. */
-  static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
-    return new SerializedState(spec, state, recentServices);
+  static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
+    return new SerializedState(spec, blake2b, state, recentServices);
   }
 
   /** Create a state-like object backed by some DB. */
   static new<T extends SerializedStateBackend>(
     spec: ChainSpec,
+    blake2b: Blake2b,
     db: T,
     recentServices: ServiceId[] = [],
   ): SerializedState<T> {
-    return new SerializedState(spec, db, recentServices);
+    return new SerializedState(spec, blake2b, db, recentServices);
   }
 
   private constructor(
     private readonly spec: ChainSpec,
+    private readonly blake2b: Blake2b,
     public backend: T,
     /** Best-effort list of recently active services. */
     private readonly _recentServiceIds: ServiceId[],
@@ -12039,7 +12100,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
       this._recentServiceIds.push(id);
     }
 
-    return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
+    return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
   }
 
   private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
@@ -12138,6 +12199,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
 /** Service data representation on a serialized state. */
 declare class SerializedService implements Service {
   constructor(
+    public readonly blake2b: Blake2b,
     /** Service id */
     public readonly serviceId: ServiceId,
     private readonly accountInfo: ServiceAccountInfo,
@@ -12153,14 +12215,14 @@ declare class SerializedService implements Service {
   getStorage(rawKey: StorageKey): BytesBlob | null {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const SERVICE_ID_BYTES = 4;
-      const serviceIdAndKey =
+      const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
       serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
       serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
-      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
-      return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
+      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
+      return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
     }
 
-    return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
+    return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
   }
 
   /**
@@ -12170,17 +12232,17 @@ declare class SerializedService implements Service {
   */
   hasPreimage(hash: PreimageHash): boolean {
     // TODO [ToDr] consider optimizing to avoid fetching the whole data.
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
   }
 
   /** Retrieve preimage from the DB. */
   getPreimage(hash: PreimageHash): BytesBlob | null {
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
  }
 
   /** Retrieve preimage lookup history. */
   getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
-    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
+    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
     if (rawSlots === undefined) {
       return null;
     }
@@ -12193,9 +12255,9 @@ type KeyAndCodec<T> = {
   Codec: Decode<T>;
 };
 
-declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
+declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
   const stateEntries = StateEntries.fromEntriesUnsafe(entries);
-  return SerializedState.fromStateEntries(spec, stateEntries);
+  return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
 }
 
 /**
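Threading the Blake2b instance through the state APIs means every entry point that hashes state keys now takes it explicitly; a migration sketch for the load path (`spec`, `entries`, and `inMemoryState` are illustrative):

```ts
// Sketch only: the state-loading call sites after this change.
// before: const state = loadState(spec, entries);
const blake2b = await Blake2b.createHasher();
const state = loadState(spec, blake2b, entries);
// Root computation likewise needs the hasher now:
const root = StateEntries.serializeInMemory(spec, blake2b, inMemoryState).getRootHash(blake2b);
```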
@@ -12351,7 +12413,8 @@ declare class LeafDb implements SerializedStateBackend {
|
|
|
12351
12413
|
assertNever(val);
|
|
12352
12414
|
}
|
|
12353
12415
|
|
|
12354
|
-
getStateRoot(): StateRootHash {
|
|
12416
|
+
getStateRoot(blake2b: Blake2b): StateRootHash {
|
|
12417
|
+
const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
|
|
12355
12418
|
return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
|
|
12356
12419
|
}
|
|
12357
12420
|
|
|
@@ -12449,7 +12512,8 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
   }

   async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
-
+    const blake2b = await Blake2b.createHasher();
+    return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
   }

   /** Insert a full state into the database. */
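Here the hasher is created per `getStateRoot` call rather than stored on the class. A hedged sketch of how a caller sees this, plus the obvious alternative if root computation is hot (the caching is an assumption, not something this diff does):

// As shipped: each call awaits a fresh hasher internally.
const root = await states.getStateRoot(state);

// Hypothetical alternative a consumer could build: reuse one hasher
// by serializing the state explicitly, as the new method body does.
const blake2b = await Blake2b.createHasher();
const root2 = StateEntries.serializeInMemory(spec, blake2b, state).getRootHash(blake2b);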
@@ -12554,7 +12618,7 @@ declare function padAndEncodeData(input: BytesBlob) {
   const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
   let padded = input;
   if (input.length !== paddedLength) {
-    padded = BytesBlob.blobFrom(
+    padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
     padded.raw.set(input.raw, 0);
   }
   return chunkingFunction(padded);
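The padding rule rounds the input length up to the next multiple of `PIECE_SIZE`, and the tail stays zero-filled because the freshly allocated buffer is zeroed. A worked sketch (the concrete `PIECE_SIZE` value below is for illustration only, not taken from this package):

// Round the length up to a whole number of pieces.
const PIECE_SIZE = 4104; // illustrative value
function paddedLength(len: number): number {
  return Math.ceil(len / PIECE_SIZE) * PIECE_SIZE;
}
// paddedLength(0) === 0, paddedLength(1) === 4104, paddedLength(4105) === 8208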
@@ -12610,7 +12674,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
  */
 declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
   const result: Bytes<POINT_LENGTH>[] = [];
-  const data =
+  const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);

   // add original shards to the result
   for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12630,7 +12694,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
   for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
     const pointIndex = i * POINT_ALIGNMENT;

-    const redundancyPoint =
+    const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
     for (let j = 0; j < POINT_LENGTH; j++) {
       redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
     }
@@ -12650,7 +12714,7 @@ declare function decodePiece(
 ): Bytes<PIECE_SIZE> {
   const result = Bytes.zero(PIECE_SIZE);

-  const data =
+  const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
   const indices = new Uint16Array(input.length);

   for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12777,7 +12841,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
     return BytesBlob.empty();
   }
   const n = input[0].length;
-  const result = BytesBlob.blobFrom(
+  const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
   for (let i = 0; i < k; i++) {
     const entry = input[i].raw;
     for (let j = 0; j < n; j++) {
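`lace` interleaves `k` equal-length blobs into one buffer of `k * n` bytes. The inner assignment falls outside this hunk, so the indexing below is an assumed reconstruction of byte-wise interleaving, not the package's verified layout:

// Hedged sketch with plain arrays: byte j of input i lands at j * k + i.
function laceSketch(input: Uint8Array[]): Uint8Array {
  const k = input.length;
  if (k === 0) {
    return new Uint8Array(0);
  }
  const n = input[0].length;
  const result = new Uint8Array(k * n);
  for (let i = 0; i < k; i++) {
    const entry = input[i];
    for (let j = 0; j < n; j++) {
      result[j * k + i] = entry[j];
    }
  }
  return result;
}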
@@ -13675,13 +13739,12 @@ interface PartialState {

   /**
    * Transfer given `amount` of funds to the `destination`,
-   * passing `
-   * and given `memo`.
+   * passing `gas` fee for transfer and given `memo`.
    */
   transfer(
     destination: ServiceId | null,
     amount: U64,
-
+    gas: ServiceGas,
     memo: Bytes<TRANSFER_MEMO_BYTES>,
   ): Result$2<OK, TransferError>;

@@ -13850,7 +13913,7 @@ declare class Mask {
   }

   private buildLookupTableForward(mask: BitVec) {
-    const table =
+    const table = safeAllocUint8Array(mask.bitLength);
     let lastInstructionOffset = 0;
     for (let i = mask.bitLength - 1; i >= 0; i--) {
       if (mask.isSet(i)) {
@@ -13994,7 +14057,7 @@ declare class Registers {
   private asSigned: BigInt64Array;
   private asUnsigned: BigUint64Array;

-  constructor(private readonly bytes =
+  constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
     check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
     this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
     this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
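The register file is one byte buffer aliased by signed and unsigned 64-bit views, so a single write is visible under both interpretations. A self-contained sketch (the register count and size below are assumptions for illustration, mirroring the shift in the hunk):

// 13 registers of 8 bytes each -- assumed values for the example only.
const NO_OF_REGISTERS = 13;
const REGISTER_SIZE_SHIFT = 3;
const bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT);
const asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
const asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
asUnsigned[0] = 0xffff_ffff_ffff_ffffn;
// asSigned[0] === -1n: both views alias the same storage.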
@@ -18052,9 +18115,15 @@ type HostCallIndex = Opaque<U32, "HostCallIndex[U32]">;
 /** Attempt to convert a number into `HostCallIndex`. */
 declare const tryAsHostCallIndex = (v: number): HostCallIndex => asOpaqueType(tryAsU32(v));

+/**
+ * Host-call exit reason.
+ *
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/24a30124a501?v=0.7.2
+ */
 declare enum PvmExecution {
   Halt = 0,
   Panic = 1,
+  OOG = 2, // out-of-gas
 }

 /** A utility function to easily trace a bunch of registers. */
@@ -18067,8 +18136,12 @@ interface HostCallHandler {
   /** Index of that host call (i.e. what PVM invokes via `ecalli`) */
   readonly index: HostCallIndex;

-  /**
-
+  /**
+   * The gas cost of invocation of that host call.
+   *
+   * NOTE: `((reg: IHostCallRegisters) => Gas)` function is for compatibility reasons: pre GP 0.7.2
+   */
+  readonly basicGasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);

   /** Currently executing service id. */
   readonly currentServiceId: U32;
@@ -18211,7 +18284,7 @@ declare class HostCalls {
     const maybeAddress = regs.getLowerU32(7);
     const maybeLength = regs.getLowerU32(8);

-    const result =
+    const result = safeAllocUint8Array(maybeLength);
     const startAddress = tryAsMemoryIndex(maybeAddress);
     const loadResult = memory.loadInto(result, startAddress);

@@ -18244,8 +18317,10 @@ declare class HostCalls {

     const hostCall = this.hostCalls.get(index);
     const gasBefore = gas.get();
-
-    const
+    // NOTE: `basicGasCost(regs)` function is for compatibility reasons: pre GP 0.7.2
+    const basicGasCost =
+      typeof hostCall.basicGasCost === "number" ? hostCall.basicGasCost : hostCall.basicGasCost(regs);
+    const underflow = gas.sub(basicGasCost);

     const pcLog = `[PC: ${pvmInstance.getPC()}]`;
     if (underflow) {
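The hunk above discriminates on `typeof` to support both gas-cost shapes. A condensed standalone sketch of the same shim (type names simplified; the `Gas` stand-in is an assumption):

// Pre-0.7.2 host calls expose a function of the registers; newer ones a flat number.
type Gas = bigint;
type BasicGasCost<R> = number | ((regs: R) => Gas);
function resolveBasicGasCost<R>(cost: BasicGasCost<R>, regs: R): Gas {
  return typeof cost === "number" ? BigInt(cost) : cost(regs);
}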
@@ -18272,6 +18347,11 @@ declare class HostCalls {
       return this.getReturnValue(status, pvmInstance);
     }

+    if (result === PvmExecution.OOG) {
+      status = Status.OOG;
+      return this.getReturnValue(status, pvmInstance);
+    }
+
     if (result === undefined) {
       pvmInstance.runProgram();
       status = pvmInstance.getStatus();
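With `PvmExecution.OOG` added, a host call can now short-circuit execution with an out-of-gas status, just like the existing Halt/Panic paths. A condensed sketch of the dispatch order implied by this and the surrounding hunks (control flow outside the hunks is assumed):

// Enum values copied from the PvmExecution change earlier in this diff.
enum PvmExec { Halt = 0, Panic = 1, OOG = 2 }
function mapExit(result: PvmExec | undefined): "halt" | "panic" | "oog" | "resume" {
  if (result === PvmExec.OOG) return "oog"; // new in this version
  if (result === PvmExec.Halt) return "halt";
  if (result === PvmExec.Panic) return "panic";
  return "resume"; // undefined: keep running the program
}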
@@ -18643,7 +18723,7 @@ declare class DebuggerAdapter {

     if (page === null) {
       // page wasn't allocated so we return an empty page
-      return
+      return safeAllocUint8Array(PAGE_SIZE);
     }

     if (page.length === PAGE_SIZE) {
@@ -18652,7 +18732,7 @@ declare class DebuggerAdapter {
     }

     // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
-    const fullPage =
+    const fullPage = safeAllocUint8Array(PAGE_SIZE);
     fullPage.set(page);
     return fullPage;
   }
@@ -18845,10 +18925,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
  *
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
-declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
+declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
   check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
   const n = arr.length;
-  const randomNumbers = hashToNumberSequence(entropy, arr.length);
+  const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
   const result: T[] = new Array<T>(n);

   let itemsLeft = n;
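The shuffle consumes one hash-derived number per pick and moves the tail element into the vacated slot, i.e. sampling without replacement. A generic, hedged sketch with caller-supplied random numbers standing in for the Blake2b-driven `hashToNumberSequence`:

// Deterministic Fisher-Yates-style selection: pick index r % itemsLeft,
// fill the hole with the last live pool element, shrink the pool.
function shuffleWith<T>(arr: T[], randomNumbers: number[]): T[] {
  const pool = arr.slice();
  const result: T[] = [];
  let itemsLeft = pool.length;
  for (let i = 0; i < arr.length; i++) {
    const index = randomNumbers[i] % itemsLeft;
    result.push(pool[index]);
    pool[index] = pool[itemsLeft - 1];
    itemsLeft -= 1;
  }
  return result;
}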
@@ -19020,8 +19100,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
     timeout: "number",
   },
   ({ report, timeout }) => {
-
-    return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
+    return AvailabilityAssignment.create({ workReport: report, timeout });
   },
 );

@@ -19522,7 +19601,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   constructor(
     private readonly context: ChainSpec,
     private readonly keccakHasher: KeccakHasher,
-
+    public readonly blake2b: Blake2b,
   ) {}

   /** Concatenates two hashes and hash this concatenation */
@@ -19536,7 +19615,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {

   /** Creates hash from the block header view */
   header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
-    return new WithHash(blake2b.hashBytes(header.encoded()
+    return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
   }

   /**
@@ -19550,7 +19629,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
       .view()
       .map((g) => g.view())
       .map((guarantee) => {
-        const reportHash = blake2b.hashBytes(guarantee.report.encoded()
+        const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
         return BytesBlob.blobFromParts([
           reportHash.raw,
           guarantee.slot.encoded().raw,
@@ -19560,15 +19639,15 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {

     const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);

-    const et = blake2b.hashBytes(extrinsicView.tickets.encoded()
-    const ep = blake2b.hashBytes(extrinsicView.preimages.encoded()
-    const eg = blake2b.hashBytes(guaranteeBlob
-    const ea = blake2b.hashBytes(extrinsicView.assurances.encoded()
-    const ed = blake2b.hashBytes(extrinsicView.disputes.encoded()
+    const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
+    const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
+    const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
+    const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
+    const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();

     const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);

-    return new WithHashAndBytes(blake2b.hashBytes(encoded
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
   }

   /** Creates hash for given WorkPackage */
@@ -19579,7 +19658,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
     // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
     const encoded = Encoder.encodeObject(codec, data, this.context);
-    return new WithHashAndBytes(blake2b.hashBytes(encoded
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
   }
 }

@@ -19600,7 +19679,10 @@ declare enum PreimagesErrorCode {

 // TODO [SeKo] consider whether this module is the right place to remove expired preimages
 declare class Preimages {
-  constructor(
+  constructor(
+    public readonly state: PreimagesState,
+    public readonly blake2b: Blake2b,
+  ) {}

   integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
     // make sure lookup extrinsics are sorted and unique
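The `Preimages` transition now receives its hasher through the constructor instead of reaching for a module-level instance. A hedged wiring sketch (`state` of type `PreimagesState` is assumed to come from the caller):

// Assumed wiring: one shared hasher, injected at construction time.
const blake2b = await Blake2b.createHasher();
const preimages = new Preimages(state, blake2b);
const update = preimages.integrate(input); // Result<PreimagesStateUpdate, PreimagesErrorCode>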
@@ -19629,7 +19711,7 @@ declare class Preimages {
     // select preimages for integration
     for (const preimage of preimages) {
       const { requester, blob } = preimage;
-      const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
+      const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();

       const service = this.state.getService(requester);
       if (service === null) {
@@ -19660,156 +19742,6 @@ declare class Preimages {
   }
 }

-declare enum ServiceExecutorError {
-  NoLookup = 0,
-  NoState = 1,
-  NoServiceCode = 2,
-  ServiceCodeMismatch = 3,
-}
-
-declare class WorkPackageExecutor {
-  constructor(
-    private readonly blocks: BlocksDb,
-    private readonly state: StatesDb,
-    private readonly hasher: TransitionHasher,
-  ) {}
-
-  // TODO [ToDr] this while thing should be triple-checked with the GP.
-  // I'm currently implementing some dirty version for the demo.
-  async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
-    const headerHash = pack.context.lookupAnchor;
-    // execute authorisation first or is it already executed and we just need to check it?
-    const authExec = this.getServiceExecutor(
-      // TODO [ToDr] should this be anchor or lookupAnchor?
-      headerHash,
-      pack.authCodeHost,
-      pack.authCodeHash,
-    );
-
-    if (authExec.isError) {
-      // TODO [ToDr] most likely shouldn't be throw.
-      throw new Error(`Could not get authorization executor: ${authExec.error}`);
-    }
-
-    const pvm = authExec.ok;
-    const authGas = tryAsGas(15_000n);
-    const result = await pvm.run(pack.parametrization, authGas);
-
-    if (!result.isEqualTo(pack.authorization)) {
-      throw new Error("Authorization is invalid.");
-    }
-
-    const results: WorkResult[] = [];
-    for (const item of pack.items) {
-      const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
-      if (exec.isError) {
-        throw new Error(`Could not get item executor: ${exec.error}`);
-      }
-      const pvm = exec.ok;
-
-      const gasRatio = tryAsServiceGas(3_000n);
-      const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
-      results.push(
-        WorkResult.create({
-          serviceId: item.service,
-          codeHash: item.codeHash,
-          payloadHash: blake2b.hashBytes(item.payload),
-          gas: gasRatio,
-          result: new WorkExecResult(WorkExecResultKind.ok, ret),
-          load: WorkRefineLoad.create({
-            gasUsed: tryAsServiceGas(5),
-            importedSegments: tryAsU32(0),
-            exportedSegments: tryAsU32(0),
-            extrinsicSize: tryAsU32(0),
-            extrinsicCount: tryAsU32(0),
-          }),
-        }),
-      );
-    }
-
-    const workPackage = this.hasher.workPackage(pack);
-    const workPackageSpec = WorkPackageSpec.create({
-      hash: workPackage.hash,
-      length: tryAsU32(workPackage.encoded.length),
-      erasureRoot: Bytes.zero(HASH_SIZE),
-      exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
-      exportsCount: tryAsU16(0),
-    });
-    const coreIndex = tryAsCoreIndex(0);
-    const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
-
-    const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
-
-    return Promise.resolve(
-      WorkReport.create({
-        workPackageSpec,
-        context: pack.context,
-        coreIndex,
-        authorizerHash,
-        authorizationOutput: pack.authorization,
-        segmentRootLookup: [],
-        results: workResults,
-        authorizationGasUsed: tryAsServiceGas(0),
-      }),
-    );
-  }
-
-  getServiceExecutor(
-    lookupAnchor: HeaderHash,
-    serviceId: ServiceId,
-    expectedCodeHash: CodeHash,
-  ): Result$2<PvmExecutor, ServiceExecutorError> {
-    const header = this.blocks.getHeader(lookupAnchor);
-    if (header === null) {
-      return Result.error(ServiceExecutorError.NoLookup);
-    }
-
-    const state = this.state.getState(lookupAnchor);
-    if (state === null) {
-      return Result.error(ServiceExecutorError.NoState);
-    }
-
-    const service = state.getService(serviceId);
-    const serviceCodeHash = service?.getInfo().codeHash ?? null;
-    if (serviceCodeHash === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
-      return Result.error(ServiceExecutorError.ServiceCodeMismatch);
-    }
-
-    const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
-    if (serviceCode === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    return Result.ok(new PvmExecutor(serviceCode));
-  }
-}
-
-declare class PvmExecutor {
-  private readonly pvm: HostCalls;
-  private hostCalls = new HostCallsManager({ missing: new Missing() });
-  private pvmInstanceManager = new PvmInstanceManager(4);
-
-  constructor(private serviceCode: BytesBlob) {
-    this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
-  }
-
-  async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
-    const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
-
-    const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
-
-    if (result.hasMemorySlice()) {
-      return BytesBlob.blobFrom(result.memorySlice);
-    }
-
-    return BytesBlob.empty();
-  }
-}
-
 type index_Preimages = Preimages;
 declare const index_Preimages: typeof Preimages;
 type index_PreimagesErrorCode = PreimagesErrorCode;
@@ -19819,10 +19751,8 @@ type index_PreimagesState = PreimagesState;
 type index_PreimagesStateUpdate = PreimagesStateUpdate;
 type index_TransitionHasher = TransitionHasher;
 declare const index_TransitionHasher: typeof TransitionHasher;
-type index_WorkPackageExecutor = WorkPackageExecutor;
-declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
 declare namespace index {
-  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher
+  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
 }
