@typeberry/lib 0.1.3-3f7b9cf → 0.1.3-47d06ae
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +700 -1368
- package/index.d.ts +805 -828
- package/index.js +699 -1367
- package/package.json +1 -1
package/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 declare enum GpVersion {
   V0_6_7 = "0.6.7",
   V0_7_0 = "0.7.0",
-  V0_7_1 = "0.7.1
+  V0_7_1 = "0.7.1",
   V0_7_2 = "0.7.2-preview",
 }
 
@@ -11,12 +11,12 @@ declare enum TestSuite {
 }
 
 declare const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
-
-declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
-declare const DEFAULT_VERSION = GpVersion.V0_7_0;
+declare const DEFAULT_VERSION = GpVersion.V0_7_1;
 declare let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
 declare let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
 
+declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
+
 declare function parseCurrentVersion(env?: string): GpVersion | undefined {
   if (env === undefined) {
     return undefined;
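This hunk bumps the default Gray Paper version from 0.7.0 to 0.7.1 and moves `ALL_VERSIONS_IN_ORDER` below the `CURRENT_VERSION`/`CURRENT_SUITE` declarations. Version-gated codecs later in this diff call `Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)`; that helper's implementation is not part of this diff, so the sketch below is only a plausible reading, assuming versions compare by their position in `ALL_VERSIONS_IN_ORDER` and that the names are re-exported from the package root:

```ts
import { ALL_VERSIONS_IN_ORDER, CURRENT_VERSION, GpVersion } from "@typeberry/lib"; // assumed re-exports

// Hypothetical helper, NOT the package's actual implementation:
// a version is "at least" another if it appears no earlier in the ordered list.
function isGreaterOrEqual(version: GpVersion): boolean {
  return ALL_VERSIONS_IN_ORDER.indexOf(CURRENT_VERSION) >= ALL_VERSIONS_IN_ORDER.indexOf(version);
}

isGreaterOrEqual(GpVersion.V0_7_1); // true under the new DEFAULT_VERSION (0.7.1)
```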
@@ -35,7 +35,9 @@ declare function parseCurrentVersion(env?: string): GpVersion | undefined {
 }
 
 declare function parseCurrentSuite(env?: string): TestSuite | undefined {
-  if (env === undefined)
+  if (env === undefined) {
+    return undefined;
+  }
   switch (env) {
     case TestSuite.W3F_DAVXY:
     case TestSuite.JAMDUNA:
@@ -420,6 +422,20 @@ declare const Result$2 = {
   },
 };
 
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+declare const MAX_LENGTH$1 = 2145386496;
+
+declare function safeAllocUint8Array(length: number) {
+  if (length > MAX_LENGTH) {
+    // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+    console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+  }
+  return new Uint8Array(Math.min(MAX_LENGTH, length));
+}
+
 /**
  * Utilities for tests.
  */
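The new `safeAllocUint8Array` helper caps allocations at roughly 2 GB instead of letting `new Uint8Array(length)` throw a `RangeError`; later hunks route `BytesBlob.blobFromParts`, `Bytes.zero`, `BitVec.empty`, the ed25519 batch `verify`, and `TrieNode` through it. A minimal restatement of the contract, using only the values shown in the hunk above:

```ts
const MAX_LENGTH = 2145386496; // the ~2 GB ArrayBuffer ceiling cited above

function safeAllocUint8Array(length: number): Uint8Array {
  if (length > MAX_LENGTH) {
    // Oversized requests warn instead of throwing...
    console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
  }
  // ...and the returned buffer is silently clamped to MAX_LENGTH.
  return new Uint8Array(Math.min(MAX_LENGTH, length));
}

safeAllocUint8Array(64).length; // 64
safeAllocUint8Array(MAX_LENGTH + 1).length; // 2145386496, after a console warning
```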
@@ -573,8 +589,12 @@ declare function deepEqual<T>(
     const aKey = `${a.key}`;
     const bKey = `${b.key}`;
 
-    if (aKey < bKey)
-
+    if (aKey < bKey) {
+      return -1;
+    }
+    if (bKey < aKey) {
+      return 1;
+    }
     return 0;
   });
 };
@@ -755,11 +775,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
 declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
 declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
 declare const index$u_resultToString: typeof resultToString;
+declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
 declare const index$u_seeThrough: typeof seeThrough;
 declare const index$u_trimStack: typeof trimStack;
 declare const index$u_workspacePathFix: typeof workspacePathFix;
 declare namespace index$u {
-  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
+  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
   export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
 }
 
@@ -929,7 +950,7 @@ declare class BytesBlob {
   static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
     const vArr = v instanceof Uint8Array ? [v] : v;
     const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-    const buffer =
+    const buffer = safeAllocUint8Array(totalLength);
     let offset = 0;
     for (const r of vArr) {
       buffer.set(r, offset);
@@ -1012,7 +1033,7 @@ declare class Bytes<T extends number> extends BytesBlob {
 
   /** Create an empty [`Bytes<X>`] of given length. */
   static zero<X extends number>(len: X): Bytes<X> {
-    return new Bytes(
+    return new Bytes(safeAllocUint8Array(len), len);
   }
 
   // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1154,7 @@ declare class BitVec {
    * Create new [`BitVec`] with all values set to `false`.
    */
   static empty(bitLength: number) {
-    const data =
+    const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
     return new BitVec(data, bitLength);
   }
 
@@ -3461,6 +3482,99 @@ declare namespace index$q {
   export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
 }
 
+/**
+ * A utility class providing a readonly view over a portion of an array without copying it.
+ */
+declare class ArrayView<T> implements Iterable<T> {
+  private readonly source: T[];
+  public readonly length: number;
+
+  private constructor(
+    source: T[],
+    private readonly start: number,
+    private readonly end: number,
+  ) {
+    this.source = source;
+    this.length = end - start;
+  }
+
+  static from<T>(source: T[], start = 0, end = source.length): ArrayView<T> {
+    check`
+      ${start >= 0 && end <= source.length && start <= end}
+      Invalid start (${start})/end (${end}) for ArrayView
+    `;
+    return new ArrayView(source, start, end);
+  }
+
+  get(i: number): T {
+    check`
+      ${i >= 0 && i < this.length}
+      Index out of bounds: ${i} < ${this.length}
+    `;
+    return this.source[this.start + i];
+  }
+
+  subview(from: number, to: number = this.length): ArrayView<T> {
+    return ArrayView.from(this.source, this.start + from, this.start + to);
+  }
+
+  toArray(): T[] {
+    return this.source.slice(this.start, this.end);
+  }
+
+  *[Symbol.iterator](): Iterator<T> {
+    for (let i = this.start; i < this.end; i++) {
+      yield this.source[i];
+    }
+  }
+}
+
+type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
+type IDataType = string | Buffer | ITypedArray;
+
+type IHasher = {
+  /**
+   * Initializes hash state to default value
+   */
+  init: () => IHasher;
+  /**
+   * Updates the hash content with the given data
+   */
+  update: (data: IDataType) => IHasher;
+  /**
+   * Calculates the hash of all of the data passed to be hashed with hash.update().
+   * Defaults to hexadecimal string
+   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
+   * returns hexadecimal string
+   */
+  digest: {
+    (outputType: "binary"): Uint8Array;
+    (outputType?: "hex"): string;
+  };
+  /**
+   * Save the current internal state of the hasher for later resumption with load().
+   * Cannot be called before .init() or after .digest()
+   *
+   * Note that this state can include arbitrary information about the value being hashed (e.g.
+   * could include N plaintext bytes from the value), so needs to be treated as being as
+   * sensitive as the input value itself.
+   */
+  save: () => Uint8Array;
+  /**
+   * Resume a state that was created by save(). If this state was not created by a
+   * compatible build of hash-wasm, an exception will be thrown.
+   */
+  load: (state: Uint8Array) => IHasher;
+  /**
+   * Block size in bytes
+   */
+  blockSize: number;
+  /**
+   * Digest size in bytes
+   */
+  digestSize: number;
+};
+
 /**
  * Size of the output of the hash functions.
  *
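`ArrayView` (exported from the collections namespace in a later hunk) is a zero-copy, bounds-checked window over a plain array. A short usage sketch based on the declarations above; the import path is an assumption:

```ts
import { ArrayView } from "@typeberry/lib"; // assumed root re-export

const data = [10, 20, 30, 40, 50];
const view = ArrayView.from(data, 1, 4); // window over [20, 30, 40]; no copy is made

view.length;     // 3
view.get(0);     // 20 — `check` asserts 0 <= i < length
view.subview(1); // nested view over [30, 40], still backed by `data`
[...view];       // [20, 30, 40] — iteration walks the underlying array
view.toArray();  // [20, 30, 40] — the only call that copies
```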
@@ -3516,144 +3630,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
   }
 }
 
-
-interface HashAllocator {
-  /** Return a new hash destination. */
-  emptyHash(): OpaqueHash;
-}
-
-/** The simplest allocator returning just a fresh copy of bytes each time. */
-declare class SimpleAllocator implements HashAllocator {
-  emptyHash(): OpaqueHash {
-    return Bytes.zero(HASH_SIZE);
-  }
-}
-
-/** An allocator that works by allocating larger (continuous) pages of memory. */
-declare class PageAllocator implements HashAllocator {
-  private page: Uint8Array = new Uint8Array(0);
-  private currentHash = 0;
+declare const zero$1 = Bytes.zero(HASH_SIZE);
 
-
-
-
-    this.resetPage();
+declare class Blake2b {
+  static async createHasher() {
+    return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
   }
 
-  private
-    const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
-    this.currentHash = 0;
-    this.page = new Uint8Array(pageSizeBytes);
-  }
-
-  emptyHash(): OpaqueHash {
-    const startIdx = this.currentHash * HASH_SIZE;
-    const endIdx = startIdx + HASH_SIZE;
+  private constructor(private readonly hasher: IHasher) {}
 
-
-
-
+  /**
+   * Hash given collection of blobs.
+   *
+   * If empty array is given a zero-hash is returned.
+   */
+  hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
+    if (r.length === 0) {
+      return zero.asOpaque();
     }
 
-
+    const hasher = this.hasher.init();
+    for (const v of r) {
+      hasher.update(v instanceof BytesBlob ? v.raw : v);
+    }
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
-}
-
-declare const defaultAllocator = new SimpleAllocator();
 
-/**
-
-
-
-
-
-  r: (BytesBlob | Uint8Array)[],
-  allocator: HashAllocator = defaultAllocator,
-): H {
-  const out = allocator.emptyHash();
-  if (r.length === 0) {
-    return out.asOpaque();
+  /** Hash given blob of bytes. */
+  hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
+    const hasher = this.hasher.init();
+    const bytes = blob instanceof BytesBlob ? blob.raw : blob;
+    hasher.update(bytes);
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
 
-
-
-
+  /** Convert given string into bytes and hash it. */
+  hashString(str: string) {
+    return this.hashBytes(BytesBlob.blobFromString(str));
   }
-  hasher?.digest(out.raw);
-  return out.asOpaque();
-}
-
-/** Hash given blob of bytes. */
-declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
-  const hasher = blake2b(HASH_SIZE);
-  const bytes = blob instanceof BytesBlob ? blob.raw : blob;
-  hasher?.update(bytes);
-  const out = allocator.emptyHash();
-  hasher?.digest(out.raw);
-  return out;
-}
-
-/** Convert given string into bytes and hash it. */
-declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
-  return hashBytes(BytesBlob.blobFromString(str), allocator);
-}
-
-declare const blake2b_hashBytes: typeof hashBytes;
-declare const blake2b_hashString: typeof hashString;
-declare namespace blake2b {
-  export {
-    hashBlobs$1 as hashBlobs,
-    blake2b_hashBytes as hashBytes,
-    blake2b_hashString as hashString,
-  };
 }
 
-type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
-type IDataType = string | Buffer | ITypedArray;
-
-type IHasher = {
-  /**
-   * Initializes hash state to default value
-   */
-  init: () => IHasher;
-  /**
-   * Updates the hash content with the given data
-   */
-  update: (data: IDataType) => IHasher;
-  /**
-   * Calculates the hash of all of the data passed to be hashed with hash.update().
-   * Defaults to hexadecimal string
-   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
-   * returns hexadecimal string
-   */
-  digest: {
-    (outputType: "binary"): Uint8Array;
-    (outputType?: "hex"): string;
-  };
-  /**
-   * Save the current internal state of the hasher for later resumption with load().
-   * Cannot be called before .init() or after .digest()
-   *
-   * Note that this state can include arbitrary information about the value being hashed (e.g.
-   * could include N plaintext bytes from the value), so needs to be treated as being as
-   * sensitive as the input value itself.
-   */
-  save: () => Uint8Array;
-  /**
-   * Resume a state that was created by save(). If this state was not created by a
-   * compatible build of hash-wasm, an exception will be thrown.
-   */
-  load: (state: Uint8Array) => IHasher;
-  /**
-   * Block size in bytes
-   */
-  blockSize: number;
-  /**
-   * Digest size in bytes
-   */
-  digestSize: number;
-};
-
 declare class KeccakHasher {
   static async create(): Promise<KeccakHasher> {
     return new KeccakHasher(await createKeccak(256));
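The module-level `blake2b.hashBytes`/`hashString` functions and the whole `HashAllocator`/`SimpleAllocator`/`PageAllocator` machinery are replaced by a `Blake2b` class that wraps a hash-wasm `IHasher` created once via `createBLAKE2b`. A sketch of the new call pattern, with the import path assumed:

```ts
import { Blake2b, BytesBlob } from "@typeberry/lib"; // assumed root re-exports

// One async WASM initialization; the instance is then reused for every hash.
const blake2b = await Blake2b.createHasher();

const h1 = blake2b.hashString("hello"); // 32-byte Blake2bHash
const h2 = blake2b.hashBytes(new Uint8Array([1, 2, 3]));
const h3 = blake2b.hashBlobs([BytesBlob.blobFromString("a"), new Uint8Array([0xff])]);
const h0 = blake2b.hashBlobs([]); // zero-hash, per the doc comment above
```

Making the hasher an explicit instance pushes the async WASM setup to one place and removes the optional-chaining (`hasher?.update(...)`) of the old implementation.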
@@ -3681,15 +3697,15 @@ declare namespace keccak {
   };
 }
 
+// TODO [ToDr] (#213) this should most likely be moved to a separate
+// package to avoid pulling in unnecessary deps.
+
+type index$p_Blake2b = Blake2b;
+declare const index$p_Blake2b: typeof Blake2b;
 type index$p_Blake2bHash = Blake2bHash;
 type index$p_HASH_SIZE = HASH_SIZE;
-type index$p_HashAllocator = HashAllocator;
 type index$p_KeccakHash = KeccakHash;
 type index$p_OpaqueHash = OpaqueHash;
-type index$p_PageAllocator = PageAllocator;
-declare const index$p_PageAllocator: typeof PageAllocator;
-type index$p_SimpleAllocator = SimpleAllocator;
-declare const index$p_SimpleAllocator: typeof SimpleAllocator;
 type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
 type index$p_TruncatedHash = TruncatedHash;
 type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3697,12 +3713,10 @@ declare const index$p_WithHash: typeof WithHash;
 type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
 declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
 declare const index$p_ZERO_HASH: typeof ZERO_HASH;
-declare const index$p_blake2b: typeof blake2b;
-declare const index$p_defaultAllocator: typeof defaultAllocator;
 declare const index$p_keccak: typeof keccak;
 declare namespace index$p {
-  export { index$
-  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$
+  export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
+  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
 }
 
 /** Immutable view of the `HashDictionary`. */
@@ -4479,6 +4493,8 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
   }
 }
 
+type index$o_ArrayView<T> = ArrayView<T>;
+declare const index$o_ArrayView: typeof ArrayView;
 type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
 declare const index$o_FixedSizeArray: typeof FixedSizeArray;
 type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
@@ -4506,7 +4522,7 @@ type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDic
 declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
 declare const index$o_asKnownSize: typeof asKnownSize;
 declare namespace index$o {
-  export { index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
+  export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
   export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
 }
 
@@ -4735,7 +4751,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
     (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
     0,
   );
-  const data =
+  const data = safeAllocUint8Array(dataLength);
 
   let offset = 0;
 
@@ -4825,22 +4841,16 @@ declare function trivialSeed(s: U32): KeySeed {
  * Derives a Ed25519 secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveEd25519SecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): Ed25519SecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
  * Derives a Bandersnatch secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveBandersnatchSecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): BandersnatchSecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
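Both derivation functions now take the `Blake2b` instance explicitly instead of an optional `SimpleAllocator`. A sketch of an updated call site; `trivialSeed` and `tryAsU32` are declared elsewhere in this file, and the import path is an assumption:

```ts
import { Blake2b } from "@typeberry/lib"; // assumed root re-export

const blake2b = await Blake2b.createHasher();
const seed = trivialSeed(tryAsU32(0));

// The hasher is a required argument now; there is no default-allocator fallback.
const ed25519Secret = deriveEd25519SecretKey(seed, blake2b);
const bandersnatchSecret = deriveBandersnatchSecretKey(seed, blake2b);
```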
@@ -8373,7 +8383,7 @@ declare enum NodeType {
 declare class TrieNode {
   constructor(
     /** Exactly 512 bits / 64 bytes */
-    public readonly raw: Uint8Array =
+    public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
   ) {}
 
   /** Returns the type of the node */
@@ -9111,21 +9121,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
   return Ordering.Equal;
 }
 
-declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
-  Descriptor.withView(
-    val.name,
-    val.sizeHint,
-    (e, elem) => val.encode(e, elem.data),
-    (d): WithHash<H, T> => {
-      const decoder2 = d.clone();
-      const encoded = val.skipEncoded(decoder2);
-      const hash = blake2b.hashBytes(encoded);
-      return new WithHash(hash.asOpaque(), val.decode(d));
-    },
-    val.skip,
-    val.View,
-  );
-
 /**
  * Assignment of particular work report to a core.
  *
@@ -9136,7 +9131,7 @@ declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>
  */
 declare class AvailabilityAssignment extends WithDebug {
   static Codec = codec.Class(AvailabilityAssignment, {
-    workReport:
+    workReport: WorkReport.Codec,
     timeout: codec.u32.asOpaque<TimeSlot>(),
   });
 
@@ -9146,7 +9141,7 @@ declare class AvailabilityAssignment extends WithDebug {
 
   private constructor(
     /** Work report assigned to a core. */
-    public readonly workReport:
+    public readonly workReport: WorkReport,
     /** Time slot at which the report becomes obsolete. */
     public readonly timeout: TimeSlot,
   ) {
@@ -9250,8 +9245,6 @@ declare function hashComparator<V extends OpaqueHash>(a: V, b: V) {
   return a.compare(b);
 }
 
-// TODO [ToDr] Not sure where these should live yet :(
-
 /**
  * `J`: The maximum sum of dependency items in a work-report.
  *
@@ -9305,83 +9298,301 @@ declare class NotYetAccumulatedReport extends WithDebug {
   }
 }
 
-/** Dictionary entry of services that auto-accumulate every block. */
-declare class AutoAccumulate {
-  static Codec = codec.Class(AutoAccumulate, {
-    service: codec.u32.asOpaque<ServiceId>(),
-    gasLimit: codec.u64.asOpaque<ServiceGas>(),
-  });
-
-  static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
-    return new AutoAccumulate(service, gasLimit);
-  }
-
-  private constructor(
-    /** Service id that auto-accumulates. */
-    readonly service: ServiceId,
-    /** Gas limit for auto-accumulation. */
-    readonly gasLimit: ServiceGas,
-  ) {}
-}
-
 /**
- *
+ * `B_S`: The basic minimum balance which all services require.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
  */
-declare
-
-
-
-
-
-
-
-
-
-
-
-
-  /**
-   * `chi_m`: The first, χm, is the index of the manager service which is
-   * the service able to effect an alteration of χ from block to block,
-   * as well as bestow services with storage deposit credits.
-   * https://graypaper.fluffylabs.dev/#/7e6ff6a/11a40111a801?v=0.6.7
-   */
-  readonly manager: ServiceId,
-  /** `chi_a`: Manages authorization queue one for each core. */
-  readonly authManager: PerCore<ServiceId>,
-  /** `chi_v`: Managers validator keys. */
-  readonly validatorsManager: ServiceId,
-  /** `chi_g`: Dictionary of services that auto-accumulate every block with their gas limit. */
-  readonly autoAccumulateServices: readonly AutoAccumulate[],
-  ) {}
-}
+declare const BASE_SERVICE_BALANCE = 100n;
+/**
+ * `B_I`: The additional minimum balance required per item of elective service state.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
+ */
+declare const ELECTIVE_ITEM_BALANCE = 10n;
+/**
+ * `B_L`: The additional minimum balance required per octet of elective service state.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
+ */
+declare const ELECTIVE_BYTE_BALANCE = 1n;
 
-declare const
+declare const zeroSizeHint: SizeHint = {
+  bytes: 0,
+  isExact: true,
+};
 
-/**
-
-
-
-
-
-
-
-
+/** 0-byte read, return given default value */
+declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
+  Descriptor.new<T>(
+    "ignoreValue",
+    zeroSizeHint,
+    (_e, _v) => {},
+    (_d) => defaultValue,
+    (_s) => {},
+  );
 
-/**
-
-
-
-
-
-
+/** Encode and decode object with leading version number. */
+declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
+  Descriptor.new<T>(
+    "withVersion",
+    {
+      bytes: val.sizeHint.bytes + 8,
+      isExact: false,
+    },
+    (e, v) => {
+      e.varU64(0n);
+      val.encode(e, v);
+    },
+    (d) => {
+      const version = d.varU64();
+      if (version !== 0n) {
+        throw new Error("Non-zero version is not supported!");
+      }
+      return val.decode(d);
+    },
+    (s) => {
+      s.varU64();
+      val.skip(s);
+    },
+  );
 
 /**
- *
+ * Service account details.
  *
- * https://graypaper.fluffylabs.dev/#/
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
  */
-declare class
+declare class ServiceAccountInfo extends WithDebug {
+  static Codec = codec.Class(ServiceAccountInfo, {
+    codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
+    balance: codec.u64,
+    accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+    onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+    storageUtilisationBytes: codec.u64,
+    gratisStorage: codec.u64,
+    storageUtilisationCount: codec.u32,
+    created: codec.u32.convert((x) => x, tryAsTimeSlot),
+    lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
+    parentService: codec.u32.convert((x) => x, tryAsServiceId),
+  });
+
+  static create(a: CodecRecord<ServiceAccountInfo>) {
+    return new ServiceAccountInfo(
+      a.codeHash,
+      a.balance,
+      a.accumulateMinGas,
+      a.onTransferMinGas,
+      a.storageUtilisationBytes,
+      a.gratisStorage,
+      a.storageUtilisationCount,
+      a.created,
+      a.lastAccumulation,
+      a.parentService,
+    );
+  }
+
+  /**
+   * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
+   * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
+   */
+  static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
+    const storageCost =
+      BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
+
+    if (storageCost < 0n) {
+      return tryAsU64(0);
+    }
+
+    if (storageCost >= 2n ** 64n) {
+      return tryAsU64(2n ** 64n - 1n);
+    }
+
+    return tryAsU64(storageCost);
+  }
+
+  private constructor(
+    /** `a_c`: Hash of the service code. */
+    public readonly codeHash: CodeHash,
+    /** `a_b`: Current account balance. */
+    public readonly balance: U64,
+    /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
+    public readonly accumulateMinGas: ServiceGas,
+    /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
+    public readonly onTransferMinGas: ServiceGas,
+    /** `a_o`: Total number of octets in storage. */
+    public readonly storageUtilisationBytes: U64,
+    /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
+    public readonly gratisStorage: U64,
+    /** `a_i`: Number of items in storage. */
+    public readonly storageUtilisationCount: U32,
+    /** `a_r`: Creation account time slot. */
+    public readonly created: TimeSlot,
+    /** `a_a`: Most recent accumulation time slot. */
+    public readonly lastAccumulation: TimeSlot,
+    /** `a_p`: Parent service ID. */
+    public readonly parentService: ServiceId,
+  ) {
+    super();
+  }
+}
+
+declare class PreimageItem extends WithDebug {
+  static Codec = codec.Class(PreimageItem, {
+    hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
+    blob: codec.blob,
+  });
+
+  static create({ hash, blob }: CodecRecord<PreimageItem>) {
+    return new PreimageItem(hash, blob);
+  }
+
+  private constructor(
+    readonly hash: PreimageHash,
+    readonly blob: BytesBlob,
+  ) {
+    super();
+  }
+}
+
+type StorageKey = Opaque<BytesBlob, "storage key">;
+
+declare class StorageItem extends WithDebug {
+  static Codec = codec.Class(StorageItem, {
+    key: codec.blob.convert(
+      (i) => i,
+      (o) => asOpaqueType(o),
+    ),
+    value: codec.blob,
+  });
+
+  static create({ key, value }: CodecRecord<StorageItem>) {
+    return new StorageItem(key, value);
+  }
+
+  private constructor(
+    readonly key: StorageKey,
+    readonly value: BytesBlob,
+  ) {
+    super();
+  }
+}
+
+declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
+type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
+declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
+  const knownSize = asKnownSize(items) as LookupHistorySlots;
+  if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
+    throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
+  }
+  return knownSize;
+}
+
+/** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
+declare class LookupHistoryItem {
+  constructor(
+    public readonly hash: PreimageHash,
+    public readonly length: U32,
+    /**
+     * Preimage availability history as a sequence of time slots.
+     * See PreimageStatus and the following GP fragment for more details.
+     * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
+    public readonly slots: LookupHistorySlots,
+  ) {}
+
+  static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
+    if ("slots" in item) {
+      return item.slots.length === 0;
+    }
+    return item.length === 0;
+  }
+}
+
+/** Dictionary entry of services that auto-accumulate every block. */
+declare class AutoAccumulate {
+  static Codec = codec.Class(AutoAccumulate, {
+    service: codec.u32.asOpaque<ServiceId>(),
+    gasLimit: codec.u64.asOpaque<ServiceGas>(),
+  });
+
+  static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
+    return new AutoAccumulate(service, gasLimit);
+  }
+
+  private constructor(
+    /** Service id that auto-accumulates. */
+    readonly service: ServiceId,
+    /** Gas limit for auto-accumulation. */
+    readonly gasLimit: ServiceGas,
+  ) {}
+}
+
+/**
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
+ */
+declare class PrivilegedServices {
+  /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
+  static Codec = codec.Class(PrivilegedServices, {
+    manager: codec.u32.asOpaque<ServiceId>(),
+    assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
+    delegator: codec.u32.asOpaque<ServiceId>(),
+    registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
+      ? codec.u32.asOpaque<ServiceId>()
+      : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
+    autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
+  });
+
+  static create(a: CodecRecord<PrivilegedServices>) {
+    return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
+  }
+
+  private constructor(
+    /**
+     * `χ_M`: Manages alteration of χ from block to block,
+     * as well as bestow services with storage deposit credits.
+     * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
+     */
+    readonly manager: ServiceId,
+    /** `χ_V`: Managers validator keys. */
+    readonly delegator: ServiceId,
+    /**
+     * `χ_R`: Manages the creation of services in protected range.
+     *
+     * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
+     */
+    readonly registrar: ServiceId,
+    /** `χ_A`: Manages authorization queue one for each core. */
+    readonly assigners: PerCore<ServiceId>,
+    /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
+    readonly autoAccumulateServices: readonly AutoAccumulate[],
+  ) {}
+}
+
+declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
+
+/** Merkle Mountain Range peaks. */
+interface MmrPeaks<H extends OpaqueHash> {
+  /**
+   * Peaks at particular positions.
+   *
+   * In case there is no merkle trie at given index, `null` is placed.
+   */
+  peaks: readonly (H | null)[];
+}
+
+/** Hasher interface for MMR. */
+interface MmrHasher<H extends OpaqueHash> {
+  /** Hash two items together. */
+  hashConcat(a: H, b: H): H;
+  /** Hash two items together with extra bytes blob prepended. */
+  hashConcatPrepend(id: BytesBlob, a: H, b: H): H;
+}
+
+/**
+ * Merkle Mountain Range.
+ *
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
+ */
+declare class MerkleMountainRange<H extends OpaqueHash> {
   /** Construct an empty MMR. */
   static empty<H extends OpaqueHash>(hasher: MmrHasher<H>) {
     return new MerkleMountainRange(hasher);
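Among the relocated service-account types, `ServiceAccountInfo.calculateThresholdBalance` encodes `a_t = max(0, B_S + B_I·a_i + B_L·a_o − a_f)` with `B_S = 100n`, `B_I = 10n`, `B_L = 1n`, clamped to the u64 range. A worked sketch using the `tryAs*` helpers declared in this file:

```ts
// 2 storage items, 100 stored octets, no gratis storage:
// 100n + 10n * 2n + 1n * 100n - 0n = 220n
ServiceAccountInfo.calculateThresholdBalance(tryAsU32(2), tryAsU64(100n), tryAsU64(0n)); // 220n

// Gratis storage can drive the sum negative; the result clamps to zero:
ServiceAccountInfo.calculateThresholdBalance(tryAsU32(0), tryAsU64(0n), tryAsU64(1000n)); // 0n
```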
@@ -9648,340 +9859,156 @@ declare class RecentBlocksHistory extends WithDebug {
     return RecentBlocksHistory.create(
       RecentBlocks.create({
         ...this.current,
-        blocks: asOpaqueType(blocks as BlockState[]),
-      }),
-    );
-  }
-
-    throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
-  }
-}
-
-/**
- * Fixed size of validator metadata.
- *
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
- */
-declare const VALIDATOR_META_BYTES = 128;
-type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
-
-/**
- * Details about validators' identity.
- *
- * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
- */
-declare class ValidatorData extends WithDebug {
-  static Codec = codec.Class(ValidatorData, {
-    bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
-    ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
-    bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
-    metadata: codec.bytes(VALIDATOR_META_BYTES),
-  });
-
-  static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
-    return new ValidatorData(bandersnatch, ed25519, bls, metadata);
-  }
-
-  private constructor(
-    /** Bandersnatch public key. */
-    public readonly bandersnatch: BandersnatchKey,
-    /** ED25519 key data. */
-    public readonly ed25519: Ed25519Key,
-    /** BLS public key. */
-    public readonly bls: BlsKey,
-    /** Validator-defined additional metdata. */
-    public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
-  ) {
-    super();
-  }
-}
-
-declare enum SafroleSealingKeysKind {
-  Tickets = 0,
-  Keys = 1,
-}
-
-type SafroleSealingKeys =
-  | {
-      kind: SafroleSealingKeysKind.Keys;
-      keys: PerEpochBlock<BandersnatchKey>;
-    }
-  | {
-      kind: SafroleSealingKeysKind.Tickets;
-      tickets: PerEpochBlock<Ticket>;
-    };
-
-declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
-
-declare class SafroleSealingKeysData extends WithDebug {
-  static Codec = codecWithContext((context) => {
-    return codec.custom<SafroleSealingKeys>(
-      {
-        name: "SafroleSealingKeys",
-        sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
-      },
-      (e, x) => {
-        e.varU32(tryAsU32(x.kind));
-        if (x.kind === SafroleSealingKeysKind.Keys) {
-          e.sequenceFixLen(codecBandersnatchKey, x.keys);
-        } else {
-          e.sequenceFixLen(Ticket.Codec, x.tickets);
-        }
-      },
-      (d) => {
-        const epochLength = context.epochLength;
-        const kind = d.varU32();
-        if (kind === SafroleSealingKeysKind.Keys) {
-          const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
-          return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
-        }
-
-        if (kind === SafroleSealingKeysKind.Tickets) {
-          const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
-          return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
-        }
-
-        throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
-      },
-      (s) => {
-        const kind = s.decoder.varU32();
-        if (kind === SafroleSealingKeysKind.Keys) {
-          s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
-          return;
-        }
-        if (kind === SafroleSealingKeysKind.Tickets) {
-          s.sequenceFixLen(Ticket.Codec, context.epochLength);
-          return;
-        }
-
-        throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
-      },
-    );
-  });
-
-  static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
-    return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
-  }
-
-  static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
-    return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
-  }
-
-  private constructor(
-    readonly kind: SafroleSealingKeysKind,
-    readonly keys?: PerEpochBlock<BandersnatchKey>,
-    readonly tickets?: PerEpochBlock<Ticket>,
-  ) {
-    super();
-  }
-}
-
-declare class SafroleData {
-  static Codec = codec.Class(SafroleData, {
-    nextValidatorData: codecPerValidator(ValidatorData.Codec),
-    epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
-    sealingKeySeries: SafroleSealingKeysData.Codec,
-    ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
-  });
-
-  static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
-    return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
-  }
-
-  private constructor(
-    /** gamma_k */
-    public readonly nextValidatorData: PerValidator<ValidatorData>,
-    /** gamma_z */
-    public readonly epochRoot: BandersnatchRingRoot,
-    /** gamma_s */
-    public readonly sealingKeySeries: SafroleSealingKeys,
-    /** gamma_a */
-    public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
-  ) {}
-}
-
-/**
- * `B_S`: The basic minimum balance which all services require.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
- */
-declare const BASE_SERVICE_BALANCE = 100n;
-/**
- * `B_I`: The additional minimum balance required per item of elective service state.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
- */
-declare const ELECTIVE_ITEM_BALANCE = 10n;
-/**
- * `B_L`: The additional minimum balance required per octet of elective service state.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
- */
-declare const ELECTIVE_BYTE_BALANCE = 1n;
-
-declare const zeroSizeHint: SizeHint = {
-  bytes: 0,
-  isExact: true,
-};
-
-/** 0-byte read, return given default value */
-declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
-  Descriptor.new<T>(
-    "ignoreValue",
-    zeroSizeHint,
-    (_e, _v) => {},
-    (_d) => defaultValue,
-    (_s) => {},
-  );
-
-/**
- * Service account details.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
- */
-declare class ServiceAccountInfo extends WithDebug {
-  static Codec = codec.Class(ServiceAccountInfo, {
-    codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
-    balance: codec.u64,
-    accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
-    onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
-    storageUtilisationBytes: codec.u64,
-    gratisStorage: codec.u64,
-    storageUtilisationCount: codec.u32,
-    created: codec.u32.convert((x) => x, tryAsTimeSlot),
-    lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
-    parentService: codec.u32.convert((x) => x, tryAsServiceId),
-  });
-
-  static create(a: CodecRecord<ServiceAccountInfo>) {
-    return new ServiceAccountInfo(
-      a.codeHash,
-      a.balance,
-      a.accumulateMinGas,
-      a.onTransferMinGas,
-      a.storageUtilisationBytes,
-      a.gratisStorage,
-      a.storageUtilisationCount,
-      a.created,
-      a.lastAccumulation,
-      a.parentService,
-    );
-  }
-
-  /**
-   * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
-   * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
-   */
-  static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
-    const storageCost =
-      BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
-
-    if (storageCost < 0n) {
-      return tryAsU64(0);
-    }
-
-    if (storageCost >= 2n ** 64n) {
-      return tryAsU64(2n ** 64n - 1n);
-    }
-
-    return tryAsU64(storageCost);
-  }
-
-  private constructor(
-    /** `a_c`: Hash of the service code. */
-    public readonly codeHash: CodeHash,
-    /** `a_b`: Current account balance. */
-    public readonly balance: U64,
-    /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
-    public readonly accumulateMinGas: ServiceGas,
-    /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
-    public readonly onTransferMinGas: ServiceGas,
-    /** `a_o`: Total number of octets in storage. */
-    public readonly storageUtilisationBytes: U64,
-    /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
-    public readonly gratisStorage: U64,
-    /** `a_i`: Number of items in storage. */
-    public readonly storageUtilisationCount: U32,
-    /** `a_r`: Creation account time slot. */
-    public readonly created: TimeSlot,
-    /** `a_a`: Most recent accumulation time slot. */
-    public readonly lastAccumulation: TimeSlot,
-    /** `a_p`: Parent service ID. */
-    public readonly parentService: ServiceId,
-  ) {
-    super();
+        blocks: asOpaqueType(blocks as BlockState[]),
+      }),
+    );
+  }
+
+  throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
   }
 }
 
-
-
-
+/**
+ * Fixed size of validator metadata.
+ *
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d55010d5501
+ */
+declare const VALIDATOR_META_BYTES = 128;
+type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
+
+/**
+ * Details about validators' identity.
+ *
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
+ */
+declare class ValidatorData extends WithDebug {
+  static Codec = codec.Class(ValidatorData, {
+    bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
+    ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
+    bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
+    metadata: codec.bytes(VALIDATOR_META_BYTES),
   });
 
-  static create({
-    return new
+  static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
+    return new ValidatorData(bandersnatch, ed25519, bls, metadata);
   }
 
   private constructor(
-
-    readonly
+    /** Bandersnatch public key. */
+    public readonly bandersnatch: BandersnatchKey,
+    /** ED25519 key data. */
+    public readonly ed25519: Ed25519Key,
+    /** BLS public key. */
+    public readonly bls: BlsKey,
+    /** Validator-defined additional metdata. */
+    public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
   ) {
     super();
   }
 }
 
-
+declare enum SafroleSealingKeysKind {
+  Tickets = 0,
+  Keys = 1,
+}
 
-
-
-
-
-
-
-
+type SafroleSealingKeys =
+  | {
+      kind: SafroleSealingKeysKind.Keys;
+      keys: PerEpochBlock<BandersnatchKey>;
+    }
+  | {
+      kind: SafroleSealingKeysKind.Tickets;
+      tickets: PerEpochBlock<Ticket>;
+    };
+
+declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
+
+declare class SafroleSealingKeysData extends WithDebug {
+  static Codec = codecWithContext((context) => {
+    return codec.custom<SafroleSealingKeys>(
+      {
+        name: "SafroleSealingKeys",
+        sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
+      },
+      (e, x) => {
+        e.varU32(tryAsU32(x.kind));
+        if (x.kind === SafroleSealingKeysKind.Keys) {
+          e.sequenceFixLen(codecBandersnatchKey, x.keys);
+        } else {
+          e.sequenceFixLen(Ticket.Codec, x.tickets);
+        }
+      },
+      (d) => {
+        const epochLength = context.epochLength;
+        const kind = d.varU32();
+        if (kind === SafroleSealingKeysKind.Keys) {
+          const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
+          return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
+        }
+
+        if (kind === SafroleSealingKeysKind.Tickets) {
+          const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
+          return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
+        }
+
+        throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
+      },
+      (s) => {
+        const kind = s.decoder.varU32();
+        if (kind === SafroleSealingKeysKind.Keys) {
+          s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
+          return;
+        }
+        if (kind === SafroleSealingKeysKind.Tickets) {
+          s.sequenceFixLen(Ticket.Codec, context.epochLength);
+          return;
+        }
+
+        throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
+      },
+    );
   });
 
-  static
-    return new
+  static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
+    return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
+  }
+
+  static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
+    return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
   }
 
   private constructor(
-    readonly
-    readonly
+    readonly kind: SafroleSealingKeysKind,
+    readonly keys?: PerEpochBlock<BandersnatchKey>,
+    readonly tickets?: PerEpochBlock<Ticket>,
   ) {
     super();
   }
 }
 
-declare
-
-
-
-
-
+declare class SafroleData {
+  static Codec = codec.Class(SafroleData, {
+    nextValidatorData: codecPerValidator(ValidatorData.Codec),
+    epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
+    sealingKeySeries: SafroleSealingKeysData.Codec,
+    ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
|
|
9996
|
+
});
|
|
9997
|
+
|
|
9998
|
+
static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
|
|
9999
|
+
return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
|
|
9963
10000
|
}
|
|
9964
|
-
return knownSize;
|
|
9965
|
-
}
|
|
9966
10001
|
|
|
9967
|
-
|
|
9968
|
-
|
|
9969
|
-
|
|
9970
|
-
|
|
9971
|
-
public readonly
|
|
9972
|
-
/**
|
|
9973
|
-
|
|
9974
|
-
|
|
9975
|
-
|
|
9976
|
-
public readonly slots: LookupHistorySlots,
|
|
10002
|
+
private constructor(
|
|
10003
|
+
/** gamma_k */
|
|
10004
|
+
public readonly nextValidatorData: PerValidator<ValidatorData>,
|
|
10005
|
+
/** gamma_z */
|
|
10006
|
+
public readonly epochRoot: BandersnatchRingRoot,
|
|
10007
|
+
/** gamma_s */
|
|
10008
|
+
public readonly sealingKeySeries: SafroleSealingKeys,
|
|
10009
|
+
/** gamma_a */
|
|
10010
|
+
public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
|
|
9977
10011
|
) {}
|
|
9978
|
-
|
|
9979
|
-
static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
|
|
9980
|
-
if ("slots" in item) {
|
|
9981
|
-
return item.slots.length === 0;
|
|
9982
|
-
}
|
|
9983
|
-
return item.length === 0;
|
|
9984
|
-
}
|
|
9985
10012
|
}
|
|
9986
10013
|
|
|
9987
10014
|
declare const codecServiceId: Descriptor<ServiceId> =
|
|
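The SafroleSealingKeys change above pairs a discriminated union with private-constructor factories, so callers can only obtain well-formed variants. A minimal standalone sketch of that pattern, using local stand-in types rather than the package's own API:

// Editor's sketch (not part of the package).
enum SealingKeysKind {
  Tickets = 0,
  Keys = 1,
}

type SealingKeys =
  | { kind: SealingKeysKind.Keys; keys: string[] }
  | { kind: SealingKeysKind.Tickets; tickets: number[] };

// Factories are the only way to build a value, mirroring the static
// keys()/tickets() helpers on SafroleSealingKeysData.
const sealingKeys = {
  keys: (keys: string[]): SealingKeys => ({ kind: SealingKeysKind.Keys, keys }),
  tickets: (tickets: number[]): SealingKeys => ({ kind: SealingKeysKind.Tickets, tickets }),
};

// Narrowing on `kind` gives type-safe access to the variant payload,
// exactly how the encoder/decoder above dispatches.
function describe(s: SealingKeys): string {
  return s.kind === SealingKeysKind.Keys
    ? `keys: ${s.keys.length}`
    : `tickets: ${s.tickets.length}`;
}

console.log(describe(sealingKeys.keys(["bandersnatch-key"])));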
@@ -10124,12 +10151,26 @@ declare class CoreStatistics {
  * Service statistics.
  * Updated per block, based on available work reports (`W`).
  *
- * https://graypaper.fluffylabs.dev/#/
- * https://github.com/gavofyork/graypaper/blob/9bffb08f3ea7b67832019176754df4fb36b9557d/text/statistics.tex#L77
+ * https://graypaper.fluffylabs.dev/#/1c979cb/199802199802?v=0.7.1
  */
 declare class ServiceStatistics {
-  static Codec = Compatibility.
-
+  static Codec = Compatibility.selectIfGreaterOrEqual({
+    fallback: codec.Class(ServiceStatistics, {
+      providedCount: codecVarU16,
+      providedSize: codec.varU32,
+      refinementCount: codec.varU32,
+      refinementGasUsed: codecVarGas,
+      imports: codecVarU16,
+      exports: codecVarU16,
+      extrinsicSize: codec.varU32,
+      extrinsicCount: codecVarU16,
+      accumulateCount: codec.varU32,
+      accumulateGasUsed: codecVarGas,
+      onTransfersCount: codec.varU32,
+      onTransfersGasUsed: codecVarGas,
+    }),
+    versions: {
+      [GpVersion.V0_7_0]: codec.Class(ServiceStatistics, {
       providedCount: codecVarU16,
       providedSize: codec.varU32,
       refinementCount: codec.varU32,
@@ -10142,21 +10183,23 @@ declare class ServiceStatistics {
       accumulateGasUsed: codecVarGas,
       onTransfersCount: codec.varU32,
       onTransfersGasUsed: codecVarGas,
-  })
-
+      }),
+      [GpVersion.V0_7_1]: codec.Class(ServiceStatistics, {
       providedCount: codecVarU16,
       providedSize: codec.varU32,
       refinementCount: codec.varU32,
       refinementGasUsed: codecVarGas,
       imports: codecVarU16,
-  exports: codecVarU16,
-  extrinsicSize: codec.varU32,
       extrinsicCount: codecVarU16,
+        extrinsicSize: codec.varU32,
+        exports: codecVarU16,
       accumulateCount: codec.varU32,
       accumulateGasUsed: codecVarGas,
-  onTransfersCount:
-  onTransfersGasUsed:
-  })
+        onTransfersCount: ignoreValueWithDefault(tryAsU32(0)),
+        onTransfersGasUsed: ignoreValueWithDefault(tryAsServiceGas(0)),
+      }),
+    },
+  });
 
   static create(v: CodecRecord<ServiceStatistics>) {
     return new ServiceStatistics(
@@ -10196,9 +10239,9 @@ declare class ServiceStatistics {
     public accumulateCount: U32,
     /** `a.1` */
     public accumulateGasUsed: ServiceGas,
-    /** `t.0` */
+    /** `t.0` @deprecated since 0.7.1 */
     public onTransfersCount: U32,
-    /** `t.1` */
+    /** `t.1` @deprecated since 0.7.1 */
     public onTransfersGasUsed: ServiceGas,
   ) {}
 
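The new ServiceStatistics.Codec picks a codec per Gray Paper version, with a fallback for anything older than the listed versions. A standalone sketch of one plausible reading of that selection logic; selectIfGreaterOrEqual's exact semantics are assumed here, and the helper below is illustrative, not the library API:

// Editor's sketch (not part of the package): version-gated selection.
type Version = "0.6.7" | "0.7.0" | "0.7.1" | "0.7.2-preview";
const ORDER: Version[] = ["0.6.7", "0.7.0", "0.7.1", "0.7.2-preview"];

function selectIfGreaterOrEqual<T>(
  current: Version,
  opts: { fallback: T; versions: Partial<Record<Version, T>> },
): T {
  const idx = ORDER.indexOf(current);
  // Walk down from the current version and pick the closest explicit entry;
  // anything older than every listed version gets the fallback.
  for (let i = idx; i >= 0; i--) {
    const candidate = opts.versions[ORDER[i]];
    if (candidate !== undefined) {
      return candidate;
    }
  }
  return opts.fallback;
}

// e.g. 0.7.2-preview inherits the 0.7.1 entry, while 0.6.7 gets the fallback.
console.log(
  selectIfGreaterOrEqual("0.7.2-preview", {
    fallback: "legacy",
    versions: { "0.7.0": "v0.7.0", "0.7.1": "v0.7.1" },
  }),
);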
@@ -11158,8 +11201,9 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
     epochRoot: Bytes.zero(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
     privilegedServices: PrivilegedServices.create({
       manager: tryAsServiceId(0),
-
-
+      assigners: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
+      delegator: tryAsServiceId(0),
+      registrar: tryAsServiceId(MAX_VALUE),
       autoAccumulateServices: [],
     }),
     accumulationOutputLog: SortedArray.fromArray(accumulationOutputComparator, []),
@@ -11293,7 +11337,7 @@ declare const index$e_codecPerCore: typeof codecPerCore;
 declare const index$e_codecServiceId: typeof codecServiceId;
 declare const index$e_codecVarGas: typeof codecVarGas;
 declare const index$e_codecVarU16: typeof codecVarU16;
-declare const index$
+declare const index$e_codecWithVersion: typeof codecWithVersion;
 declare const index$e_hashComparator: typeof hashComparator;
 declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
 declare const index$e_serviceDataCodec: typeof serviceDataCodec;
@@ -11304,7 +11348,7 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
 declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
 declare const index$e_zeroSizeHint: typeof zeroSizeHint;
 declare namespace index$e {
-  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$
+  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
   export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
 }
 
@@ -11372,7 +11416,7 @@ declare namespace stateKeys {
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
-  export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
+  export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11380,11 +11424,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
-  export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
+  export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11392,11 +11436,16 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
-  export function serviceLookupHistory(
+  export function serviceLookupHistory(
+    blake2b: Blake2b,
+    serviceId: ServiceId,
+    hash: PreimageHash,
+    preimageLength: U32,
+  ): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const doubleHash = blake2b.hashBytes(hash);
       const out = Bytes.zero(HASH_SIZE);
@@ -11405,11 +11454,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, preimageLength, hash);
+    return serviceNested(blake2b, serviceId, preimageLength, hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
-  export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
+  export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
     const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
     const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
     const key = Bytes.zero(HASH_SIZE);
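Every service-nested state key above now takes an explicit Blake2b instance and, per the visible body of serviceNested, hashes a u32 little-endian prefix concatenated with the payload and keeps the first 28 bytes. A hedged sketch of just that visible part; Node's built-in blake2b512 stands in for the package hasher, and the final key layout (which also embeds the service id, elided from this diff) is not reproduced:

// Editor's sketch (not part of the package).
import { createHash } from "node:crypto";

function u32AsLeBytes(value: number): Uint8Array {
  const out = new Uint8Array(4);
  new DataView(out.buffer).setUint32(0, value >>> 0, true);
  return out;
}

function nestedKeyPrefix(numberPrefix: number, payload: Uint8Array): Uint8Array {
  const input = new Uint8Array(4 + payload.length);
  input.set(u32AsLeBytes(numberPrefix), 0);
  input.set(payload, 4);
  // blake2b512 here is only a stand-in; the package hasher is injected.
  const digest = createHash("blake2b512").update(input).digest();
  return new Uint8Array(digest.subarray(0, 28)); // truncated to 28 bytes, as above
}

console.log(Buffer.from(nestedKeyPrefix(2 ** 32 - 1, new Uint8Array([1, 2, 3]))).toString("hex"));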
@@ -11589,24 +11638,26 @@ declare namespace serialize {
   /** C(255, s): https://graypaper.fluffylabs.dev/#/85129da/383103383103?v=0.6.3 */
   export const serviceData = (serviceId: ServiceId) => ({
     key: stateKeys.serviceInfo(serviceId),
-    Codec:
+    Codec: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
+      ? codecWithVersion(ServiceAccountInfo.Codec)
+      : ServiceAccountInfo.Codec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
-  export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
-    key: stateKeys.serviceStorage(serviceId, key),
+  export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
+    key: stateKeys.serviceStorage(blake2b, serviceId, key),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
-  export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
-    key: stateKeys.servicePreimage(serviceId, hash),
+  export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
+    key: stateKeys.servicePreimage(blake2b, serviceId, hash),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
-  export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
-    key: stateKeys.serviceLookupHistory(serviceId, hash, len),
+  export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
+    key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
     Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
   });
 }
@@ -11641,6 +11692,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
 /** Serialize given state update into a series of key-value pairs. */
 declare function* serializeStateUpdate(
   spec: ChainSpec,
+  blake2b: Blake2b,
   update: Partial<State & ServicesUpdate>,
 ): Generator<StateEntryUpdate> {
   // first let's serialize all of the simple entries (if present!)
@@ -11649,9 +11701,9 @@ declare function* serializeStateUpdate(
   const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
 
   // then let's proceed with service updates
-  yield* serializeServiceUpdates(update.servicesUpdates, encode);
-  yield* serializePreimages(update.preimages, encode);
-  yield* serializeStorage(update.storage);
+  yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
+  yield* serializePreimages(update.preimages, encode, blake2b);
+  yield* serializeStorage(update.storage, blake2b);
   yield* serializeRemovedServices(update.servicesRemoved);
 }
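serializeStateUpdate simply delegates to sub-generators with yield*, each of which emits [action, key, value] tuples only for the parts of the partial update that are present. A toy mirror of that composition, with local stand-in types:

// Editor's sketch (not part of the package).
type EntryUpdate = [action: "insert" | "remove", key: string, value: string];

function* serializeStorageSketch(storage: Map<string, string | null>): Generator<EntryUpdate> {
  for (const [key, value] of storage) {
    yield value === null ? ["remove", key, ""] : ["insert", key, value];
  }
}

function* serializeUpdateSketch(update: { storage?: Map<string, string | null> }): Generator<EntryUpdate> {
  // Sub-serializers are only consulted for parts that are present,
  // mirroring the Partial<State & ServicesUpdate> handling above.
  yield* serializeStorageSketch(update.storage ?? new Map());
}

for (const [action, key, value] of serializeUpdateSketch({ storage: new Map([["a", "1"], ["b", null]]) })) {
  console.log(action, key, value);
}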
@@ -11663,18 +11715,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
   }
 }
 
-declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
+declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of storage ?? []) {
     switch (action.kind) {
       case UpdateStorageKind.Set: {
         const key = action.storage.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
         break;
       }
       case UpdateStorageKind.Remove: {
         const key = action.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
         break;
       }
@@ -11684,16 +11736,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
   }
 }
 
-declare function* serializePreimages(
+declare function* serializePreimages(
+  preimages: UpdatePreimage[] | undefined,
+  encode: EncodeFun,
+  blake2b: Blake2b,
+): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of preimages ?? []) {
     switch (action.kind) {
       case UpdatePreimageKind.Provide: {
         const { hash, blob } = action.preimage;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Insert, codec.key, blob];
 
         if (action.slot !== null) {
-          const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
+          const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
           yield [
             StateEntryUpdateAction.Insert,
             codec2.key,
@@ -11704,16 +11760,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
       }
       case UpdatePreimageKind.UpdateOrAdd: {
         const { hash, length, slots } = action.item;
-        const codec = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
         break;
       }
       case UpdatePreimageKind.Remove: {
         const { hash, length } = action;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
 
-        const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
         break;
       }
@@ -11725,6 +11781,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
 declare function* serializeServiceUpdates(
   servicesUpdates: UpdateService[] | undefined,
   encode: EncodeFun,
+  blake2b: Blake2b,
 ): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of servicesUpdates ?? []) {
     // new service being created or updated
@@ -11734,7 +11791,7 @@ declare function* serializeServiceUpdates(
     // additional lookup history update
     if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
       const { lookupHistory } = action;
-      const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
+      const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
       yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
     }
   }
@@ -11868,8 +11925,8 @@ declare class StateEntries {
   );
 
   /** Turn in-memory state into its serialized form. */
-  static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
-    return new StateEntries(convertInMemoryStateToDictionary(spec, state));
+  static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
+    return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
   }
 
   /**
@@ -11924,7 +11981,8 @@ declare class StateEntries {
   }
 
   /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
-  getRootHash(): StateRootHash {
+  getRootHash(blake2b: Blake2b): StateRootHash {
+    const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
     const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
     for (const [key, value] of this) {
       leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
@@ -11937,6 +11995,7 @@ declare class StateEntries {
   /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
 declare function convertInMemoryStateToDictionary(
   spec: ChainSpec,
+  blake2b: Blake2b,
   state: InMemoryState,
 ): TruncatedHashDictionary<StateKey, BytesBlob> {
   const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11969,20 +12028,25 @@ declare function convertInMemoryStateToDictionary(
 
     // preimages
     for (const preimage of service.data.preimages.values()) {
-      const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
+      const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
       serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
     }
 
     // storage
     for (const storage of service.data.storage.values()) {
-      const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
+      const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
       serialized.set(key, Encoder.encodeObject(Codec, storage.value));
     }
 
     // lookup history
     for (const lookupHistoryList of service.data.lookupHistory.values()) {
      for (const lookupHistory of lookupHistoryList) {
-        const { key, Codec } = serialize.serviceLookupHistory(
+        const { key, Codec } = serialize.serviceLookupHistory(
+          blake2b,
+          serviceId,
+          lookupHistory.hash,
+          lookupHistory.length,
+        );
        serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
      }
    }
@@ -12013,21 +12077,23 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
   implements State, EnumerableState
 {
   /** Create a state-like object from collection of serialized entries. */
-  static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
-    return new SerializedState(spec, state, recentServices);
+  static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
+    return new SerializedState(spec, blake2b, state, recentServices);
   }
 
   /** Create a state-like object backed by some DB. */
   static new<T extends SerializedStateBackend>(
     spec: ChainSpec,
+    blake2b: Blake2b,
     db: T,
     recentServices: ServiceId[] = [],
   ): SerializedState<T> {
-    return new SerializedState(spec, db, recentServices);
+    return new SerializedState(spec, blake2b, db, recentServices);
   }
 
   private constructor(
     private readonly spec: ChainSpec,
+    private readonly blake2b: Blake2b,
     public backend: T,
     /** Best-effort list of recently active services. */
     private readonly _recentServiceIds: ServiceId[],
@@ -12058,7 +12124,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
     this._recentServiceIds.push(id);
   }
 
-    return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
+    return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
   }
 
   private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
@@ -12157,6 +12223,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
 /** Service data representation on a serialized state. */
 declare class SerializedService implements Service {
   constructor(
+    public readonly blake2b: Blake2b,
     /** Service id */
     public readonly serviceId: ServiceId,
     private readonly accountInfo: ServiceAccountInfo,
@@ -12172,14 +12239,14 @@ declare class SerializedService implements Service {
   getStorage(rawKey: StorageKey): BytesBlob | null {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const SERVICE_ID_BYTES = 4;
-      const serviceIdAndKey =
+      const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
       serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
       serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
-      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
-      return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
+      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
+      return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
     }
 
-    return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
+    return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
   }
 
   /**
@@ -12189,17 +12256,17 @@ declare class SerializedService implements Service {
   */
   hasPreimage(hash: PreimageHash): boolean {
     // TODO [ToDr] consider optimizing to avoid fetching the whole data.
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
   }
 
   /** Retrieve preimage from the DB. */
   getPreimage(hash: PreimageHash): BytesBlob | null {
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
   }
 
   /** Retrieve preimage lookup history. */
   getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
-    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
+    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
     if (rawSlots === undefined) {
       return null;
     }
@@ -12212,9 +12279,9 @@ type KeyAndCodec<T> = {
   Codec: Decode<T>;
 };
 
-declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
+declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
   const stateEntries = StateEntries.fromEntriesUnsafe(entries);
-  return SerializedState.fromStateEntries(spec, stateEntries);
+  return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
 }
 
 /**
@@ -12370,7 +12437,8 @@ declare class LeafDb implements SerializedStateBackend {
     assertNever(val);
   }
 
-  getStateRoot(): StateRootHash {
+  getStateRoot(blake2b: Blake2b): StateRootHash {
+    const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
     return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
   }
 
@@ -12468,7 +12536,8 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
   }
 
   async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
-
+    const blake2b = await Blake2b.createHasher();
+    return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
   }
 
   /** Insert a full state into the database. */
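The getStateRoot signatures now receive the hasher instead of reaching for a module-level one, and InMemoryStates creates it asynchronously once per call. A standalone sketch of that create-once, inject-everywhere shape; createHasher below is a stand-in for Blake2b.createHasher, and the fold is obviously not the real trie computation:

// Editor's sketch (not part of the package).
import { createHash } from "node:crypto";

type Hasher = { hashBytes: (data: Uint8Array) => Uint8Array };

// Stand-in for Blake2b.createHasher(); the real one is async (e.g. WASM init).
async function createHasher(): Promise<Hasher> {
  return { hashBytes: (data) => new Uint8Array(createHash("blake2b512").update(data).digest()) };
}

async function getStateRootSketch(leaves: Uint8Array[]): Promise<Uint8Array> {
  const hasher = await createHasher();
  // Every hash below uses the injected instance; there is no hidden global.
  return leaves.reduce((acc, leaf) => hasher.hashBytes(new Uint8Array([...acc, ...leaf])), new Uint8Array());
}

getStateRootSketch([new Uint8Array([1]), new Uint8Array([2])]).then((root) => console.log(root.length));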
@@ -12573,7 +12642,7 @@ declare function padAndEncodeData(input: BytesBlob) {
   const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
   let padded = input;
   if (input.length !== paddedLength) {
-    padded = BytesBlob.blobFrom(
+    padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
     padded.raw.set(input.raw, 0);
   }
   return chunkingFunction(padded);
@@ -12629,7 +12698,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
  */
 declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
   const result: Bytes<POINT_LENGTH>[] = [];
-  const data =
+  const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
 
   // add original shards to the result
   for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12649,7 +12718,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
   for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
     const pointIndex = i * POINT_ALIGNMENT;
 
-    const redundancyPoint =
+    const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
     for (let j = 0; j < POINT_LENGTH; j++) {
       redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
     }
@@ -12669,7 +12738,7 @@ declare function decodePiece(
 ): Bytes<PIECE_SIZE> {
   const result = Bytes.zero(PIECE_SIZE);
 
-  const data =
+  const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
   const indices = new Uint16Array(input.length);
 
   for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12796,7 +12865,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
     return BytesBlob.empty();
   }
   const n = input[0].length;
-  const result = BytesBlob.blobFrom(
+  const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
   for (let i = 0; i < k; i++) {
     const entry = input[i].raw;
     for (let j = 0; j < n; j++) {
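padAndEncodeData rounds the input up to a whole number of pieces before allocating (via the clamped safeAllocUint8Array). The rounding itself in isolation, with an illustrative piece size; the real constant lives in the package:

// Editor's sketch (not part of the package).
const PIECE_SIZE = 684; // illustrative value only

function paddedLength(inputLength: number): number {
  return Math.ceil(inputLength / PIECE_SIZE) * PIECE_SIZE;
}

console.log(paddedLength(0));   // 0, already aligned
console.log(paddedLength(1));   // 684, one full piece
console.log(paddedLength(684)); // 684, no padding needed
console.log(paddedLength(685)); // 1368, two pieces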
@@ -13639,6 +13708,8 @@ declare enum NewServiceError {
   InsufficientFunds = 0,
   /** Service is not privileged to set gratis storage. */
   UnprivilegedService = 1,
+  /** Registrar attempted to create a service with an already existing id. */
+  RegistrarServiceIdAlreadyTaken = 2,
 }
 
 declare enum UpdatePrivilegesError {
@@ -13704,14 +13775,18 @@ interface PartialState {
   ): Result$2<OK, TransferError>;
 
   /**
-   * Create a new service with given codeHash, length, gas, allowance and
+   * Create a new service with given codeHash, length, gas, allowance, gratisStorage and wantedServiceId.
    *
-   * Returns a newly assigned id
-   *
+   * Returns a newly assigned id,
+   * or `wantedServiceId` if it is lower than `S`
+   * and the parent of that service is the `Registrar`.
+   *
+   * https://graypaper.fluffylabs.dev/#/ab2cdbd/2fa9042fc304?v=0.7.2
    *
    * An error can be returned in case the account does not
   * have the required balance,
-   * or tries to set gratis storage without being
+   * or tries to set gratis storage without being `Manager`,
+   * or `Registrar` tries to set a service id that's already taken.
   */
  newService(
    codeHash: CodeHash,
@@ -13719,6 +13794,7 @@ interface PartialState {
   gas: ServiceGas,
   allowance: ServiceGas,
   gratisStorage: U64,
+  wantedServiceId: U64,
 ): Result$2<ServiceId, NewServiceError>;
 
 /** Upgrade code of currently running service. */
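newService now threads wantedServiceId through to the state transition; per the doc comment it is only honoured for the Registrar, and id collisions surface as RegistrarServiceIdAlreadyTaken. A sketch of those documented branches under assumed semantics, with local stand-in types:

// Editor's sketch (not part of the package).
type Result<T, E> = { ok: true; value: T } | { ok: false; error: E };

function newServiceSketch(opts: {
  isManager: boolean;
  isRegistrar: boolean;
  gratisStorage: bigint;
  wantedServiceId: bigint;
  takenIds: Set<bigint>;
}): Result<bigint, "UnprivilegedService" | "RegistrarServiceIdAlreadyTaken"> {
  // Only the Manager may request gratis storage.
  if (opts.gratisStorage > 0n && !opts.isManager) {
    return { ok: false, error: "UnprivilegedService" };
  }
  // The Registrar may claim a specific id, unless it is already taken.
  if (opts.isRegistrar) {
    if (opts.takenIds.has(opts.wantedServiceId)) {
      return { ok: false, error: "RegistrarServiceIdAlreadyTaken" };
    }
    return { ok: true, value: opts.wantedServiceId };
  }
  // Everyone else receives a freshly derived id; 42n is a placeholder.
  return { ok: true, value: 42n };
}

console.log(newServiceSketch({ isManager: false, isRegistrar: true, gratisStorage: 0n, wantedServiceId: 7n, takenIds: new Set([1n]) }));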
@@ -13740,7 +13816,7 @@ interface PartialState {
   updateAuthorizationQueue(
     coreIndex: CoreIndex,
     authQueue: FixedSizeArray<Blake2bHash, AUTHORIZATION_QUEUE_SIZE>,
-
+    assigners: ServiceId | null,
   ): Result$2<OK, UpdatePrivilegesError>;
 
   /**
@@ -13749,14 +13825,16 @@ interface PartialState {
    * `m`: manager service (can change privileged services)
    * `a`: manages authorization queue
    * `v`: manages validator keys
-   * `
+   * `r`: manages creation of new services in the protected id range
+   * `z`: collection of serviceId -> gas that auto-accumulate every block
    *
    */
   updatePrivilegedServices(
     m: ServiceId | null,
     a: PerCore<ServiceId>,
     v: ServiceId | null,
-
+    r: ServiceId | null,
+    z: [ServiceId, ServiceGas][],
   ): Result$2<OK, UpdatePrivilegesError>;
 
   /** Yield accumulation trie result hash. */
@@ -13868,7 +13946,7 @@ declare class Mask {
   }
 
   private buildLookupTableForward(mask: BitVec) {
-    const table =
+    const table = safeAllocUint8Array(mask.bitLength);
     let lastInstructionOffset = 0;
     for (let i = mask.bitLength - 1; i >= 0; i--) {
       if (mask.isSet(i)) {
@@ -14012,7 +14090,7 @@ declare class Registers {
   private asSigned: BigInt64Array;
   private asUnsigned: BigUint64Array;
 
-  constructor(private readonly bytes =
+  constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
     check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
     this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
     this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
@@ -17674,7 +17752,7 @@ declare class AccumulationStateUpdate {
     /** Services state updates. */
     public readonly services: ServicesUpdate,
     /** Pending transfers. */
-    public
+    public transfers: PendingTransfer[],
     /** Yielded accumulation root. */
     public readonly yieldedRoots: Map<ServiceId, OpaqueHash> = new Map(),
   ) {}
@@ -17725,11 +17803,18 @@ declare class AccumulationStateUpdate {
     if (from.privilegedServices !== null) {
       update.privilegedServices = PrivilegedServices.create({
         ...from.privilegedServices,
-
+        assigners: asKnownSize([...from.privilegedServices.assigners]),
       });
     }
     return update;
   }
+
+  /** Retrieve and clear pending transfers. */
+  takeTransfers() {
+    const transfers = this.transfers;
+    this.transfers = [];
+    return transfers;
+  }
 }
 
 type StateSlice = Pick<State, "getService" | "privilegedServices">;
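takeTransfers hands the accumulated array to the caller and swaps in a fresh one, so a batch of transfers can never be consumed twice. The pattern in isolation:

// Editor's sketch (not part of the package): take-and-clear ownership transfer.
class TransferBuffer<T> {
  private pending: T[] = [];

  push(transfer: T): void {
    this.pending.push(transfer);
  }

  /** Retrieve and clear pending transfers. */
  take(): T[] {
    const taken = this.pending;
    this.pending = []; // swap in a fresh array instead of mutating the old one
    return taken;
  }
}

const buf = new TransferBuffer<string>();
buf.push("a");
buf.push("b");
console.log(buf.take()); // ["a", "b"]
console.log(buf.take()); // []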
@@ -17996,7 +18081,7 @@ declare const HostCallResult = {
   OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
   /** Index unknown. */
   WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
-  /** Storage full. */
+  /** Storage full or resource already allocated. */
   FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
   /** Core index unknown. */
   CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
@@ -18004,7 +18089,7 @@ declare const HostCallResult = {
   CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
   /** Gas limit too low. */
   LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
-  /** The item is already solicited
+  /** The item is already solicited, cannot be forgotten, or the operation is invalid due to privilege level. */
   HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
   /** The return value indicating general success. */
   OK: tryAsU64(0n),
@@ -18239,7 +18324,7 @@ declare class HostCalls {
   const maybeAddress = regs.getLowerU32(7);
   const maybeLength = regs.getLowerU32(8);
 
-  const result =
+  const result = safeAllocUint8Array(maybeLength);
   const startAddress = tryAsMemoryIndex(maybeAddress);
   const loadResult = memory.loadInto(result, startAddress);
 
@@ -18678,7 +18763,7 @@ declare class DebuggerAdapter {
 
   if (page === null) {
     // page wasn't allocated so we return an empty page
-    return
+    return safeAllocUint8Array(PAGE_SIZE);
   }
 
   if (page.length === PAGE_SIZE) {
@@ -18687,7 +18772,7 @@ declare class DebuggerAdapter {
   }
 
   // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
-  const fullPage =
+  const fullPage = safeAllocUint8Array(PAGE_SIZE);
   fullPage.set(page);
   return fullPage;
 }
@@ -18880,10 +18965,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
  *
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
-declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
+declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
   check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
   const n = arr.length;
-  const randomNumbers = hashToNumberSequence(entropy, arr.length);
+  const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
   const result: T[] = new Array<T>(n);
 
   let itemsLeft = n;
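fisherYatesShuffle now takes the hasher explicitly and draws its indices from a hash-derived number sequence, keeping the shuffle deterministic for a given entropy input. A self-contained sketch of that idea; sha256 stands in for Blake2b, and hashToNumbers is only a guess at hashToNumberSequence's role:

// Editor's sketch (not part of the package).
import { createHash } from "node:crypto";

function hashToNumbers(entropy: Uint8Array, count: number): number[] {
  const out: number[] = [];
  // Hash (entropy || counter) repeatedly and cut each digest into u32s.
  for (let i = 0; out.length < count; i++) {
    const digest = createHash("sha256").update(entropy).update(Buffer.from([i])).digest();
    for (let o = 0; o + 4 <= digest.length && out.length < count; o += 4) {
      out.push(digest.readUInt32LE(o));
    }
  }
  return out;
}

function fisherYatesSketch<T>(arr: T[], entropy: Uint8Array): T[] {
  const result = [...arr];
  const randomNumbers = hashToNumbers(entropy, result.length);
  // Classic walk from the end; each index is drawn modulo the items left.
  for (let i = result.length - 1; i > 0; i--) {
    const j = randomNumbers[result.length - 1 - i] % (i + 1);
    [result[i], result[j]] = [result[j], result[i]];
  }
  return result;
}

console.log(fisherYatesSketch([1, 2, 3, 4, 5], new Uint8Array(32)));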
@@ -18909,6 +18994,7 @@ declare namespace index$2 {
 declare class JsonServiceInfo {
   static fromJson = json.object<JsonServiceInfo, ServiceAccountInfo>(
     {
+      ...(Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) ? { version: "number" } : {}),
       code_hash: fromJson.bytes32(),
       balance: json.fromNumber((x) => tryAsU64(x)),
       min_item_gas: json.fromNumber((x) => tryAsServiceGas(x)),
@@ -18947,6 +19033,7 @@ declare class JsonServiceInfo {
     },
   );
 
+  version?: number;
   code_hash!: CodeHash;
   balance!: U64;
   min_item_gas!: ServiceGas;
@@ -18993,6 +19080,19 @@ declare const lookupMetaFromJson = json.object<JsonLookupMeta, LookupHistoryItem
   ({ key, value }) => new LookupHistoryItem(key.hash, key.length, value),
 );
 
+declare const preimageStatusFromJson = json.object<JsonPreimageStatus, LookupHistoryItem>(
+  {
+    hash: fromJson.bytes32(),
+    status: json.array("number"),
+  },
+  ({ hash, status }) => new LookupHistoryItem(hash, tryAsU32(0), status),
+);
+
+type JsonPreimageStatus = {
+  hash: PreimageHash;
+  status: LookupHistorySlots;
+};
+
 type JsonLookupMeta = {
   key: {
     hash: PreimageHash;
@@ -19005,21 +19105,34 @@ declare class JsonService {
   static fromJson = json.object<JsonService, InMemoryService>(
     {
       id: "number",
-      data:
-
-
-
-
+      data: Compatibility.isLessThan(GpVersion.V0_7_1)
+        ? {
+            service: JsonServiceInfo.fromJson,
+            preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
+            storage: json.optional(json.array(JsonStorageItem.fromJson)),
+            lookup_meta: json.optional(json.array(lookupMetaFromJson)),
+          }
+        : {
+            service: JsonServiceInfo.fromJson,
+            storage: json.optional(json.array(JsonStorageItem.fromJson)),
+            preimages_blob: json.optional(json.array(JsonPreimageItem.fromJson)),
+            preimages_status: json.optional(json.array(preimageStatusFromJson)),
+          },
     },
     ({ id, data }) => {
+      const preimages = HashDictionary.fromEntries(
+        (data.preimages ?? data.preimages_blob ?? []).map((x) => [x.hash, x]),
+      );
+
      const lookupHistory = HashDictionary.new<PreimageHash, LookupHistoryItem[]>();
-
+
+      for (const item of data.lookup_meta ?? data.preimages_status ?? []) {
        const data = lookupHistory.get(item.hash) ?? [];
-
+        const length = tryAsU32(preimages.get(item.hash)?.blob.length ?? item.length);
+        data.push(new LookupHistoryItem(item.hash, length, item.slots));
        lookupHistory.set(item.hash, data);
      }
-
+
      const storage = new Map<string, StorageItem>();
 
      const entries = (data.storage ?? []).map(({ key, value }) => {
@@ -19046,6 +19159,8 @@ declare class JsonService {
   preimages?: JsonPreimageItem[];
   storage?: JsonStorageItem[];
   lookup_meta?: LookupHistoryItem[];
+  preimages_blob?: JsonPreimageItem[];
+  preimages_status?: LookupHistoryItem[];
 };
 }
 
@@ -19055,8 +19170,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
   timeout: "number",
 },
 ({ report, timeout }) => {
-
-  return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
+  return AvailabilityAssignment.create({ workReport: report, timeout });
 },
 );
 
@@ -19277,8 +19391,12 @@ declare class JsonServiceStatistics {
   extrinsic_count: "number",
   accumulate_count: "number",
   accumulate_gas_used: json.fromNumber(tryAsServiceGas),
-
-
+  ...(Compatibility.isLessThan(GpVersion.V0_7_1)
+    ? {
+        on_transfers_count: "number",
+        on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
+      }
+    : {}),
 },
 ({
   provided_count,
@@ -19305,8 +19423,8 @@ declare class JsonServiceStatistics {
   extrinsicCount: extrinsic_count,
   accumulateCount: accumulate_count,
   accumulateGasUsed: accumulate_gas_used,
-  onTransfersCount: on_transfers_count,
-  onTransfersGasUsed: on_transfers_gas_used,
+  onTransfersCount: on_transfers_count ?? tryAsU32(0),
+  onTransfersGasUsed: on_transfers_gas_used ?? tryAsServiceGas(0),
 });
 },
 );
@@ -19321,8 +19439,8 @@ declare class JsonServiceStatistics {
   extrinsic_count!: U16;
   accumulate_count!: U32;
   accumulate_gas_used!: ServiceGas;
-  on_transfers_count
-  on_transfers_gas_used
+  on_transfers_count?: U32;
+  on_transfers_gas_used?: ServiceGas;
 }
 
 type ServiceStatisticsEntry = {
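Since 0.7.1 the on-transfer statistics are absent from the wire format, so the JSON parser above defaults them with ??. The same defaulting in isolation:

// Editor's sketch (not part of the package): one in-memory shape for two formats.
type WireStats = { accumulate_count: number; on_transfers_count?: number };
type Stats = { accumulateCount: number; onTransfersCount: number };

function fromWire(wire: WireStats): Stats {
  return {
    accumulateCount: wire.accumulate_count,
    onTransfersCount: wire.on_transfers_count ?? 0, // deprecated since 0.7.1
  };
}

console.log(fromWire({ accumulate_count: 3 }));                        // { accumulateCount: 3, onTransfersCount: 0 }
console.log(fromWire({ accumulate_count: 3, on_transfers_count: 1 })); // keeps an explicit pre-0.7.1 value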
@@ -19394,8 +19512,9 @@ type JsonStateDump = {
   tau: State["timeslot"];
   chi: {
     chi_m: PrivilegedServices["manager"];
-    chi_a: PrivilegedServices["
-    chi_v: PrivilegedServices["
+    chi_a: PrivilegedServices["assigners"];
+    chi_v: PrivilegedServices["delegator"];
+    chi_r?: PrivilegedServices["registrar"];
     chi_g: PrivilegedServices["autoAccumulateServices"] | null;
   };
   pi: JsonStatisticsData;
@@ -19428,6 +19547,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
   chi_m: "number",
   chi_a: json.array("number"),
   chi_v: "number",
+  chi_r: json.optional("number"),
   chi_g: json.nullable(
     json.array({
       service: "number",
@@ -19460,6 +19580,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
   theta,
   accounts,
 }): InMemoryState => {
+  if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) && chi.chi_r === undefined) {
+    throw new Error("Registrar is required in Privileges GP ^0.7.1");
+  }
   return InMemoryState.create({
     authPools: tryAsPerCore(
       alpha.map((perCore) => {
@@ -19493,8 +19616,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
   timeslot: tau,
   privilegedServices: PrivilegedServices.create({
     manager: chi.chi_m,
-
-
+    assigners: chi.chi_a,
+    delegator: chi.chi_v,
+    registrar: chi.chi_r ?? tryAsServiceId(2 ** 32 - 1),
     autoAccumulateServices: chi.chi_g ?? [],
   }),
   statistics: JsonStatisticsData.toStatisticsData(spec, pi),
@@ -19517,6 +19641,7 @@ declare const index$1_JsonDisputesRecords: typeof JsonDisputesRecords;
|
|
|
19517
19641
|
type index$1_JsonLookupMeta = JsonLookupMeta;
|
|
19518
19642
|
type index$1_JsonPreimageItem = JsonPreimageItem;
|
|
19519
19643
|
declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
|
|
19644
|
+
type index$1_JsonPreimageStatus = JsonPreimageStatus;
|
|
19520
19645
|
type index$1_JsonRecentBlockState = JsonRecentBlockState;
|
|
19521
19646
|
type index$1_JsonRecentBlocks = JsonRecentBlocks;
|
|
19522
19647
|
type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
|
|
@@ -19541,6 +19666,7 @@ declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
 declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
 declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
 declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
+declare const index$1_preimageStatusFromJson: typeof preimageStatusFromJson;
 declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
 declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
 declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
@@ -19548,8 +19674,8 @@ declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEn
 declare const index$1_ticketFromJson: typeof ticketFromJson;
 declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
 declare namespace index$1 {
-  export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
-  export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
+  export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_preimageStatusFromJson as preimageStatusFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
+  export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonPreimageStatus as JsonPreimageStatus, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
 }

 /** Helper function to create most used hashes in the block */
@@ -19557,7 +19683,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   constructor(
     private readonly context: ChainSpec,
     private readonly keccakHasher: KeccakHasher,
-
+    public readonly blake2b: Blake2b,
   ) {}

   /** Concatenates two hashes and hash this concatenation */
@@ -19571,7 +19697,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {

   /** Creates hash from the block header view */
   header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
-    return new WithHash(blake2b.hashBytes(header.encoded()
+    return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
   }

   /**
@@ -19585,7 +19711,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
       .view()
       .map((g) => g.view())
       .map((guarantee) => {
-        const reportHash = blake2b.hashBytes(guarantee.report.encoded()
+        const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
         return BytesBlob.blobFromParts([
           reportHash.raw,
           guarantee.slot.encoded().raw,
@@ -19595,15 +19721,15 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {

     const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);

-    const et = blake2b.hashBytes(extrinsicView.tickets.encoded()
-    const ep = blake2b.hashBytes(extrinsicView.preimages.encoded()
-    const eg = blake2b.hashBytes(guaranteeBlob
-    const ea = blake2b.hashBytes(extrinsicView.assurances.encoded()
-    const ed = blake2b.hashBytes(extrinsicView.disputes.encoded()
+    const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
+    const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
+    const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
+    const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
+    const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();

     const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);

-    return new WithHashAndBytes(blake2b.hashBytes(encoded
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
   }

   /** Creates hash for given WorkPackage */
@@ -19614,7 +19740,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
     // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
     const encoded = Encoder.encodeObject(codec, data, this.context);
-    return new WithHashAndBytes(blake2b.hashBytes(encoded
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
   }
 }

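Throughout TransitionHasher, the free-standing blake2b binding is replaced with a constructor-injected this.blake2b, so the hasher instance travels with the object instead of living as module state. A minimal sketch of the resulting shape; the Hasher interface and HeaderHasher class are illustrative stand-ins, not this package's types.

    interface Hasher {
      hashBytes(data: Uint8Array): Uint8Array;
    }

    class HeaderHasher {
      // The hasher is an explicit dependency: the caller decides which
      // (possibly async-initialized) instance to pass in, and tests can
      // substitute a stub without touching module-level state.
      constructor(public readonly blake2b: Hasher) {}

      header(encoded: Uint8Array): Uint8Array {
        return this.blake2b.hashBytes(encoded);
      }
    }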
@@ -19635,7 +19761,10 @@ declare enum PreimagesErrorCode {

 // TODO [SeKo] consider whether this module is the right place to remove expired preimages
 declare class Preimages {
-  constructor(
+  constructor(
+    public readonly state: PreimagesState,
+    public readonly blake2b: Blake2b,
+  ) {}

   integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
     // make sure lookup extrinsics are sorted and unique
@@ -19664,7 +19793,7 @@ declare class Preimages {
     // select preimages for integration
     for (const preimage of preimages) {
       const { requester, blob } = preimage;
-      const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
+      const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();

       const service = this.state.getService(requester);
       if (service === null) {
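Preimages.integrate keys every supplied blob by the hash of its own bytes, which is what makes a preimage verifiable: a requester can only satisfy a lookup whose hash matches data that was actually supplied. A minimal sketch of that content-addressing step, using Node's built-in sha256 purely as a stand-in for the package's Blake2b; the hashOf helper and store map are hypothetical.

    import { createHash } from "node:crypto";

    // Stand-in digest; the package computes this.blake2b.hashBytes(blob).
    const hashOf = (blob: Uint8Array): string =>
      createHash("sha256").update(blob).digest("hex");

    const store = new Map<string, Uint8Array>();

    function integrate(blob: Uint8Array): void {
      // The key is derived from the blob itself, so a lookup by a requested
      // hash can only ever return the exact bytes that were committed to.
      store.set(hashOf(blob), blob);
    }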
@@ -19695,156 +19824,6 @@ declare class Preimages {
   }
 }

-declare enum ServiceExecutorError {
-  NoLookup = 0,
-  NoState = 1,
-  NoServiceCode = 2,
-  ServiceCodeMismatch = 3,
-}
-
-declare class WorkPackageExecutor {
-  constructor(
-    private readonly blocks: BlocksDb,
-    private readonly state: StatesDb,
-    private readonly hasher: TransitionHasher,
-  ) {}
-
-  // TODO [ToDr] this while thing should be triple-checked with the GP.
-  // I'm currently implementing some dirty version for the demo.
-  async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
-    const headerHash = pack.context.lookupAnchor;
-    // execute authorisation first or is it already executed and we just need to check it?
-    const authExec = this.getServiceExecutor(
-      // TODO [ToDr] should this be anchor or lookupAnchor?
-      headerHash,
-      pack.authCodeHost,
-      pack.authCodeHash,
-    );
-
-    if (authExec.isError) {
-      // TODO [ToDr] most likely shouldn't be throw.
-      throw new Error(`Could not get authorization executor: ${authExec.error}`);
-    }
-
-    const pvm = authExec.ok;
-    const authGas = tryAsGas(15_000n);
-    const result = await pvm.run(pack.parametrization, authGas);
-
-    if (!result.isEqualTo(pack.authorization)) {
-      throw new Error("Authorization is invalid.");
-    }
-
-    const results: WorkResult[] = [];
-    for (const item of pack.items) {
-      const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
-      if (exec.isError) {
-        throw new Error(`Could not get item executor: ${exec.error}`);
-      }
-      const pvm = exec.ok;
-
-      const gasRatio = tryAsServiceGas(3_000n);
-      const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
-      results.push(
-        WorkResult.create({
-          serviceId: item.service,
-          codeHash: item.codeHash,
-          payloadHash: blake2b.hashBytes(item.payload),
-          gas: gasRatio,
-          result: new WorkExecResult(WorkExecResultKind.ok, ret),
-          load: WorkRefineLoad.create({
-            gasUsed: tryAsServiceGas(5),
-            importedSegments: tryAsU32(0),
-            exportedSegments: tryAsU32(0),
-            extrinsicSize: tryAsU32(0),
-            extrinsicCount: tryAsU32(0),
-          }),
-        }),
-      );
-    }
-
-    const workPackage = this.hasher.workPackage(pack);
-    const workPackageSpec = WorkPackageSpec.create({
-      hash: workPackage.hash,
-      length: tryAsU32(workPackage.encoded.length),
-      erasureRoot: Bytes.zero(HASH_SIZE),
-      exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
-      exportsCount: tryAsU16(0),
-    });
-    const coreIndex = tryAsCoreIndex(0);
-    const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
-
-    const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
-
-    return Promise.resolve(
-      WorkReport.create({
-        workPackageSpec,
-        context: pack.context,
-        coreIndex,
-        authorizerHash,
-        authorizationOutput: pack.authorization,
-        segmentRootLookup: [],
-        results: workResults,
-        authorizationGasUsed: tryAsServiceGas(0),
-      }),
-    );
-  }
-
-  getServiceExecutor(
-    lookupAnchor: HeaderHash,
-    serviceId: ServiceId,
-    expectedCodeHash: CodeHash,
-  ): Result$2<PvmExecutor, ServiceExecutorError> {
-    const header = this.blocks.getHeader(lookupAnchor);
-    if (header === null) {
-      return Result.error(ServiceExecutorError.NoLookup);
-    }
-
-    const state = this.state.getState(lookupAnchor);
-    if (state === null) {
-      return Result.error(ServiceExecutorError.NoState);
-    }
-
-    const service = state.getService(serviceId);
-    const serviceCodeHash = service?.getInfo().codeHash ?? null;
-    if (serviceCodeHash === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
-      return Result.error(ServiceExecutorError.ServiceCodeMismatch);
-    }
-
-    const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
-    if (serviceCode === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    return Result.ok(new PvmExecutor(serviceCode));
-  }
-}
-
-declare class PvmExecutor {
-  private readonly pvm: HostCalls;
-  private hostCalls = new HostCallsManager({ missing: new Missing() });
-  private pvmInstanceManager = new PvmInstanceManager(4);
-
-  constructor(private serviceCode: BytesBlob) {
-    this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
-  }
-
-  async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
-    const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
-
-    const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
-
-    if (result.hasMemorySlice()) {
-      return BytesBlob.blobFrom(result.memorySlice);
-    }
-
-    return BytesBlob.empty();
-  }
-}
-
 type index_Preimages = Preimages;
 declare const index_Preimages: typeof Preimages;
 type index_PreimagesErrorCode = PreimagesErrorCode;
@@ -19854,10 +19833,8 @@ type index_PreimagesState = PreimagesState;
 type index_PreimagesStateUpdate = PreimagesStateUpdate;
 type index_TransitionHasher = TransitionHasher;
 declare const index_TransitionHasher: typeof TransitionHasher;
-type index_WorkPackageExecutor = WorkPackageExecutor;
-declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
 declare namespace index {
-  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher
+  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
   export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
 }
