@typeberry/lib 0.1.3 → 0.2.0-b6e3410
This diff compares the contents of publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
- package/index.cjs +747 -1370
- package/index.d.ts +833 -798
- package/index.js +746 -1369
- package/package.json +1 -1
package/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 declare enum GpVersion {
   V0_6_7 = "0.6.7",
   V0_7_0 = "0.7.0",
-  V0_7_1 = "0.7.1
+  V0_7_1 = "0.7.1",
   V0_7_2 = "0.7.2-preview",
 }
 
@@ -11,12 +11,12 @@ declare enum TestSuite {
 }
 
 declare const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
-
-declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
-declare const DEFAULT_VERSION = GpVersion.V0_7_0;
+declare const DEFAULT_VERSION = GpVersion.V0_7_1;
 declare let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
 declare let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
 
+declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
+
 declare function parseCurrentVersion(env?: string): GpVersion | undefined {
   if (env === undefined) {
     return undefined;
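In effect, the default Gray Paper version moves from 0.7.0 to 0.7.1, while an explicit GP_VERSION environment variable still takes precedence. A minimal resolution sketch (illustrative only; `process.env` stands in for the `env` module used above):

const fromEnv = parseCurrentVersion(process.env.GP_VERSION); // undefined unless GP_VERSION is set to a known version
const effective = fromEnv ?? DEFAULT_VERSION;                // GpVersion.V0_7_1 after this release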
@@ -35,7 +35,9 @@ declare function parseCurrentVersion(env?: string): GpVersion | undefined {
 }
 
 declare function parseCurrentSuite(env?: string): TestSuite | undefined {
-  if (env === undefined)
+  if (env === undefined) {
+    return undefined;
+  }
   switch (env) {
     case TestSuite.W3F_DAVXY:
     case TestSuite.JAMDUNA:
@@ -420,6 +422,20 @@ declare const Result$2 = {
   },
 };
 
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+declare const MAX_LENGTH$1 = 2145386496;
+
+declare function safeAllocUint8Array(length: number) {
+  if (length > MAX_LENGTH) {
+    // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+    console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+  }
+  return new Uint8Array(Math.min(MAX_LENGTH, length));
+}
+
 /**
  * Utilities for tests.
  */
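Callers of the new safeAllocUint8Array get a clamped buffer instead of an exception when the request exceeds the ~2 GB ArrayBuffer ceiling, so consuming code should tolerate a shorter allocation than requested. A behavioural sketch based on the declarations above:

const ok = safeAllocUint8Array(64);          // Uint8Array of length 64
const capped = safeAllocUint8Array(2 ** 32); // logs a warning, length is clamped
console.log(capped.length);                  // 2145386496, not 4294967296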
@@ -573,8 +589,12 @@ declare function deepEqual<T>(
     const aKey = `${a.key}`;
     const bKey = `${b.key}`;
 
-    if (aKey < bKey)
-
+    if (aKey < bKey) {
+      return -1;
+    }
+    if (bKey < aKey) {
+      return 1;
+    }
     return 0;
   });
 };
@@ -755,11 +775,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
 declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
 declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
 declare const index$u_resultToString: typeof resultToString;
+declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
 declare const index$u_seeThrough: typeof seeThrough;
 declare const index$u_trimStack: typeof trimStack;
 declare const index$u_workspacePathFix: typeof workspacePathFix;
 declare namespace index$u {
-  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
+  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
   export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
 }
 
@@ -929,7 +950,7 @@ declare class BytesBlob {
   static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
     const vArr = v instanceof Uint8Array ? [v] : v;
     const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-    const buffer =
+    const buffer = safeAllocUint8Array(totalLength);
     let offset = 0;
     for (const r of vArr) {
       buffer.set(r, offset);
@@ -1012,7 +1033,7 @@ declare class Bytes<T extends number> extends BytesBlob {
 
   /** Create an empty [`Bytes<X>`] of given length. */
   static zero<X extends number>(len: X): Bytes<X> {
-    return new Bytes(
+    return new Bytes(safeAllocUint8Array(len), len);
   }
 
   // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1154,7 @@ declare class BitVec {
    * Create new [`BitVec`] with all values set to `false`.
    */
   static empty(bitLength: number) {
-    const data =
+    const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
     return new BitVec(data, bitLength);
   }
 
@@ -3461,6 +3482,99 @@ declare namespace index$q {
   export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
 }
 
+/**
+ * A utility class providing a readonly view over a portion of an array without copying it.
+ */
+declare class ArrayView<T> implements Iterable<T> {
+  private readonly source: T[];
+  public readonly length: number;
+
+  private constructor(
+    source: T[],
+    private readonly start: number,
+    private readonly end: number,
+  ) {
+    this.source = source;
+    this.length = end - start;
+  }
+
+  static from<T>(source: T[], start = 0, end = source.length): ArrayView<T> {
+    check`
+      ${start >= 0 && end <= source.length && start <= end}
+      Invalid start (${start})/end (${end}) for ArrayView
+    `;
+    return new ArrayView(source, start, end);
+  }
+
+  get(i: number): T {
+    check`
+      ${i >= 0 && i < this.length}
+      Index out of bounds: ${i} < ${this.length}
+    `;
+    return this.source[this.start + i];
+  }
+
+  subview(from: number, to: number = this.length): ArrayView<T> {
+    return ArrayView.from(this.source, this.start + from, this.start + to);
+  }
+
+  toArray(): T[] {
+    return this.source.slice(this.start, this.end);
+  }
+
+  *[Symbol.iterator](): Iterator<T> {
+    for (let i = this.start; i < this.end; i++) {
+      yield this.source[i];
+    }
+  }
+}
+
+type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
+type IDataType = string | Buffer | ITypedArray;
+
+type IHasher = {
+  /**
+   * Initializes hash state to default value
+   */
+  init: () => IHasher;
+  /**
+   * Updates the hash content with the given data
+   */
+  update: (data: IDataType) => IHasher;
+  /**
+   * Calculates the hash of all of the data passed to be hashed with hash.update().
+   * Defaults to hexadecimal string
+   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
+   * returns hexadecimal string
+   */
+  digest: {
+    (outputType: "binary"): Uint8Array;
+    (outputType?: "hex"): string;
+  };
+  /**
+   * Save the current internal state of the hasher for later resumption with load().
+   * Cannot be called before .init() or after .digest()
+   *
+   * Note that this state can include arbitrary information about the value being hashed (e.g.
+   * could include N plaintext bytes from the value), so needs to be treated as being as
+   * sensitive as the input value itself.
+   */
+  save: () => Uint8Array;
+  /**
+   * Resume a state that was created by save(). If this state was not created by a
+   * compatible build of hash-wasm, an exception will be thrown.
+   */
+  load: (state: Uint8Array) => IHasher;
+  /**
+   * Block size in bytes
+   */
+  blockSize: number;
+  /**
+   * Digest size in bytes
+   */
+  digestSize: number;
+};
+
 /**
  * Size of the output of the hash functions.
  *
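A usage sketch for the newly exported ArrayView (names exactly as declared above); note that only toArray() copies:

const data = [10, 20, 30, 40, 50];
const view = ArrayView.from(data, 1, 4); // window over 20, 30, 40; no copy
view.get(0);                             // 20
[...view.subview(1)];                    // [30, 40], still backed by `data`
view.toArray();                          // [20, 30, 40], this one allocates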
@@ -3516,144 +3630,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
   }
 }
 
-
-interface HashAllocator {
-  /** Return a new hash destination. */
-  emptyHash(): OpaqueHash;
-}
-
-/** The simplest allocator returning just a fresh copy of bytes each time. */
-declare class SimpleAllocator implements HashAllocator {
-  emptyHash(): OpaqueHash {
-    return Bytes.zero(HASH_SIZE);
-  }
-}
-
-/** An allocator that works by allocating larger (continuous) pages of memory. */
-declare class PageAllocator implements HashAllocator {
-  private page: Uint8Array = new Uint8Array(0);
-  private currentHash = 0;
+declare const zero$1 = Bytes.zero(HASH_SIZE);
 
-
-
-
-    this.resetPage();
+declare class Blake2b {
+  static async createHasher() {
+    return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
   }
 
-  private
-    const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
-    this.currentHash = 0;
-    this.page = new Uint8Array(pageSizeBytes);
-  }
-
-  emptyHash(): OpaqueHash {
-    const startIdx = this.currentHash * HASH_SIZE;
-    const endIdx = startIdx + HASH_SIZE;
+  private constructor(private readonly hasher: IHasher) {}
 
-
-
-
+  /**
+   * Hash given collection of blobs.
+   *
+   * If empty array is given a zero-hash is returned.
+   */
+  hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
+    if (r.length === 0) {
+      return zero.asOpaque();
     }
 
-
+    const hasher = this.hasher.init();
+    for (const v of r) {
+      hasher.update(v instanceof BytesBlob ? v.raw : v);
+    }
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
-}
-
-declare const defaultAllocator = new SimpleAllocator();
 
-/**
-
-
-
-
-
-  r: (BytesBlob | Uint8Array)[],
-  allocator: HashAllocator = defaultAllocator,
-): H {
-  const out = allocator.emptyHash();
-  if (r.length === 0) {
-    return out.asOpaque();
+  /** Hash given blob of bytes. */
+  hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
+    const hasher = this.hasher.init();
+    const bytes = blob instanceof BytesBlob ? blob.raw : blob;
+    hasher.update(bytes);
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
 
-
-
-
+  /** Convert given string into bytes and hash it. */
+  hashString(str: string) {
+    return this.hashBytes(BytesBlob.blobFromString(str));
   }
-  hasher?.digest(out.raw);
-  return out.asOpaque();
-}
-
-/** Hash given blob of bytes. */
-declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
-  const hasher = blake2b(HASH_SIZE);
-  const bytes = blob instanceof BytesBlob ? blob.raw : blob;
-  hasher?.update(bytes);
-  const out = allocator.emptyHash();
-  hasher?.digest(out.raw);
-  return out;
-}
-
-/** Convert given string into bytes and hash it. */
-declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
-  return hashBytes(BytesBlob.blobFromString(str), allocator);
-}
-
-declare const blake2b_hashBytes: typeof hashBytes;
-declare const blake2b_hashString: typeof hashString;
-declare namespace blake2b {
-  export {
-    hashBlobs$1 as hashBlobs,
-    blake2b_hashBytes as hashBytes,
-    blake2b_hashString as hashString,
-  };
 }
 
-type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
-type IDataType = string | Buffer | ITypedArray;
-
-type IHasher = {
-  /**
-   * Initializes hash state to default value
-   */
-  init: () => IHasher;
-  /**
-   * Updates the hash content with the given data
-   */
-  update: (data: IDataType) => IHasher;
-  /**
-   * Calculates the hash of all of the data passed to be hashed with hash.update().
-   * Defaults to hexadecimal string
-   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
-   * returns hexadecimal string
-   */
-  digest: {
-    (outputType: "binary"): Uint8Array;
-    (outputType?: "hex"): string;
-  };
-  /**
-   * Save the current internal state of the hasher for later resumption with load().
-   * Cannot be called before .init() or after .digest()
-   *
-   * Note that this state can include arbitrary information about the value being hashed (e.g.
-   * could include N plaintext bytes from the value), so needs to be treated as being as
-   * sensitive as the input value itself.
-   */
-  save: () => Uint8Array;
-  /**
-   * Resume a state that was created by save(). If this state was not created by a
-   * compatible build of hash-wasm, an exception will be thrown.
-   */
-  load: (state: Uint8Array) => IHasher;
-  /**
-   * Block size in bytes
-   */
-  blockSize: number;
-  /**
-   * Digest size in bytes
-   */
-  digestSize: number;
-};
-
 declare class KeccakHasher {
   static async create(): Promise<KeccakHasher> {
     return new KeccakHasher(await createKeccak(256));
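The free-function blake2b namespace (hashBytes/hashString with an optional allocator) is gone; hashing now goes through a Blake2b instance created once, asynchronously, on top of hash-wasm's createBLAKE2b. A migration sketch (assuming an async context):

const blake2b = await Blake2b.createHasher(); // one-time WASM initialisation
const h = blake2b.hashString("hello");        // previously: blake2b.hashString("hello", allocator?)
const combined = blake2b.hashBlobs([h.raw]);  // an empty input array returns the shared `zero` hash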
@@ -3681,15 +3697,15 @@ declare namespace keccak {
   };
 }
 
+// TODO [ToDr] (#213) this should most likely be moved to a separate
+// package to avoid pulling in unnecessary deps.
+
+type index$p_Blake2b = Blake2b;
+declare const index$p_Blake2b: typeof Blake2b;
 type index$p_Blake2bHash = Blake2bHash;
 type index$p_HASH_SIZE = HASH_SIZE;
-type index$p_HashAllocator = HashAllocator;
 type index$p_KeccakHash = KeccakHash;
 type index$p_OpaqueHash = OpaqueHash;
-type index$p_PageAllocator = PageAllocator;
-declare const index$p_PageAllocator: typeof PageAllocator;
-type index$p_SimpleAllocator = SimpleAllocator;
-declare const index$p_SimpleAllocator: typeof SimpleAllocator;
 type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
 type index$p_TruncatedHash = TruncatedHash;
 type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3697,12 +3713,10 @@ declare const index$p_WithHash: typeof WithHash;
 type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
 declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
 declare const index$p_ZERO_HASH: typeof ZERO_HASH;
-declare const index$p_blake2b: typeof blake2b;
-declare const index$p_defaultAllocator: typeof defaultAllocator;
 declare const index$p_keccak: typeof keccak;
 declare namespace index$p {
-  export { index$
-  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$
+  export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
+  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
 }
 
 /** Immutable view of the `HashDictionary`. */
@@ -4479,6 +4493,8 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
   }
 }
 
+type index$o_ArrayView<T> = ArrayView<T>;
+declare const index$o_ArrayView: typeof ArrayView;
 type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
 declare const index$o_FixedSizeArray: typeof FixedSizeArray;
 type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
@@ -4506,7 +4522,7 @@ type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDic
 declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
 declare const index$o_asKnownSize: typeof asKnownSize;
 declare namespace index$o {
-  export { index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
+  export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
   export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
 }
 
@@ -4735,7 +4751,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
     (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
     0,
   );
-  const data =
+  const data = safeAllocUint8Array(dataLength);
 
   let offset = 0;
 
@@ -4825,22 +4841,16 @@ declare function trivialSeed(s: U32): KeySeed {
  * Derives a Ed25519 secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveEd25519SecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): Ed25519SecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
  * Derives a Bandersnatch secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveBandersnatchSecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): BandersnatchSecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
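Both derivation helpers now take the Blake2b instance as an explicit second argument instead of an optional SimpleAllocator, so callers must construct the hasher first and thread it through. A sketch using trivialSeed from the surrounding context:

const blake2b = await Blake2b.createHasher();
const seed = trivialSeed(tryAsU32(0));
const edSecret = deriveEd25519SecretKey(seed, blake2b);
const bandersnatchSecret = deriveBandersnatchSecretKey(seed, blake2b);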
@@ -8373,7 +8383,7 @@ declare enum NodeType {
 declare class TrieNode {
   constructor(
     /** Exactly 512 bits / 64 bytes */
-    public readonly raw: Uint8Array =
+    public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
   ) {}
 
   /** Returns the type of the node */
@@ -9111,21 +9121,6 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
   return Ordering.Equal;
 }
 
-declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
-  Descriptor.withView(
-    val.name,
-    val.sizeHint,
-    (e, elem) => val.encode(e, elem.data),
-    (d): WithHash<H, T> => {
-      const decoder2 = d.clone();
-      const encoded = val.skipEncoded(decoder2);
-      const hash = blake2b.hashBytes(encoded);
-      return new WithHash(hash.asOpaque(), val.decode(d));
-    },
-    val.skip,
-    val.View,
-  );
-
 /**
  * Assignment of particular work report to a core.
  *
@@ -9136,7 +9131,7 @@ declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>
  */
 declare class AvailabilityAssignment extends WithDebug {
   static Codec = codec.Class(AvailabilityAssignment, {
-    workReport:
+    workReport: WorkReport.Codec,
     timeout: codec.u32.asOpaque<TimeSlot>(),
   });
 
@@ -9146,7 +9141,7 @@ declare class AvailabilityAssignment extends WithDebug {
 
   private constructor(
     /** Work report assigned to a core. */
-    public readonly workReport:
+    public readonly workReport: WorkReport,
     /** Time slot at which the report becomes obsolete. */
     public readonly timeout: TimeSlot,
   ) {
@@ -9196,6 +9191,11 @@ declare class DisputesRecords {
     return new DisputesRecords(goodSet, badSet, wonkySet, punishSet);
   }
 
+  private readonly goodSetDict: ImmutableHashSet<WorkReportHash>;
+  private readonly badSetDict: ImmutableHashSet<WorkReportHash>;
+  private readonly wonkySetDict: ImmutableHashSet<WorkReportHash>;
+  private readonly punishSetDict: ImmutableHashSet<Ed25519Key>;
+
   private constructor(
     /** `goodSet`: all work-reports hashes which were judged to be correct */
     public readonly goodSet: ImmutableSortedSet<WorkReportHash>,
@@ -9205,7 +9205,21 @@ declare class DisputesRecords {
     public readonly wonkySet: ImmutableSortedSet<WorkReportHash>,
     /** `punishSet`: set of Ed25519 keys representing validators which were found to have misjudged a work-report */
     public readonly punishSet: ImmutableSortedSet<Ed25519Key>,
-  ) {
+  ) {
+    this.goodSetDict = HashSet.from(goodSet.array);
+    this.badSetDict = HashSet.from(badSet.array);
+    this.wonkySetDict = HashSet.from(wonkySet.array);
+    this.punishSetDict = HashSet.from(punishSet.array);
+  }
+
+  public asDictionaries() {
+    return {
+      goodSet: this.goodSetDict,
+      badSet: this.badSetDict,
+      wonkySet: this.wonkySetDict,
+      punishSet: this.punishSetDict,
+    };
+  }
 
   static fromSortedArrays({
     goodSet,
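DisputesRecords now builds hash-set dictionaries alongside the sorted sets at construction time, trading a one-time cost for constant-time membership queries instead of binary search over the sorted arrays. A lookup sketch (assuming ImmutableHashSet exposes a has-style membership check; the exact method name is not shown in this diff):

const { badSet, wonkySet } = disputes.asDictionaries();
// Hypothetical membership check against the prebuilt dictionaries:
if (badSet.has(workReportHash) || wonkySet.has(workReportHash)) {
  // the report was already judged bad or wonky; reject it
}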
@@ -9231,8 +9245,6 @@ declare function hashComparator<V extends OpaqueHash>(a: V, b: V) {
   return a.compare(b);
 }
 
-// TODO [ToDr] Not sure where these should live yet :(
-
 /**
  * `J`: The maximum sum of dependency items in a work-report.
  *
@@ -9286,57 +9298,275 @@ declare class NotYetAccumulatedReport extends WithDebug {
   }
 }
 
-/**
-
-
-
-
-
+/**
+ * `B_S`: The basic minimum balance which all services require.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
+ */
+declare const BASE_SERVICE_BALANCE = 100n;
+/**
+ * `B_I`: The additional minimum balance required per item of elective service state.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
+ */
+declare const ELECTIVE_ITEM_BALANCE = 10n;
+/**
+ * `B_L`: The additional minimum balance required per octet of elective service state.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
+ */
+declare const ELECTIVE_BYTE_BALANCE = 1n;
 
-
-
-
+declare const zeroSizeHint: SizeHint = {
+  bytes: 0,
+  isExact: true,
+};
 
-
-
-
-
-
-
-
+/** 0-byte read, return given default value */
+declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
+  Descriptor.new<T>(
+    "ignoreValue",
+    zeroSizeHint,
+    (_e, _v) => {},
+    (_d) => defaultValue,
+    (_s) => {},
+  );
+
+/** Encode and decode object with leading version number. */
+declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
+  Descriptor.new<T>(
+    "withVersion",
+    {
+      bytes: val.sizeHint.bytes + 8,
+      isExact: false,
+    },
+    (e, v) => {
+      e.varU64(0n);
+      val.encode(e, v);
+    },
+    (d) => {
+      const version = d.varU64();
+      if (version !== 0n) {
+        throw new Error("Non-zero version is not supported!");
+      }
+      return val.decode(d);
+    },
+    (s) => {
+      s.varU64();
+      val.skip(s);
+    },
+  );
 
 /**
- *
+ * Service account details.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
  */
-declare class
-  static Codec = codec.Class(
-
-
-
-
+declare class ServiceAccountInfo extends WithDebug {
+  static Codec = codec.Class(ServiceAccountInfo, {
+    codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
+    balance: codec.u64,
+    accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+    onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+    storageUtilisationBytes: codec.u64,
+    gratisStorage: codec.u64,
+    storageUtilisationCount: codec.u32,
+    created: codec.u32.convert((x) => x, tryAsTimeSlot),
+    lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
+    parentService: codec.u32.convert((x) => x, tryAsServiceId),
   });
 
-  static create(
-  return new
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  static create(a: CodecRecord<ServiceAccountInfo>) {
+    return new ServiceAccountInfo(
+      a.codeHash,
+      a.balance,
+      a.accumulateMinGas,
+      a.onTransferMinGas,
+      a.storageUtilisationBytes,
+      a.gratisStorage,
+      a.storageUtilisationCount,
+      a.created,
+      a.lastAccumulation,
+      a.parentService,
+    );
+  }
+
+  /**
+   * `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
+   * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
+   */
+  static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
+    const storageCost =
+      BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
+
+    if (storageCost < 0n) {
+      return tryAsU64(0);
+    }
+
+    if (storageCost >= 2n ** 64n) {
+      return tryAsU64(2n ** 64n - 1n);
+    }
+
+    return tryAsU64(storageCost);
+  }
+
+  private constructor(
+    /** `a_c`: Hash of the service code. */
+    public readonly codeHash: CodeHash,
+    /** `a_b`: Current account balance. */
+    public readonly balance: U64,
+    /** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
+    public readonly accumulateMinGas: ServiceGas,
+    /** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
+    public readonly onTransferMinGas: ServiceGas,
+    /** `a_o`: Total number of octets in storage. */
+    public readonly storageUtilisationBytes: U64,
+    /** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
+    public readonly gratisStorage: U64,
+    /** `a_i`: Number of items in storage. */
+    public readonly storageUtilisationCount: U32,
+    /** `a_r`: Creation account time slot. */
+    public readonly created: TimeSlot,
+    /** `a_a`: Most recent accumulation time slot. */
+    public readonly lastAccumulation: TimeSlot,
+    /** `a_p`: Parent service ID. */
+    public readonly parentService: ServiceId,
+  ) {
+    super();
+  }
+}
+
+declare class PreimageItem extends WithDebug {
+  static Codec = codec.Class(PreimageItem, {
+    hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
+    blob: codec.blob,
+  });
+
+  static create({ hash, blob }: CodecRecord<PreimageItem>) {
+    return new PreimageItem(hash, blob);
+  }
+
+  private constructor(
+    readonly hash: PreimageHash,
+    readonly blob: BytesBlob,
+  ) {
+    super();
+  }
+}
+
+type StorageKey = Opaque<BytesBlob, "storage key">;
+
+declare class StorageItem extends WithDebug {
+  static Codec = codec.Class(StorageItem, {
+    key: codec.blob.convert(
+      (i) => i,
+      (o) => asOpaqueType(o),
+    ),
+    value: codec.blob,
+  });
+
+  static create({ key, value }: CodecRecord<StorageItem>) {
+    return new StorageItem(key, value);
+  }
+
+  private constructor(
+    readonly key: StorageKey,
+    readonly value: BytesBlob,
+  ) {
+    super();
+  }
+}
+
+declare const MAX_LOOKUP_HISTORY_SLOTS = 3;
+type LookupHistorySlots = KnownSizeArray<TimeSlot, `0-${typeof MAX_LOOKUP_HISTORY_SLOTS} timeslots`>;
+declare function tryAsLookupHistorySlots(items: readonly TimeSlot[]): LookupHistorySlots {
+  const knownSize = asKnownSize(items) as LookupHistorySlots;
+  if (knownSize.length > MAX_LOOKUP_HISTORY_SLOTS) {
+    throw new Error(`Lookup history items must contain 0-${MAX_LOOKUP_HISTORY_SLOTS} timeslots.`);
+  }
+  return knownSize;
+}
+
+/** https://graypaper.fluffylabs.dev/#/5f542d7/115400115800 */
+declare class LookupHistoryItem {
+  constructor(
+    public readonly hash: PreimageHash,
+    public readonly length: U32,
+    /**
+     * Preimage availability history as a sequence of time slots.
+     * See PreimageStatus and the following GP fragment for more details.
+     * https://graypaper.fluffylabs.dev/#/5f542d7/11780011a500 */
+    public readonly slots: LookupHistorySlots,
+  ) {}
+
+  static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
+    if ("slots" in item) {
+      return item.slots.length === 0;
+    }
+    return item.length === 0;
+  }
+}
+
+/** Dictionary entry of services that auto-accumulate every block. */
+declare class AutoAccumulate {
+  static Codec = codec.Class(AutoAccumulate, {
+    service: codec.u32.asOpaque<ServiceId>(),
+    gasLimit: codec.u64.asOpaque<ServiceGas>(),
+  });
+
+  static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
+    return new AutoAccumulate(service, gasLimit);
+  }
+
+  private constructor(
+    /** Service id that auto-accumulates. */
+    readonly service: ServiceId,
+    /** Gas limit for auto-accumulation. */
+    readonly gasLimit: ServiceGas,
+  ) {}
+}
+
+/**
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
+ */
+declare class PrivilegedServices {
+  /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
+  static Codec = codec.Class(PrivilegedServices, {
+    manager: codec.u32.asOpaque<ServiceId>(),
+    assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
+    delegator: codec.u32.asOpaque<ServiceId>(),
+    registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
+      ? codec.u32.asOpaque<ServiceId>()
+      : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
+    autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
+  });
+
+  static create(a: CodecRecord<PrivilegedServices>) {
+    return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
+  }
+
+  private constructor(
+    /**
+     * `χ_M`: Manages alteration of χ from block to block,
+     * as well as bestow services with storage deposit credits.
+     * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
+     */
+    readonly manager: ServiceId,
+    /** `χ_V`: Managers validator keys. */
+    readonly delegator: ServiceId,
+    /**
+     * `χ_R`: Manages the creation of services in protected range.
+     *
+     * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
+     */
+    readonly registrar: ServiceId,
+    /** `χ_A`: Manages authorization queue one for each core. */
+    readonly assigners: PerCore<ServiceId>,
+    /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
+    readonly autoAccumulateServices: readonly AutoAccumulate[],
+  ) {}
+}
+
 declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
 
 /** Merkle Mountain Range peaks. */
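The threshold formula declared above, `a_t = max(0, B_S + B_I * a_i + B_L * a_o - a_f)`, combined with the constants B_S = 100, B_I = 10, B_L = 1, gives a quick worked check: a service with 2 storage items, 100 octets, and no gratis storage needs 100 + 10*2 + 1*100 = 220. As a sketch:

const threshold = ServiceAccountInfo.calculateThresholdBalance(
  tryAsU32(2),    // a_i: items
  tryAsU64(100n), // a_o: octets
  tryAsU64(0n),   // a_f: gratis storage
);
// threshold == 220n; a gratisStorage of 300n or more would clamp the result to 0n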
@@ -9646,323 +9876,139 @@ declare class RecentBlocksHistory extends WithDebug {
|
|
|
9646
9876
|
declare const VALIDATOR_META_BYTES = 128;
|
|
9647
9877
|
type VALIDATOR_META_BYTES = typeof VALIDATOR_META_BYTES;
|
|
9648
9878
|
|
|
9649
|
-
/**
|
|
9650
|
-
* Details about validators' identity.
|
|
9651
|
-
*
|
|
9652
|
-
* https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
|
|
9653
|
-
*/
|
|
9654
|
-
declare class ValidatorData extends WithDebug {
|
|
9655
|
-
static Codec = codec.Class(ValidatorData, {
|
|
9656
|
-
bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
|
|
9657
|
-
ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
|
|
9658
|
-
bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
|
|
9659
|
-
metadata: codec.bytes(VALIDATOR_META_BYTES),
|
|
9660
|
-
});
|
|
9661
|
-
|
|
9662
|
-
static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
|
|
9663
|
-
return new ValidatorData(bandersnatch, ed25519, bls, metadata);
|
|
9664
|
-
}
|
|
9665
|
-
|
|
9666
|
-
private constructor(
|
|
9667
|
-
/** Bandersnatch public key. */
|
|
9668
|
-
public readonly bandersnatch: BandersnatchKey,
|
|
9669
|
-
/** ED25519 key data. */
|
|
9670
|
-
public readonly ed25519: Ed25519Key,
|
|
9671
|
-
/** BLS public key. */
|
|
9672
|
-
public readonly bls: BlsKey,
|
|
9673
|
-
/** Validator-defined additional metdata. */
|
|
9674
|
-
public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
|
|
9675
|
-
) {
|
|
9676
|
-
super();
|
|
9677
|
-
}
|
|
9678
|
-
}
|
|
9679
|
-
|
|
9680
|
-
declare enum SafroleSealingKeysKind {
|
|
9681
|
-
Tickets = 0,
|
|
9682
|
-
Keys = 1,
|
|
9683
|
-
}
|
|
9684
|
-
|
|
9685
|
-
type SafroleSealingKeys =
|
|
9686
|
-
| {
|
|
9687
|
-
kind: SafroleSealingKeysKind.Keys;
|
|
9688
|
-
keys: PerEpochBlock<BandersnatchKey>;
|
|
9689
|
-
}
|
|
9690
|
-
| {
|
|
9691
|
-
kind: SafroleSealingKeysKind.Tickets;
|
|
9692
|
-
tickets: PerEpochBlock<Ticket>;
|
|
9693
|
-
};
|
|
9694
|
-
|
|
9695
|
-
declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
|
|
9696
|
-
|
|
9697
|
-
declare class SafroleSealingKeysData extends WithDebug {
|
|
9698
|
-
static Codec = codecWithContext((context) => {
|
|
9699
|
-
return codec.custom<SafroleSealingKeys>(
|
|
9700
|
-
{
|
|
9701
|
-
name: "SafroleSealingKeys",
|
|
9702
|
-
sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
|
|
9703
|
-
},
|
|
9704
|
-
(e, x) => {
|
|
9705
|
-
e.varU32(tryAsU32(x.kind));
|
|
9706
|
-
if (x.kind === SafroleSealingKeysKind.Keys) {
|
|
9707
|
-
e.sequenceFixLen(codecBandersnatchKey, x.keys);
|
|
9708
|
-
} else {
|
|
9709
|
-
e.sequenceFixLen(Ticket.Codec, x.tickets);
|
|
9710
|
-
}
|
|
9711
|
-
},
|
|
9712
|
-
(d) => {
|
|
9713
|
-
const epochLength = context.epochLength;
|
|
9714
|
-
const kind = d.varU32();
|
|
9715
|
-
if (kind === SafroleSealingKeysKind.Keys) {
|
|
9716
|
-
const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
|
|
9717
|
-
return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
|
|
9718
|
-
}
|
|
9719
|
-
|
|
9720
|
-
if (kind === SafroleSealingKeysKind.Tickets) {
|
|
9721
|
-
const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
|
|
9722
|
-
return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
|
|
9723
|
-
}
|
|
9724
|
-
|
|
9725
|
-
throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
|
|
9726
|
-
},
|
|
9727
|
-
(s) => {
|
|
9728
|
-
const kind = s.decoder.varU32();
|
|
9729
|
-
if (kind === SafroleSealingKeysKind.Keys) {
|
|
9730
|
-
s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
|
|
9731
|
-
return;
|
|
9732
|
-
}
|
|
9733
|
-
if (kind === SafroleSealingKeysKind.Tickets) {
|
|
9734
|
-
s.sequenceFixLen(Ticket.Codec, context.epochLength);
|
|
9735
|
-
return;
|
|
9736
|
-
}
|
|
9737
|
-
|
|
9738
|
-
throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
|
|
9739
|
-
},
|
|
9740
|
-
);
|
|
9741
|
-
});
|
|
9742
|
-
|
|
9743
|
-
static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
|
|
9744
|
-
return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
|
|
9745
|
-
}
|
|
9746
|
-
|
|
9747
|
-
static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
|
|
9748
|
-
return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
|
|
9749
|
-
}
|
|
9750
|
-
|
|
9751
|
-
private constructor(
|
|
9752
|
-
readonly kind: SafroleSealingKeysKind,
|
|
9753
|
-
readonly keys?: PerEpochBlock<BandersnatchKey>,
|
|
9754
|
-
readonly tickets?: PerEpochBlock<Ticket>,
|
|
9755
|
-
) {
|
|
9756
|
-
super();
|
|
9757
|
-
}
|
|
9758
|
-
}
|
|
9759
|
-
|
|
9760
|
-
declare class SafroleData {
|
|
9761
|
-
static Codec = codec.Class(SafroleData, {
|
|
9762
|
-
nextValidatorData: codecPerValidator(ValidatorData.Codec),
|
|
9763
|
-
epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
|
|
9764
|
-
sealingKeySeries: SafroleSealingKeysData.Codec,
|
|
9765
|
-
ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
|
|
9766
|
-
});
|
|
9767
|
-
|
|
9768
|
-
static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
|
|
9769
|
-
return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
|
|
9770
|
-
}
|
|
9771
|
-
|
|
9772
|
-
private constructor(
|
|
9773
|
-
/** gamma_k */
|
|
9774
|
-
public readonly nextValidatorData: PerValidator<ValidatorData>,
|
|
9775
|
-
/** gamma_z */
|
|
9776
|
-
public readonly epochRoot: BandersnatchRingRoot,
|
|
9777
|
-
/** gamma_s */
|
|
9778
|
-
public readonly sealingKeySeries: SafroleSealingKeys,
|
|
9779
|
-
/** gamma_a */
|
|
9780
|
-
public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
|
|
9781
|
-
) {}
|
|
9782
|
-
}
|
|
9783
|
-
|
|
9784
|
-
/**
|
|
9785
|
-
* `B_S`: The basic minimum balance which all services require.
|
|
9786
|
-
*
|
|
9787
|
-
* https://graypaper.fluffylabs.dev/#/7e6ff6a/445800445800?v=0.6.7
|
|
9788
|
-
*/
|
|
9789
|
-
declare const BASE_SERVICE_BALANCE = 100n;
|
|
9790
|
-
/**
|
|
9791
|
-
* `B_I`: The additional minimum balance required per item of elective service state.
|
|
9792
|
-
*
|
|
9793
|
-
* https://graypaper.fluffylabs.dev/#/7e6ff6a/445000445000?v=0.6.7
|
|
9794
|
-
*/
|
|
9795
|
-
declare const ELECTIVE_ITEM_BALANCE = 10n;
|
|
9796
|
-
/**
|
|
9797
|
-
* `B_L`: The additional minimum balance required per octet of elective service state.
|
|
9798
|
-
*
|
|
9799
|
-
* https://graypaper.fluffylabs.dev/#/7e6ff6a/445400445400?v=0.6.7
|
|
9800
|
-
*/
|
|
9801
|
-
declare const ELECTIVE_BYTE_BALANCE = 1n;
|
|
9802
|
-
|
|
9803
|
-
declare const zeroSizeHint: SizeHint = {
|
|
9804
|
-
bytes: 0,
|
|
9805
|
-
isExact: true,
|
|
9806
|
-
};
|
|
9807
|
-
|
|
9808
|
-
/** 0-byte read, return given default value */
|
|
9809
|
-
declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
|
|
9810
|
-
Descriptor.new<T>(
|
|
9811
|
-
"ignoreValue",
|
|
9812
|
-
zeroSizeHint,
|
|
9813
|
-
(_e, _v) => {},
|
|
9814
|
-
(_d) => defaultValue,
|
|
9815
|
-
(_s) => {},
|
|
9816
|
-
);
|
|
9817
|
-
|
|
9818
|
-
/**
|
|
9819
|
-
* Service account details.
|
|
9820
|
-
*
|
|
9821
|
-
* https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
|
|
9822
|
-
*/
|
|
9823
|
-
declare class ServiceAccountInfo extends WithDebug {
|
|
9824
|
-
static Codec = codec.Class(ServiceAccountInfo, {
|
|
9825
|
-
codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
|
|
9826
|
-
balance: codec.u64,
|
|
9827
|
-
accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
|
|
9828
|
-
onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
|
|
9829
|
-
storageUtilisationBytes: codec.u64,
|
|
9830
|
-
gratisStorage: codec.u64,
|
|
9831
|
-
storageUtilisationCount: codec.u32,
|
|
9832
|
-
created: codec.u32.convert((x) => x, tryAsTimeSlot),
|
|
9833
|
-
lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
|
|
9834
|
-
parentService: codec.u32.convert((x) => x, tryAsServiceId),
|
|
9835
|
-
});
|
|
9836
|
-
|
|
9837
|
-
static create(a: CodecRecord<ServiceAccountInfo>) {
|
|
9838
|
-
return new ServiceAccountInfo(
|
|
9839
|
-
a.codeHash,
|
|
9840
|
-
a.balance,
|
|
9841
|
-
a.accumulateMinGas,
|
|
9842
|
-
a.onTransferMinGas,
|
|
9843
|
-
a.storageUtilisationBytes,
|
|
9844
|
-
a.gratisStorage,
|
|
9845
|
-
a.storageUtilisationCount,
|
|
9846
|
-
a.created,
|
|
9847
|
-
a.lastAccumulation,
|
|
9848
|
-
a.parentService,
|
|
9849
|
-
);
|
|
9850
|
-
}
|
|
9851
|
-
|
|
9852
|
-
/**
|
|
9853
|
-
* `a_t = max(0, BS + BI * a_i + BL * a_o - a_f)`
|
|
9854
|
-
* https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
|
|
9855
|
-
*/
|
|
9856
|
-
static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
|
|
9857
|
-
const storageCost =
|
|
9858
|
-
BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
|
|
9859
|
-
|
|
9860
|
-
if (storageCost < 0n) {
|
|
9861
|
-
return tryAsU64(0);
|
|
9862
|
-
}
|
|
9863
|
-
|
|
9864
|
-
if (storageCost >= 2n ** 64n) {
|
|
9865
|
-
return tryAsU64(2n ** 64n - 1n);
|
|
9866
|
-
}
|
|
9867
|
-
|
|
9868
|
-
return tryAsU64(storageCost);
|
|
9869
|
-
}
|
|
9870
|
-
|
|
9871
|
-
private constructor(
|
|
9872
|
-
/** `a_c`: Hash of the service code. */
|
|
9873
|
-
public readonly codeHash: CodeHash,
|
|
9874
|
-
/** `a_b`: Current account balance. */
|
|
9875
|
-
public readonly balance: U64,
|
|
9876
|
-
/** `a_g`: Minimal gas required to execute Accumulate entrypoint. */
|
|
9877
|
-
public readonly accumulateMinGas: ServiceGas,
|
|
9878
|
-
/** `a_m`: Minimal gas required to execute On Transfer entrypoint. */
|
|
9879
|
-
public readonly onTransferMinGas: ServiceGas,
|
|
9880
|
-
/** `a_o`: Total number of octets in storage. */
|
|
9881
|
-
public readonly storageUtilisationBytes: U64,
|
|
9882
|
-
/** `a_f`: Cost-free storage. Decreases both storage item count and total byte size. */
|
|
9883
|
-
public readonly gratisStorage: U64,
|
|
9884
|
-
/** `a_i`: Number of items in storage. */
|
|
9885
|
-
public readonly storageUtilisationCount: U32,
|
|
9886
|
-
/** `a_r`: Creation account time slot. */
|
|
9887
|
-
public readonly created: TimeSlot,
|
|
9888
|
-
/** `a_a`: Most recent accumulation time slot. */
|
|
9889
|
-
public readonly lastAccumulation: TimeSlot,
|
|
9890
|
-
/** `a_p`: Parent service ID. */
|
|
9891
|
-
public readonly parentService: ServiceId,
|
|
9892
|
-
) {
|
|
9893
|
-
super();
|
|
9894
|
-
}
|
|
9895
|
-
}
|
|
9896
|
-
|
|
9897
|
-
declare class PreimageItem extends WithDebug {
|
|
9898
|
-
static Codec = codec.Class(PreimageItem, {
|
|
9899
|
-
hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
|
|
9900
|
-
blob: codec.blob,
|
|
9879
|
+
/**
|
|
9880
|
+
+ * Details about validators' identity.
+ *
+ * https://graypaper.fluffylabs.dev/#/5f542d7/0d4b010d4c01
+ */
+declare class ValidatorData extends WithDebug {
+  static Codec = codec.Class(ValidatorData, {
+    bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>(),
+    ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>(),
+    bls: codec.bytes(BLS_KEY_BYTES).asOpaque<BlsKey>(),
+    metadata: codec.bytes(VALIDATOR_META_BYTES),
   });
 
-  static create({
-    return new
+  static create({ ed25519, bandersnatch, bls, metadata }: CodecRecord<ValidatorData>) {
+    return new ValidatorData(bandersnatch, ed25519, bls, metadata);
   }
 
   private constructor(
-
-    readonly
+    /** Bandersnatch public key. */
+    public readonly bandersnatch: BandersnatchKey,
+    /** ED25519 key data. */
+    public readonly ed25519: Ed25519Key,
+    /** BLS public key. */
+    public readonly bls: BlsKey,
+    /** Validator-defined additional metadata. */
+    public readonly metadata: Bytes<VALIDATOR_META_BYTES>,
   ) {
     super();
   }
 }
 
-
+declare enum SafroleSealingKeysKind {
+  Tickets = 0,
+  Keys = 1,
+}
 
-
-
-
-
-
-
-
+type SafroleSealingKeys =
+  | {
+      kind: SafroleSealingKeysKind.Keys;
+      keys: PerEpochBlock<BandersnatchKey>;
+    }
+  | {
+      kind: SafroleSealingKeysKind.Tickets;
+      tickets: PerEpochBlock<Ticket>;
+    };
+
+declare const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque<BandersnatchKey>();
+
+declare class SafroleSealingKeysData extends WithDebug {
+  static Codec = codecWithContext((context) => {
+    return codec.custom<SafroleSealingKeys>(
+      {
+        name: "SafroleSealingKeys",
+        sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
+      },
+      (e, x) => {
+        e.varU32(tryAsU32(x.kind));
+        if (x.kind === SafroleSealingKeysKind.Keys) {
+          e.sequenceFixLen(codecBandersnatchKey, x.keys);
+        } else {
+          e.sequenceFixLen(Ticket.Codec, x.tickets);
+        }
+      },
+      (d) => {
+        const epochLength = context.epochLength;
+        const kind = d.varU32();
+        if (kind === SafroleSealingKeysKind.Keys) {
+          const keys = d.sequenceFixLen<BandersnatchKey>(codecBandersnatchKey, epochLength);
+          return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
+        }
+
+        if (kind === SafroleSealingKeysKind.Tickets) {
+          const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
+          return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
+        }
+
+        throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
+      },
+      (s) => {
+        const kind = s.decoder.varU32();
+        if (kind === SafroleSealingKeysKind.Keys) {
+          s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
+          return;
+        }
+        if (kind === SafroleSealingKeysKind.Tickets) {
+          s.sequenceFixLen(Ticket.Codec, context.epochLength);
+          return;
+        }
+
+        throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
+      },
+    );
   });
 
-  static
-    return new
+  static keys(keys: PerEpochBlock<BandersnatchKey>): SafroleSealingKeys {
+    return new SafroleSealingKeysData(SafroleSealingKeysKind.Keys, keys, undefined) as SafroleSealingKeys;
+  }
+
+  static tickets(tickets: PerEpochBlock<Ticket>): SafroleSealingKeys {
+    return new SafroleSealingKeysData(SafroleSealingKeysKind.Tickets, undefined, tickets) as SafroleSealingKeys;
   }
 
   private constructor(
-    readonly
-    readonly
+    readonly kind: SafroleSealingKeysKind,
+    readonly keys?: PerEpochBlock<BandersnatchKey>,
+    readonly tickets?: PerEpochBlock<Ticket>,
   ) {
     super();
   }
 }
 
-declare
-
-
-
-
-
+declare class SafroleData {
+  static Codec = codec.Class(SafroleData, {
+    nextValidatorData: codecPerValidator(ValidatorData.Codec),
+    epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque<BandersnatchRingRoot>(),
+    sealingKeySeries: SafroleSealingKeysData.Codec,
+    ticketsAccumulator: readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
+  });
+
+  static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }: CodecRecord<SafroleData>) {
+    return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
   }
-    return knownSize;
-  }
 
-
-
-
-
-    public readonly
-    /**
-
-
-
-    public readonly slots: LookupHistorySlots,
+  private constructor(
+    /** gamma_k */
+    public readonly nextValidatorData: PerValidator<ValidatorData>,
+    /** gamma_z */
+    public readonly epochRoot: BandersnatchRingRoot,
+    /** gamma_s */
+    public readonly sealingKeySeries: SafroleSealingKeys,
+    /** gamma_a */
+    public readonly ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">,
   ) {}
-
-  static isRequested(item: LookupHistoryItem | LookupHistorySlots): boolean {
-    if ("slots" in item) {
-      return item.slots.length === 0;
-    }
-    return item.length === 0;
-  }
 }
 
 declare const codecServiceId: Descriptor<ServiceId> =
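Note: the sealing-key series is a discriminated union keyed by `kind`, so consumers can narrow it without casts. A minimal usage sketch (the helper itself is illustrative; only the types come from the declarations above, and `PerEpochBlock` is assumed to be array-like):

function sealingEntryCount(s: SafroleSealingKeys): number {
  // `kind` discriminates the union, so `keys` / `tickets` narrow safely.
  return s.kind === SafroleSealingKeysKind.Keys ? s.keys.length : s.tickets.length;
}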
@@ -10105,12 +10151,26 @@ declare class CoreStatistics {
  * Service statistics.
  * Updated per block, based on available work reports (`W`).
  *
- * https://graypaper.fluffylabs.dev/#/
- * https://github.com/gavofyork/graypaper/blob/9bffb08f3ea7b67832019176754df4fb36b9557d/text/statistics.tex#L77
+ * https://graypaper.fluffylabs.dev/#/1c979cb/199802199802?v=0.7.1
  */
 declare class ServiceStatistics {
-  static Codec = Compatibility.
-
+  static Codec = Compatibility.selectIfGreaterOrEqual({
+    fallback: codec.Class(ServiceStatistics, {
+      providedCount: codecVarU16,
+      providedSize: codec.varU32,
+      refinementCount: codec.varU32,
+      refinementGasUsed: codecVarGas,
+      imports: codecVarU16,
+      exports: codecVarU16,
+      extrinsicSize: codec.varU32,
+      extrinsicCount: codecVarU16,
+      accumulateCount: codec.varU32,
+      accumulateGasUsed: codecVarGas,
+      onTransfersCount: codec.varU32,
+      onTransfersGasUsed: codecVarGas,
+    }),
+    versions: {
+      [GpVersion.V0_7_0]: codec.Class(ServiceStatistics, {
         providedCount: codecVarU16,
         providedSize: codec.varU32,
         refinementCount: codec.varU32,
@@ -10123,21 +10183,23 @@ declare class ServiceStatistics {
         accumulateGasUsed: codecVarGas,
         onTransfersCount: codec.varU32,
         onTransfersGasUsed: codecVarGas,
-  })
-
+      }),
+      [GpVersion.V0_7_1]: codec.Class(ServiceStatistics, {
         providedCount: codecVarU16,
         providedSize: codec.varU32,
         refinementCount: codec.varU32,
         refinementGasUsed: codecVarGas,
         imports: codecVarU16,
-        exports: codecVarU16,
-        extrinsicSize: codec.varU32,
         extrinsicCount: codecVarU16,
+        extrinsicSize: codec.varU32,
+        exports: codecVarU16,
         accumulateCount: codec.varU32,
         accumulateGasUsed: codecVarGas,
-        onTransfersCount:
-        onTransfersGasUsed:
-  })
+        onTransfersCount: ignoreValueWithDefault(tryAsU32(0)),
+        onTransfersGasUsed: ignoreValueWithDefault(tryAsServiceGas(0)),
+      }),
+    },
+  });
 
   static create(v: CodecRecord<ServiceStatistics>) {
     return new ServiceStatistics(
@@ -10177,9 +10239,9 @@ declare class ServiceStatistics {
     public accumulateCount: U32,
     /** `a.1` */
     public accumulateGasUsed: ServiceGas,
-    /** `t.0` */
+    /** `t.0` @deprecated since 0.7.1 */
     public onTransfersCount: U32,
-    /** `t.1` */
+    /** `t.1` @deprecated since 0.7.1 */
     public onTransfersGasUsed: ServiceGas,
   ) {}
 
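Note: `selectIfGreaterOrEqual` itself is not visible in this diff, only its call shape. A hedged sketch of the dispatch it appears to perform (hypothetical helper; the real signature may differ):

function selectByVersionSketch<T>(current: GpVersion, opts: { fallback: T; versions: Partial<Record<GpVersion, T>> }): T {
  // Keep the newest per-version entry whose version is <= the current one,
  // falling back when no listed version applies.
  let chosen = opts.fallback;
  for (const v of ALL_VERSIONS_IN_ORDER) {
    const candidate = opts.versions[v];
    if (candidate !== undefined && ALL_VERSIONS_IN_ORDER.indexOf(v) <= ALL_VERSIONS_IN_ORDER.indexOf(current)) {
      chosen = candidate;
    }
  }
  return chosen;
}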
@@ -11139,8 +11201,9 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
       epochRoot: Bytes.zero(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
       privilegedServices: PrivilegedServices.create({
         manager: tryAsServiceId(0),
-
-
+        assigners: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
+        delegator: tryAsServiceId(0),
+        registrar: tryAsServiceId(MAX_VALUE),
         autoAccumulateServices: [],
       }),
       accumulationOutputLog: SortedArray.fromArray(accumulationOutputComparator, []),
@@ -11274,7 +11337,7 @@ declare const index$e_codecPerCore: typeof codecPerCore;
 declare const index$e_codecServiceId: typeof codecServiceId;
 declare const index$e_codecVarGas: typeof codecVarGas;
 declare const index$e_codecVarU16: typeof codecVarU16;
-declare const index$
+declare const index$e_codecWithVersion: typeof codecWithVersion;
 declare const index$e_hashComparator: typeof hashComparator;
 declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
 declare const index$e_serviceDataCodec: typeof serviceDataCodec;
@@ -11285,7 +11348,7 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
 declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
 declare const index$e_zeroSizeHint: typeof zeroSizeHint;
 declare namespace index$e {
-  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$
+  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
   export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
 }
 
@@ -11353,7 +11416,7 @@ declare namespace stateKeys {
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
-  export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
+  export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11361,11 +11424,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
-  export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
+  export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11373,11 +11436,16 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
-  export function serviceLookupHistory(
+  export function serviceLookupHistory(
+    blake2b: Blake2b,
+    serviceId: ServiceId,
+    hash: PreimageHash,
+    preimageLength: U32,
+  ): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const doubleHash = blake2b.hashBytes(hash);
       const out = Bytes.zero(HASH_SIZE);
@@ -11386,11 +11454,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, preimageLength, hash);
+    return serviceNested(blake2b, serviceId, preimageLength, hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
-  export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
+  export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
     const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
     const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
     const key = Bytes.zero(HASH_SIZE);
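Note: every `stateKeys` helper now takes the Blake2b hasher explicitly instead of relying on a shared module-level instance. An assumed usage sketch (`Blake2b.createHasher()` appears later in this diff; the wrapper itself is illustrative):

async function storageStateKey(serviceId: ServiceId, key: StorageKey): Promise<StateKey> {
  const blake2b = await Blake2b.createHasher();
  return stateKeys.serviceStorage(blake2b, serviceId, key);
}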
@@ -11570,24 +11638,26 @@ declare namespace serialize {
   /** C(255, s): https://graypaper.fluffylabs.dev/#/85129da/383103383103?v=0.6.3 */
   export const serviceData = (serviceId: ServiceId) => ({
     key: stateKeys.serviceInfo(serviceId),
-    Codec:
+    Codec: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
+      ? codecWithVersion(ServiceAccountInfo.Codec)
+      : ServiceAccountInfo.Codec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
-  export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
-    key: stateKeys.serviceStorage(serviceId, key),
+  export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
+    key: stateKeys.serviceStorage(blake2b, serviceId, key),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
-  export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
-    key: stateKeys.servicePreimage(serviceId, hash),
+  export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
+    key: stateKeys.servicePreimage(blake2b, serviceId, hash),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
-  export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
-    key: stateKeys.serviceLookupHistory(serviceId, hash, len),
+  export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
+    key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
     Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
   });
 }
@@ -11622,6 +11692,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
 /** Serialize given state update into a series of key-value pairs. */
 declare function* serializeStateUpdate(
   spec: ChainSpec,
+  blake2b: Blake2b,
   update: Partial<State & ServicesUpdate>,
 ): Generator<StateEntryUpdate> {
   // first let's serialize all of the simple entries (if present!)
@@ -11630,9 +11701,9 @@ declare function* serializeStateUpdate(
   const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
 
   // then let's proceed with service updates
-  yield* serializeServiceUpdates(update.servicesUpdates, encode);
-  yield* serializePreimages(update.preimages, encode);
-  yield* serializeStorage(update.storage);
+  yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
+  yield* serializePreimages(update.preimages, encode, blake2b);
+  yield* serializeStorage(update.storage, blake2b);
   yield* serializeRemovedServices(update.servicesRemoved);
 }
 
@@ -11644,18 +11715,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
   }
 }
 
-declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
+declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of storage ?? []) {
     switch (action.kind) {
       case UpdateStorageKind.Set: {
         const key = action.storage.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
         break;
       }
       case UpdateStorageKind.Remove: {
         const key = action.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
         break;
       }
@@ -11665,16 +11736,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
   }
 }
 
-declare function* serializePreimages(
+declare function* serializePreimages(
+  preimages: UpdatePreimage[] | undefined,
+  encode: EncodeFun,
+  blake2b: Blake2b,
+): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of preimages ?? []) {
     switch (action.kind) {
       case UpdatePreimageKind.Provide: {
         const { hash, blob } = action.preimage;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Insert, codec.key, blob];
 
         if (action.slot !== null) {
-          const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
+          const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
           yield [
             StateEntryUpdateAction.Insert,
             codec2.key,
@@ -11685,16 +11760,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
       }
       case UpdatePreimageKind.UpdateOrAdd: {
         const { hash, length, slots } = action.item;
-        const codec = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
         break;
       }
       case UpdatePreimageKind.Remove: {
         const { hash, length } = action;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
 
-        const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
         break;
       }
@@ -11706,6 +11781,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
 declare function* serializeServiceUpdates(
   servicesUpdates: UpdateService[] | undefined,
   encode: EncodeFun,
+  blake2b: Blake2b,
 ): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of servicesUpdates ?? []) {
     // new service being created or updated
@@ -11715,7 +11791,7 @@ declare function* serializeServiceUpdates(
     // additional lookup history update
     if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
       const { lookupHistory } = action;
-      const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
+      const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
       yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
     }
   }
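Note: the serialization generators now thread the hasher through every key derivation. An illustrative way to materialize a whole update (only the signatures come from this diff):

function collectStateUpdates(
  spec: ChainSpec,
  blake2b: Blake2b,
  update: Partial<State & ServicesUpdate>,
): StateEntryUpdate[] {
  // Generators are iterable, so the whole sequence can be spread into an array.
  return [...serializeStateUpdate(spec, blake2b, update)];
}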
@@ -11849,8 +11925,8 @@ declare class StateEntries {
   );
 
   /** Turn in-memory state into its serialized form. */
-  static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
-    return new StateEntries(convertInMemoryStateToDictionary(spec, state));
+  static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
+    return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
   }
 
   /**
@@ -11905,7 +11981,8 @@ declare class StateEntries {
   }
 
   /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
-  getRootHash(): StateRootHash {
+  getRootHash(blake2b: Blake2b): StateRootHash {
+    const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
     const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
     for (const [key, value] of this) {
       leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
@@ -11918,6 +11995,7 @@ declare class StateEntries {
 /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
 declare function convertInMemoryStateToDictionary(
   spec: ChainSpec,
+  blake2b: Blake2b,
   state: InMemoryState,
 ): TruncatedHashDictionary<StateKey, BytesBlob> {
   const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11950,20 +12028,25 @@ declare function convertInMemoryStateToDictionary(
 
     // preimages
     for (const preimage of service.data.preimages.values()) {
-      const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
+      const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
       serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
     }
 
     // storage
     for (const storage of service.data.storage.values()) {
-      const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
+      const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
       serialized.set(key, Encoder.encodeObject(Codec, storage.value));
     }
 
     // lookup history
     for (const lookupHistoryList of service.data.lookupHistory.values()) {
      for (const lookupHistory of lookupHistoryList) {
-        const { key, Codec } = serialize.serviceLookupHistory(
+        const { key, Codec } = serialize.serviceLookupHistory(
+          blake2b,
+          serviceId,
+          lookupHistory.hash,
+          lookupHistory.length,
+        );
         serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
       }
     }
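Note: computing a state root end-to-end now uses one hasher for both serialization and trie construction; the sketch below mirrors the `InMemoryStates.getStateRoot` body that appears further down in this diff:

async function stateRootOf(spec: ChainSpec, state: InMemoryState): Promise<StateRootHash> {
  const blake2b = await Blake2b.createHasher();
  return StateEntries.serializeInMemory(spec, blake2b, state).getRootHash(blake2b);
}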
@@ -11994,21 +12077,23 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
   implements State, EnumerableState
 {
   /** Create a state-like object from collection of serialized entries. */
-  static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
-    return new SerializedState(spec, state, recentServices);
+  static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
+    return new SerializedState(spec, blake2b, state, recentServices);
   }
 
   /** Create a state-like object backed by some DB. */
   static new<T extends SerializedStateBackend>(
     spec: ChainSpec,
+    blake2b: Blake2b,
     db: T,
     recentServices: ServiceId[] = [],
   ): SerializedState<T> {
-    return new SerializedState(spec, db, recentServices);
+    return new SerializedState(spec, blake2b, db, recentServices);
   }
 
   private constructor(
     private readonly spec: ChainSpec,
+    private readonly blake2b: Blake2b,
     public backend: T,
     /** Best-effort list of recently active services. */
     private readonly _recentServiceIds: ServiceId[],
@@ -12039,7 +12124,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
       this._recentServiceIds.push(id);
     }
 
-    return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
+    return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
   }
 
   private retrieve<T>({ key, Codec }: KeyAndCodec<T>, description: string): T {
@@ -12138,6 +12223,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
 /** Service data representation on a serialized state. */
 declare class SerializedService implements Service {
   constructor(
+    public readonly blake2b: Blake2b,
     /** Service id */
     public readonly serviceId: ServiceId,
     private readonly accountInfo: ServiceAccountInfo,
@@ -12153,14 +12239,14 @@ declare class SerializedService implements Service {
   getStorage(rawKey: StorageKey): BytesBlob | null {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const SERVICE_ID_BYTES = 4;
-      const serviceIdAndKey =
+      const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
       serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
       serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
-      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
-      return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
+      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
+      return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
     }
 
-    return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
+    return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
   }
 
   /**
@@ -12170,17 +12256,17 @@ declare class SerializedService implements Service {
   */
   hasPreimage(hash: PreimageHash): boolean {
     // TODO [ToDr] consider optimizing to avoid fetching the whole data.
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
   }
 
   /** Retrieve preimage from the DB. */
   getPreimage(hash: PreimageHash): BytesBlob | null {
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
   }
 
   /** Retrieve preimage lookup history. */
   getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
-    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
+    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
     if (rawSlots === undefined) {
       return null;
     }
@@ -12193,9 +12279,9 @@ type KeyAndCodec<T> = {
   Codec: Decode<T>;
 };
 
-declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
+declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
   const stateEntries = StateEntries.fromEntriesUnsafe(entries);
-  return SerializedState.fromStateEntries(spec, stateEntries);
+  return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
 }
 
 /**
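Note: an assumed sketch of restoring a state view with the new `loadState` signature (where the entries come from is up to the caller):

async function restoreState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
  const blake2b = await Blake2b.createHasher();
  return loadState(spec, blake2b, entries);
}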
@@ -12351,7 +12437,8 @@ declare class LeafDb implements SerializedStateBackend {
     assertNever(val);
   }
 
-  getStateRoot(): StateRootHash {
+  getStateRoot(blake2b: Blake2b): StateRootHash {
+    const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
     return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
   }
 
@@ -12449,7 +12536,8 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
   }
 
   async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
-
+    const blake2b = await Blake2b.createHasher();
+    return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
   }
 
   /** Insert a full state into the database. */
@@ -12554,7 +12642,7 @@ declare function padAndEncodeData(input: BytesBlob) {
   const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
   let padded = input;
   if (input.length !== paddedLength) {
-    padded = BytesBlob.blobFrom(
+    padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
     padded.raw.set(input.raw, 0);
   }
   return chunkingFunction(padded);
@@ -12610,7 +12698,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
  */
 declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
   const result: Bytes<POINT_LENGTH>[] = [];
-  const data =
+  const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
 
   // add original shards to the result
   for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12630,7 +12718,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
   for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
     const pointIndex = i * POINT_ALIGNMENT;
 
-    const redundancyPoint =
+    const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
     for (let j = 0; j < POINT_LENGTH; j++) {
       redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
     }
@@ -12650,7 +12738,7 @@ declare function decodePiece(
 ): Bytes<PIECE_SIZE> {
   const result = Bytes.zero(PIECE_SIZE);
 
-  const data =
+  const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
   const indices = new Uint16Array(input.length);
 
   for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12777,7 +12865,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
     return BytesBlob.empty();
   }
   const n = input[0].length;
-  const result = BytesBlob.blobFrom(
+  const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
   for (let i = 0; i < k; i++) {
     const entry = input[i].raw;
     for (let j = 0; j < n; j++) {
@@ -13620,6 +13708,8 @@ declare enum NewServiceError {
   InsufficientFunds = 0,
   /** Service is not privileged to set gratis storage. */
   UnprivilegedService = 1,
+  /** Registrar attempting to create a service with an already existing id. */
+  RegistrarServiceIdAlreadyTaken = 2,
 }
 
 declare enum UpdatePrivilegesError {
@@ -13675,25 +13765,28 @@ interface PartialState {
 
   /**
   * Transfer given `amount` of funds to the `destination`,
-   * passing `
-   * and given `memo`.
+   * passing the `gas` fee for the transfer and the given `memo`.
   */
  transfer(
    destination: ServiceId | null,
    amount: U64,
-
+    gas: ServiceGas,
    memo: Bytes<TRANSFER_MEMO_BYTES>,
  ): Result$2<OK, TransferError>;
 
  /**
-   * Create a new service with given codeHash, length, gas, allowance and
+   * Create a new service with given codeHash, length, gas, allowance, gratisStorage and wantedServiceId.
+   *
+   * Returns a newly assigned id,
+   * or `wantedServiceId` if it is lower than `S`
+   * and the parent of that service is `Registrar`.
   *
-   *
-   * https://graypaper.fluffylabs.dev/#/7e6ff6a/2f4c022f4c02?v=0.6.7
+   * https://graypaper.fluffylabs.dev/#/ab2cdbd/2fa9042fc304?v=0.7.2
   *
   * An error can be returned in case the account does not
   * have the required balance,
-   * or tries to set gratis storage without being
+   * or tries to set gratis storage without being `Manager`,
+   * or `Registrar` tries to set a service id that's already taken.
   */
  newService(
    codeHash: CodeHash,
@@ -13701,6 +13794,7 @@ interface PartialState {
    gas: ServiceGas,
    allowance: ServiceGas,
    gratisStorage: U64,
+    wantedServiceId: U64,
  ): Result$2<ServiceId, NewServiceError>;
 
  /** Upgrade code of currently running service. */
@@ -13722,7 +13816,7 @@ interface PartialState {
  updateAuthorizationQueue(
    coreIndex: CoreIndex,
    authQueue: FixedSizeArray<Blake2bHash, AUTHORIZATION_QUEUE_SIZE>,
-
+    assigners: ServiceId | null,
  ): Result$2<OK, UpdatePrivilegesError>;
 
  /**
@@ -13731,14 +13825,16 @@ interface PartialState {
   * `m`: manager service (can change privileged services)
   * `a`: manages authorization queue
   * `v`: manages validator keys
-   * `
+   * `r`: manages creation of new services in the protected id range.
+   * `z`: collection of serviceId -> gas that auto-accumulate every block
   *
   */
  updatePrivilegedServices(
    m: ServiceId | null,
    a: PerCore<ServiceId>,
    v: ServiceId | null,
-
+    r: ServiceId | null,
+    z: [ServiceId, ServiceGas][],
  ): Result$2<OK, UpdatePrivilegesError>;
 
  /** Yield accumulation trie result hash. */
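Note: a hedged sketch of calling the extended `newService`; the final argument is the wanted id, honored only when the caller is the `Registrar`. All concrete values are assumptions, and the exact type of the `length` parameter is not visible in this hunk:

declare const state: PartialState;
declare const codeHash: CodeHash;
declare const codeLength: U64; // type assumed, not shown in this hunk
declare const gas: ServiceGas;
declare const allowance: ServiceGas;

// gratisStorage = 0, wantedServiceId = 42 (illustrative).
const created = state.newService(codeHash, codeLength, gas, allowance, tryAsU64(0n), tryAsU64(42n));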
@@ -13850,7 +13946,7 @@ declare class Mask {
   }
 
   private buildLookupTableForward(mask: BitVec) {
-    const table =
+    const table = safeAllocUint8Array(mask.bitLength);
     let lastInstructionOffset = 0;
     for (let i = mask.bitLength - 1; i >= 0; i--) {
       if (mask.isSet(i)) {
@@ -13994,7 +14090,7 @@ declare class Registers {
   private asSigned: BigInt64Array;
   private asUnsigned: BigUint64Array;
 
-  constructor(private readonly bytes =
+  constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
     check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
     this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
     this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
@@ -17446,6 +17542,29 @@ declare class Interpreter {
   getMemoryPage(pageNumber: number): null | Uint8Array {
     return this.memory.getPageDump(tryAsPageNumber(pageNumber));
   }
+
+  calculateBlockGasCost(): Map<string, number> {
+    const codeLength = this.code.length;
+    const blocks: Map<string, number> = new Map();
+    let currentBlock = "0";
+    let gasCost = 0;
+    const getNextIstructionIndex = (index: number) => index + 1 + this.mask.getNoOfBytesToNextInstruction(index + 1);
+
+    for (let index = 0; index < codeLength; index = getNextIstructionIndex(index)) {
+      const instruction = this.code[index];
+      if (this.basicBlocks.isBeginningOfBasicBlock(index)) {
+        blocks.set(currentBlock, gasCost);
+        currentBlock = index.toString();
+        gasCost = 0;
+      }
+
+      gasCost += instructionGasMap[instruction];
+    }
+
+    blocks.set(currentBlock, gasCost);
+
+    return blocks;
+  }
 }
 
 type index$8_BigGas = BigGas;
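Note: assumed usage of the new `calculateBlockGasCost`: the returned map is keyed by each basic block's starting instruction offset (as a string), with "0" covering the prologue before the first detected block boundary:

declare const interpreter: Interpreter;
for (const [startOffset, cost] of interpreter.calculateBlockGasCost()) {
  console.log(`block @${startOffset}: static gas ${cost}`);
}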
@@ -17656,7 +17775,7 @@ declare class AccumulationStateUpdate {
     /** Services state updates. */
     public readonly services: ServicesUpdate,
     /** Pending transfers. */
-    public
+    public transfers: PendingTransfer[],
     /** Yielded accumulation root. */
     public readonly yieldedRoots: Map<ServiceId, OpaqueHash> = new Map(),
   ) {}
@@ -17707,11 +17826,18 @@ declare class AccumulationStateUpdate {
     if (from.privilegedServices !== null) {
       update.privilegedServices = PrivilegedServices.create({
         ...from.privilegedServices,
-
+        assigners: asKnownSize([...from.privilegedServices.assigners]),
       });
     }
     return update;
   }
+
+  /** Retrieve and clear pending transfers. */
+  takeTransfers() {
+    const transfers = this.transfers;
+    this.transfers = [];
+    return transfers;
+  }
 }
 
 type StateSlice = Pick<State, "getService" | "privilegedServices">;
@@ -17978,7 +18104,7 @@ declare const HostCallResult = {
   OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
   /** Index unknown. */
   WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
-  /** Storage full. */
+  /** Storage full or resource already allocated. */
   FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
   /** Core index unknown. */
   CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
@@ -17986,7 +18112,7 @@ declare const HostCallResult = {
   CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
   /** Gas limit too low. */
   LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
-  /** The item is already solicited
+  /** The item is already solicited, cannot be forgotten or the operation is invalid due to privilege level. */
   HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
   /** The return value indicating general success. */
   OK: tryAsU64(0n),
@@ -18052,9 +18178,15 @@ type HostCallIndex = Opaque<U32, "HostCallIndex[U32]">;
 /** Attempt to convert a number into `HostCallIndex`. */
 declare const tryAsHostCallIndex = (v: number): HostCallIndex => asOpaqueType(tryAsU32(v));
 
+/**
+ * Host-call exit reason.
+ *
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/24a30124a501?v=0.7.2
+ */
 declare enum PvmExecution {
   Halt = 0,
   Panic = 1,
+  OOG = 2, // out-of-gas
 }
 
 /** A utility function to easily trace a bunch of registers. */
@@ -18067,8 +18199,12 @@ interface HostCallHandler {
   /** Index of that host call (i.e. what PVM invokes via `ecalli`) */
   readonly index: HostCallIndex;
 
-  /**
-
+  /**
+   * The gas cost of invocation of that host call.
+   *
+   * NOTE: `((reg: IHostCallRegisters) => Gas)` function is for compatibility reasons: pre GP 0.7.2
+   */
+  readonly basicGasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
 
   /** Currently executing service id. */
   readonly currentServiceId: U32;
@@ -18211,7 +18347,7 @@ declare class HostCalls {
     const maybeAddress = regs.getLowerU32(7);
     const maybeLength = regs.getLowerU32(8);
 
-    const result =
+    const result = safeAllocUint8Array(maybeLength);
     const startAddress = tryAsMemoryIndex(maybeAddress);
     const loadResult = memory.loadInto(result, startAddress);
 
@@ -18244,8 +18380,10 @@ declare class HostCalls {
 
     const hostCall = this.hostCalls.get(index);
     const gasBefore = gas.get();
-
-    const
+    // NOTE: `basicGasCost(regs)` function is for compatibility reasons: pre GP 0.7.2
+    const basicGasCost =
+      typeof hostCall.basicGasCost === "number" ? hostCall.basicGasCost : hostCall.basicGasCost(regs);
+    const underflow = gas.sub(basicGasCost);
 
     const pcLog = `[PC: ${pvmInstance.getPC()}]`;
     if (underflow) {
@@ -18272,6 +18410,11 @@ declare class HostCalls {
       return this.getReturnValue(status, pvmInstance);
     }
 
+    if (result === PvmExecution.OOG) {
+      status = Status.OOG;
+      return this.getReturnValue(status, pvmInstance);
+    }
+
     if (result === undefined) {
       pvmInstance.runProgram();
       status = pvmInstance.getStatus();
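Note: the gas-cost dispatch added to `HostCalls` above can be read as this small helper (hypothetical factoring; the logic itself is exactly what the diff adds):

function resolveBasicGasCost(handler: HostCallHandler, regs: IHostCallRegisters): SmallGas | Gas {
  // Pre-0.7.2 handlers compute the cost from registers; newer ones use a constant.
  return typeof handler.basicGasCost === "number" ? handler.basicGasCost : handler.basicGasCost(regs);
}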
@@ -18643,7 +18786,7 @@ declare class DebuggerAdapter {
 
     if (page === null) {
       // page wasn't allocated so we return an empty page
-      return
+      return safeAllocUint8Array(PAGE_SIZE);
     }
 
     if (page.length === PAGE_SIZE) {
@@ -18652,7 +18795,7 @@ declare class DebuggerAdapter {
     }
 
     // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
-    const fullPage =
+    const fullPage = safeAllocUint8Array(PAGE_SIZE);
     fullPage.set(page);
     return fullPage;
   }
@@ -18845,10 +18988,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
  *
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
-declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
+declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
   check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
   const n = arr.length;
-  const randomNumbers = hashToNumberSequence(entropy, arr.length);
+  const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
   const result: T[] = new Array<T>(n);
 
   let itemsLeft = n;
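Note: the shuffle is fully determined by the hasher and the entropy input, so two parties holding the same entropy derive the same permutation. Assumed usage:

declare const blake2b: Blake2b;
declare const entropy: Bytes<ENTROPY_BYTES>;
const order = fisherYatesShuffle(blake2b, [0, 1, 2, 3], entropy);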
@@ -18874,6 +19017,7 @@ declare namespace index$2 {
 declare class JsonServiceInfo {
   static fromJson = json.object<JsonServiceInfo, ServiceAccountInfo>(
     {
+      ...(Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) ? { version: "number" } : {}),
       code_hash: fromJson.bytes32(),
       balance: json.fromNumber((x) => tryAsU64(x)),
       min_item_gas: json.fromNumber((x) => tryAsServiceGas(x)),
@@ -18912,6 +19056,7 @@ declare class JsonServiceInfo {
     },
   );
 
+  version?: number;
   code_hash!: CodeHash;
   balance!: U64;
   min_item_gas!: ServiceGas;
@@ -18958,6 +19103,19 @@ declare const lookupMetaFromJson = json.object<JsonLookupMeta, LookupHistoryItem
   ({ key, value }) => new LookupHistoryItem(key.hash, key.length, value),
 );
 
+declare const preimageStatusFromJson = json.object<JsonPreimageStatus, LookupHistoryItem>(
+  {
+    hash: fromJson.bytes32(),
+    status: json.array("number"),
+  },
+  ({ hash, status }) => new LookupHistoryItem(hash, tryAsU32(0), status),
+);
+
+type JsonPreimageStatus = {
+  hash: PreimageHash;
+  status: LookupHistorySlots;
+};
+
 type JsonLookupMeta = {
   key: {
     hash: PreimageHash;
@@ -18970,21 +19128,34 @@ declare class JsonService {
   static fromJson = json.object<JsonService, InMemoryService>(
     {
       id: "number",
-      data:
-
-
-
-
+      data: Compatibility.isLessThan(GpVersion.V0_7_1)
+        ? {
+            service: JsonServiceInfo.fromJson,
+            preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
+            storage: json.optional(json.array(JsonStorageItem.fromJson)),
+            lookup_meta: json.optional(json.array(lookupMetaFromJson)),
+          }
+        : {
+            service: JsonServiceInfo.fromJson,
+            storage: json.optional(json.array(JsonStorageItem.fromJson)),
+            preimages_blob: json.optional(json.array(JsonPreimageItem.fromJson)),
+            preimages_status: json.optional(json.array(preimageStatusFromJson)),
+          },
     },
     ({ id, data }) => {
+      const preimages = HashDictionary.fromEntries(
+        (data.preimages ?? data.preimages_blob ?? []).map((x) => [x.hash, x]),
+      );
+
       const lookupHistory = HashDictionary.new<PreimageHash, LookupHistoryItem[]>();
-
+
+      for (const item of data.lookup_meta ?? data.preimages_status ?? []) {
         const data = lookupHistory.get(item.hash) ?? [];
-
+        const length = tryAsU32(preimages.get(item.hash)?.blob.length ?? item.length);
+        data.push(new LookupHistoryItem(item.hash, length, item.slots));
         lookupHistory.set(item.hash, data);
       }
-
+
       const storage = new Map<string, StorageItem>();
 
       const entries = (data.storage ?? []).map(({ key, value }) => {
@@ -19011,6 +19182,8 @@ declare class JsonService {
     preimages?: JsonPreimageItem[];
     storage?: JsonStorageItem[];
     lookup_meta?: LookupHistoryItem[];
+    preimages_blob?: JsonPreimageItem[];
+    preimages_status?: LookupHistoryItem[];
   };
 }
 
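Note: for GP 0.7.1+ test vectors, `JsonService.fromJson` expects the renamed preimage fields. An illustrative payload shape (all values made up; the remaining `JsonServiceInfo` fields are elided):

const serviceJson = {
  id: 1,
  data: {
    service: { version: 0 /* ...remaining JsonServiceInfo fields... */ },
    storage: [],
    preimages_blob: [],
    preimages_status: [],
  },
};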
@@ -19020,8 +19193,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
|
|
|
19020
19193
|
timeout: "number",
|
|
19021
19194
|
},
|
|
19022
19195
|
({ report, timeout }) => {
|
|
19023
|
-
|
|
19024
|
-
return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
|
|
19196
|
+
return AvailabilityAssignment.create({ workReport: report, timeout });
|
|
19025
19197
|
},
|
|
19026
19198
|
);
|
|
19027
19199
|
|
|
@@ -19242,8 +19414,12 @@ declare class JsonServiceStatistics {
|
|
|
19242
19414
|
extrinsic_count: "number",
|
|
19243
19415
|
accumulate_count: "number",
|
|
19244
19416
|
accumulate_gas_used: json.fromNumber(tryAsServiceGas),
|
|
19245
|
-
|
|
19246
|
-
|
|
19417
|
+
...(Compatibility.isLessThan(GpVersion.V0_7_1)
|
|
19418
|
+
? {
|
|
19419
|
+
on_transfers_count: "number",
|
|
19420
|
+
on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
|
|
19421
|
+
}
|
|
19422
|
+
: {}),
|
|
19247
19423
|
},
|
|
19248
19424
|
({
|
|
19249
19425
|
provided_count,
|
|
@@ -19270,8 +19446,8 @@ declare class JsonServiceStatistics {
|
|
|
19270
19446
|
extrinsicCount: extrinsic_count,
|
|
19271
19447
|
accumulateCount: accumulate_count,
|
|
19272
19448
|
accumulateGasUsed: accumulate_gas_used,
|
|
19273
|
-
onTransfersCount: on_transfers_count,
|
|
19274
|
-
onTransfersGasUsed: on_transfers_gas_used,
|
|
19449
|
+
onTransfersCount: on_transfers_count ?? tryAsU32(0),
|
|
19450
|
+
onTransfersGasUsed: on_transfers_gas_used ?? tryAsServiceGas(0),
|
|
19275
19451
|
});
|
|
19276
19452
|
},
|
|
19277
19453
|
);
|
|
@@ -19286,8 +19462,8 @@ declare class JsonServiceStatistics {
|
|
|
19286
19462
|
extrinsic_count!: U16;
|
|
19287
19463
|
accumulate_count!: U32;
|
|
19288
19464
|
accumulate_gas_used!: ServiceGas;
|
|
19289
|
-
on_transfers_count
|
|
19290
|
-
on_transfers_gas_used
|
|
19465
|
+
on_transfers_count?: U32;
|
|
19466
|
+
on_transfers_gas_used?: ServiceGas;
|
|
19291
19467
|
}
|
|
19292
19468
|
|
|
19293
19469
|
type ServiceStatisticsEntry = {
|
|
@@ -19359,8 +19535,9 @@ type JsonStateDump = {
|
|
|
19359
19535
|
tau: State["timeslot"];
|
|
19360
19536
|
chi: {
|
|
19361
19537
|
chi_m: PrivilegedServices["manager"];
|
|
19362
|
-
chi_a: PrivilegedServices["
|
|
19363
|
-
chi_v: PrivilegedServices["
|
|
19538
|
+
chi_a: PrivilegedServices["assigners"];
|
|
19539
|
+
chi_v: PrivilegedServices["delegator"];
|
|
19540
|
+
chi_r?: PrivilegedServices["registrar"];
|
|
19364
19541
|
chi_g: PrivilegedServices["autoAccumulateServices"] | null;
|
|
19365
19542
|
};
|
|
19366
19543
|
pi: JsonStatisticsData;
|
|
@@ -19393,6 +19570,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
|
|
|
19393
19570
|
chi_m: "number",
|
|
19394
19571
|
chi_a: json.array("number"),
|
|
19395
19572
|
chi_v: "number",
|
|
19573
|
+
chi_r: json.optional("number"),
|
|
19396
19574
|
chi_g: json.nullable(
|
|
19397
19575
|
json.array({
|
|
19398
19576
|
service: "number",
|
|
@@ -19425,6 +19603,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
|
|
|
19425
19603
|
theta,
|
|
19426
19604
|
accounts,
|
|
19427
19605
|
}): InMemoryState => {
|
|
19606
|
+
if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) && chi.chi_r === undefined) {
|
|
19607
|
+
throw new Error("Registrar is required in Privileges GP ^0.7.1");
|
|
19608
|
+
}
|
|
19428
19609
|
return InMemoryState.create({
|
|
19429
19610
|
authPools: tryAsPerCore(
|
|
19430
19611
|
alpha.map((perCore) => {
|
|
@@ -19458,8 +19639,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
|
|
|
19458
19639
|
timeslot: tau,
|
|
19459
19640
|
privilegedServices: PrivilegedServices.create({
|
|
19460
19641
|
manager: chi.chi_m,
|
|
19461
|
-
|
|
19462
|
-
|
|
19642
|
+
assigners: chi.chi_a,
|
|
19643
|
+
delegator: chi.chi_v,
|
|
19644
|
+
registrar: chi.chi_r ?? tryAsServiceId(2 ** 32 - 1),
|
|
19463
19645
|
autoAccumulateServices: chi.chi_g ?? [],
|
|
19464
19646
|
}),
|
|
19465
19647
|
statistics: JsonStatisticsData.toStatisticsData(spec, pi),
|
|
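Across the chi_r hunks the rule is: optional in the JSON schema, mandatory from GP 0.7.1 on (the guard above throws), and defaulted for older dumps to 2 ** 32 - 1, apparently a sentinel service id. A condensed sketch of that resolution, with a boolean standing in for Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) and a plain number for ServiceId:

// isAtLeast071 stands in for Compatibility.isGreaterOrEqual(GpVersion.V0_7_1).
function resolveRegistrar(chiR: number | undefined, isAtLeast071: boolean): number {
  if (isAtLeast071 && chiR === undefined) {
    throw new Error("Registrar is required in Privileges GP ^0.7.1");
  }
  // Pre-0.7.1 dumps carry no registrar; fall back to the sentinel id.
  return chiR ?? 2 ** 32 - 1;
}
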
@@ -19482,6 +19664,7 @@ declare const index$1_JsonDisputesRecords: typeof JsonDisputesRecords;
|
|
|
19482
19664
|
type index$1_JsonLookupMeta = JsonLookupMeta;
|
|
19483
19665
|
type index$1_JsonPreimageItem = JsonPreimageItem;
|
|
19484
19666
|
declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
|
|
19667
|
+
type index$1_JsonPreimageStatus = JsonPreimageStatus;
|
|
19485
19668
|
type index$1_JsonRecentBlockState = JsonRecentBlockState;
|
|
19486
19669
|
type index$1_JsonRecentBlocks = JsonRecentBlocks;
|
|
19487
19670
|
type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
|
|
@@ -19506,6 +19689,7 @@ declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
|
|
|
19506
19689
|
declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
|
|
19507
19690
|
declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
|
|
19508
19691
|
declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
|
|
19692
|
+
declare const index$1_preimageStatusFromJson: typeof preimageStatusFromJson;
|
|
19509
19693
|
declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
|
|
19510
19694
|
declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
|
|
19511
19695
|
declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
|
|
@@ -19513,8 +19697,8 @@ declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEn
|
|
|
19513
19697
|
declare const index$1_ticketFromJson: typeof ticketFromJson;
|
|
19514
19698
|
declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
|
|
19515
19699
|
declare namespace index$1 {
|
|
19516
|
-
export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
|
|
19517
|
-
export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
|
|
19700
|
+
export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_preimageStatusFromJson as preimageStatusFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
|
|
19701
|
+
export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonPreimageStatus as JsonPreimageStatus, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
|
|
19518
19702
|
}
|
|
19519
19703
|
|
|
19520
19704
|
/** Helper function to create most used hashes in the block */
|
|
@@ -19522,7 +19706,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
|
|
|
19522
19706
|
constructor(
|
|
19523
19707
|
private readonly context: ChainSpec,
|
|
19524
19708
|
private readonly keccakHasher: KeccakHasher,
|
|
19525
|
-
|
|
19709
|
+
public readonly blake2b: Blake2b,
|
|
19526
19710
|
) {}
|
|
19527
19711
|
|
|
19528
19712
|
/** Concatenates two hashes and hash this concatenation */
|
|
@@ -19536,7 +19720,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
|
|
|
19536
19720
|
|
|
19537
19721
|
/** Creates hash from the block header view */
|
|
19538
19722
|
header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
|
|
19539
|
-
return new WithHash(blake2b.hashBytes(header.encoded()).asOpaque(), header);
|
|
19723
|
+
return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
|
|
19540
19724
|
}
|
|
19541
19725
|
|
|
19542
19726
|
/**
|
|
@@ -19550,7 +19734,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
|
|
|
19550
19734
|
.view()
|
|
19551
19735
|
.map((g) => g.view())
|
|
19552
19736
|
.map((guarantee) => {
|
|
19553
|
-
const reportHash = blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
|
|
19737
|
+
const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
|
|
19554
19738
|
return BytesBlob.blobFromParts([
|
|
19555
19739
|
reportHash.raw,
|
|
19556
19740
|
guarantee.slot.encoded().raw,
|
|
@@ -19560,15 +19744,15 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
|
|
|
19560
19744
|
|
|
19561
19745
|
const guaranteeBlob = Encoder.encodeObject(codec.sequenceVarLen(dumpCodec), guarantees, this.context);
|
|
19562
19746
|
|
|
19563
|
-
const et = blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
|
|
19564
|
-
const ep = blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
|
|
19565
|
-
const eg = blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
|
|
19566
|
-
const ea = blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
|
|
19567
|
-
const ed = blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
|
|
19747
|
+
const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
|
|
19748
|
+
const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
|
|
19749
|
+
const eg = this.blake2b.hashBytes(guaranteeBlob).asOpaque<ExtrinsicHash>();
|
|
19750
|
+
const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
|
|
19751
|
+
const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();
|
|
19568
19752
|
|
|
19569
19753
|
const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);
|
|
19570
19754
|
|
|
19571
|
-
return new WithHashAndBytes(blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
|
|
19755
|
+
return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
|
|
19572
19756
|
}
|
|
19573
19757
|
|
|
19574
19758
|
/** Creates hash for given WorkPackage */
|
|
@@ -19579,7 +19763,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
|
|
|
19579
19763
|
private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
|
|
19580
19764
|
// TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
|
|
19581
19765
|
const encoded = Encoder.encodeObject(codec, data, this.context);
|
|
19582
|
-
return new WithHashAndBytes(blake2b.hashBytes(encoded).asOpaque(), data, encoded);
|
|
19766
|
+
return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
|
|
19583
19767
|
}
|
|
19584
19768
|
}
|
|
19585
19769
|
|
|
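Every hashing site in TransitionHasher switches from a module-level blake2b to the injected this.blake2b, making the hasher an explicit constructor dependency. A hedged construction sketch; the placeholder declarations stand in for however the library actually yields these instances, which this diff does not show:

// Placeholders, not confirmed factories:
declare const spec: ChainSpec;
declare const keccak: KeccakHasher;
declare const blake2b: Blake2b;

// The third constructor argument is new in this version.
const hasher = new TransitionHasher(spec, keccak, blake2b);
// blake2b is public readonly, so dependent code can share one instance
// instead of reaching for a module-level singleton.
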
@@ -19600,7 +19784,10 @@ declare enum PreimagesErrorCode {
|
|
|
19600
19784
|
|
|
19601
19785
|
// TODO [SeKo] consider whether this module is the right place to remove expired preimages
|
|
19602
19786
|
declare class Preimages {
|
|
19603
|
-
constructor(
|
|
19787
|
+
constructor(
|
|
19788
|
+
public readonly state: PreimagesState,
|
|
19789
|
+
public readonly blake2b: Blake2b,
|
|
19790
|
+
) {}
|
|
19604
19791
|
|
|
19605
19792
|
integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
|
|
19606
19793
|
// make sure lookup extrinsics are sorted and unique
|
|
@@ -19629,7 +19816,7 @@ declare class Preimages {
|
|
|
19629
19816
|
// select preimages for integration
|
|
19630
19817
|
for (const preimage of preimages) {
|
|
19631
19818
|
const { requester, blob } = preimage;
|
|
19632
|
-
const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
|
|
19819
|
+
const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();
|
|
19633
19820
|
|
|
19634
19821
|
const service = this.state.getService(requester);
|
|
19635
19822
|
if (service === null) {
|
|
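Preimages follows the same dependency-injection move: both the state view and the Blake2b hasher arrive through the constructor, and integrate() derives each preimage hash via this.blake2b. A hedged usage sketch with placeholder values:

// Placeholders for values the surrounding node code would supply.
declare const state: PreimagesState;
declare const blake2b: Blake2b;
declare const input: PreimagesInput;

const preimages = new Preimages(state, blake2b);
// integrate() hashes each provided blob with the injected hasher, looks up
// the requesting service in state, and returns a state update or error code.
const result = preimages.integrate(input);
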
@@ -19660,156 +19847,6 @@ declare class Preimages {
|
|
|
19660
19847
|
}
|
|
19661
19848
|
}
|
|
19662
19849
|
|
|
19663
|
-
declare enum ServiceExecutorError {
|
|
19664
|
-
NoLookup = 0,
|
|
19665
|
-
NoState = 1,
|
|
19666
|
-
NoServiceCode = 2,
|
|
19667
|
-
ServiceCodeMismatch = 3,
|
|
19668
|
-
}
|
|
19669
|
-
|
|
19670
|
-
declare class WorkPackageExecutor {
|
|
19671
|
-
constructor(
|
|
19672
|
-
private readonly blocks: BlocksDb,
|
|
19673
|
-
private readonly state: StatesDb,
|
|
19674
|
-
private readonly hasher: TransitionHasher,
|
|
19675
|
-
) {}
|
|
19676
|
-
|
|
19677
|
-
// TODO [ToDr] this while thing should be triple-checked with the GP.
|
|
19678
|
-
// I'm currently implementing some dirty version for the demo.
|
|
19679
|
-
async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
|
|
19680
|
-
const headerHash = pack.context.lookupAnchor;
|
|
19681
|
-
// execute authorisation first or is it already executed and we just need to check it?
|
|
19682
|
-
const authExec = this.getServiceExecutor(
|
|
19683
|
-
// TODO [ToDr] should this be anchor or lookupAnchor?
|
|
19684
|
-
headerHash,
|
|
19685
|
-
pack.authCodeHost,
|
|
19686
|
-
pack.authCodeHash,
|
|
19687
|
-
);
|
|
19688
|
-
|
|
19689
|
-
if (authExec.isError) {
|
|
19690
|
-
// TODO [ToDr] most likely shouldn't be throw.
|
|
19691
|
-
throw new Error(`Could not get authorization executor: ${authExec.error}`);
|
|
19692
|
-
}
|
|
19693
|
-
|
|
19694
|
-
const pvm = authExec.ok;
|
|
19695
|
-
const authGas = tryAsGas(15_000n);
|
|
19696
|
-
const result = await pvm.run(pack.parametrization, authGas);
|
|
19697
|
-
|
|
19698
|
-
if (!result.isEqualTo(pack.authorization)) {
|
|
19699
|
-
throw new Error("Authorization is invalid.");
|
|
19700
|
-
}
|
|
19701
|
-
|
|
19702
|
-
const results: WorkResult[] = [];
|
|
19703
|
-
for (const item of pack.items) {
|
|
19704
|
-
const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
|
|
19705
|
-
if (exec.isError) {
|
|
19706
|
-
throw new Error(`Could not get item executor: ${exec.error}`);
|
|
19707
|
-
}
|
|
19708
|
-
const pvm = exec.ok;
|
|
19709
|
-
|
|
19710
|
-
const gasRatio = tryAsServiceGas(3_000n);
|
|
19711
|
-
const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
|
|
19712
|
-
results.push(
|
|
19713
|
-
WorkResult.create({
|
|
19714
|
-
serviceId: item.service,
|
|
19715
|
-
codeHash: item.codeHash,
|
|
19716
|
-
payloadHash: blake2b.hashBytes(item.payload),
|
|
19717
|
-
gas: gasRatio,
|
|
19718
|
-
result: new WorkExecResult(WorkExecResultKind.ok, ret),
|
|
19719
|
-
load: WorkRefineLoad.create({
|
|
19720
|
-
gasUsed: tryAsServiceGas(5),
|
|
19721
|
-
importedSegments: tryAsU32(0),
|
|
19722
|
-
exportedSegments: tryAsU32(0),
|
|
19723
|
-
extrinsicSize: tryAsU32(0),
|
|
19724
|
-
extrinsicCount: tryAsU32(0),
|
|
19725
|
-
}),
|
|
19726
|
-
}),
|
|
19727
|
-
);
|
|
19728
|
-
}
|
|
19729
|
-
|
|
19730
|
-
const workPackage = this.hasher.workPackage(pack);
|
|
19731
|
-
const workPackageSpec = WorkPackageSpec.create({
|
|
19732
|
-
hash: workPackage.hash,
|
|
19733
|
-
length: tryAsU32(workPackage.encoded.length),
|
|
19734
|
-
erasureRoot: Bytes.zero(HASH_SIZE),
|
|
19735
|
-
exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
|
|
19736
|
-
exportsCount: tryAsU16(0),
|
|
19737
|
-
});
|
|
19738
|
-
const coreIndex = tryAsCoreIndex(0);
|
|
19739
|
-
const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
|
|
19740
|
-
|
|
19741
|
-
const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
|
|
19742
|
-
|
|
19743
|
-
return Promise.resolve(
|
|
19744
|
-
WorkReport.create({
|
|
19745
|
-
workPackageSpec,
|
|
19746
|
-
context: pack.context,
|
|
19747
|
-
coreIndex,
|
|
19748
|
-
authorizerHash,
|
|
19749
|
-
authorizationOutput: pack.authorization,
|
|
19750
|
-
segmentRootLookup: [],
|
|
19751
|
-
results: workResults,
|
|
19752
|
-
authorizationGasUsed: tryAsServiceGas(0),
|
|
19753
|
-
}),
|
|
19754
|
-
);
|
|
19755
|
-
}
|
|
19756
|
-
|
|
19757
|
-
getServiceExecutor(
|
|
19758
|
-
lookupAnchor: HeaderHash,
|
|
19759
|
-
serviceId: ServiceId,
|
|
19760
|
-
expectedCodeHash: CodeHash,
|
|
19761
|
-
): Result$2<PvmExecutor, ServiceExecutorError> {
|
|
19762
|
-
const header = this.blocks.getHeader(lookupAnchor);
|
|
19763
|
-
if (header === null) {
|
|
19764
|
-
return Result.error(ServiceExecutorError.NoLookup);
|
|
19765
|
-
}
|
|
19766
|
-
|
|
19767
|
-
const state = this.state.getState(lookupAnchor);
|
|
19768
|
-
if (state === null) {
|
|
19769
|
-
return Result.error(ServiceExecutorError.NoState);
|
|
19770
|
-
}
|
|
19771
|
-
|
|
19772
|
-
const service = state.getService(serviceId);
|
|
19773
|
-
const serviceCodeHash = service?.getInfo().codeHash ?? null;
|
|
19774
|
-
if (serviceCodeHash === null) {
|
|
19775
|
-
return Result.error(ServiceExecutorError.NoServiceCode);
|
|
19776
|
-
}
|
|
19777
|
-
|
|
19778
|
-
if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
|
|
19779
|
-
return Result.error(ServiceExecutorError.ServiceCodeMismatch);
|
|
19780
|
-
}
|
|
19781
|
-
|
|
19782
|
-
const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
|
|
19783
|
-
if (serviceCode === null) {
|
|
19784
|
-
return Result.error(ServiceExecutorError.NoServiceCode);
|
|
19785
|
-
}
|
|
19786
|
-
|
|
19787
|
-
return Result.ok(new PvmExecutor(serviceCode));
|
|
19788
|
-
}
|
|
19789
|
-
}
|
|
19790
|
-
|
|
19791
|
-
declare class PvmExecutor {
|
|
19792
|
-
private readonly pvm: HostCalls;
|
|
19793
|
-
private hostCalls = new HostCallsManager({ missing: new Missing() });
|
|
19794
|
-
private pvmInstanceManager = new PvmInstanceManager(4);
|
|
19795
|
-
|
|
19796
|
-
constructor(private serviceCode: BytesBlob) {
|
|
19797
|
-
this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
|
|
19798
|
-
}
|
|
19799
|
-
|
|
19800
|
-
async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
|
|
19801
|
-
const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
|
|
19802
|
-
|
|
19803
|
-
const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
|
|
19804
|
-
|
|
19805
|
-
if (result.hasMemorySlice()) {
|
|
19806
|
-
return BytesBlob.blobFrom(result.memorySlice);
|
|
19807
|
-
}
|
|
19808
|
-
|
|
19809
|
-
return BytesBlob.empty();
|
|
19810
|
-
}
|
|
19811
|
-
}
|
|
19812
|
-
|
|
19813
19850
|
type index_Preimages = Preimages;
|
|
19814
19851
|
declare const index_Preimages: typeof Preimages;
|
|
19815
19852
|
type index_PreimagesErrorCode = PreimagesErrorCode;
|
|
@@ -19819,10 +19856,8 @@ type index_PreimagesState = PreimagesState;
|
|
|
19819
19856
|
type index_PreimagesStateUpdate = PreimagesStateUpdate;
|
|
19820
19857
|
type index_TransitionHasher = TransitionHasher;
|
|
19821
19858
|
declare const index_TransitionHasher: typeof TransitionHasher;
|
|
19822
|
-
type index_WorkPackageExecutor = WorkPackageExecutor;
|
|
19823
|
-
declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
|
|
19824
19859
|
declare namespace index {
|
|
19825
|
-
export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher, index_WorkPackageExecutor as WorkPackageExecutor };
|
|
19860
|
+
export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
|
|
19826
19861
|
export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
|
|
19827
19862
|
}
|
|
19828
19863
|
|