@typeberry/lib 0.1.3-ea24911 → 0.2.0-74f246e
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +1185 -1682
- package/index.d.ts +1034 -751
- package/index.js +1184 -1681
- package/package.json +1 -1
package/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 declare enum GpVersion {
   V0_6_7 = "0.6.7",
   V0_7_0 = "0.7.0",
-  V0_7_1 = "0.7.1
+  V0_7_1 = "0.7.1",
   V0_7_2 = "0.7.2-preview",
 }
 
@@ -11,12 +11,12 @@ declare enum TestSuite {
 }
 
 declare const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
-
-declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
-declare const DEFAULT_VERSION = GpVersion.V0_7_0;
+declare const DEFAULT_VERSION = GpVersion.V0_7_1;
 declare let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
 declare let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
 
+declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1, GpVersion.V0_7_2];
+
 declare function parseCurrentVersion(env?: string): GpVersion | undefined {
   if (env === undefined) {
     return undefined;
@@ -35,7 +35,9 @@ declare function parseCurrentVersion(env?: string): GpVersion | undefined {
 }
 
 declare function parseCurrentSuite(env?: string): TestSuite | undefined {
-  if (env === undefined)
+  if (env === undefined) {
+    return undefined;
+  }
   switch (env) {
     case TestSuite.W3F_DAVXY:
     case TestSuite.JAMDUNA:
@@ -420,6 +422,20 @@ declare const Result$2 = {
   },
 };
 
+// about 2GB, the maximum ArrayBuffer length on Chrome confirmed by several sources:
+// - https://issues.chromium.org/issues/40055619
+// - https://stackoverflow.com/a/72124984
+// - https://onnxruntime.ai/docs/tutorials/web/large-models.html#maximum-size-of-arraybuffer
+declare const MAX_LENGTH$1 = 2145386496;
+
+declare function safeAllocUint8Array(length: number) {
+  if (length > MAX_LENGTH) {
+    // biome-ignore lint/suspicious/noConsole: can't have a dependency on logger here
+    console.warn(`Trying to allocate ${length} bytes, which is greater than the maximum of ${MAX_LENGTH}.`);
+  }
+  return new Uint8Array(Math.min(MAX_LENGTH, length));
+}
+
 /**
  * Utilities for tests.
  */
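The `safeAllocUint8Array` helper added above clamps allocations to the ~2 GB `ArrayBuffer` ceiling instead of letting `new Uint8Array(length)` throw a `RangeError`. A minimal caller-side sketch (the flat import path is an assumption; the helper is re-exported through the package's utils namespace):

```ts
import { safeAllocUint8Array } from "@typeberry/lib"; // assumed re-export path

// Oversized requests are clamped (with a console warning) rather than thrown,
// so callers that might exceed the limit should check the resulting length.
const requested = 3_000_000_000;
const buffer = safeAllocUint8Array(requested);
if (buffer.length < requested) {
  throw new Error(`only ${buffer.length} of ${requested} bytes allocated`);
}
```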
@@ -573,8 +589,12 @@ declare function deepEqual<T>(
     const aKey = `${a.key}`;
     const bKey = `${b.key}`;
 
-    if (aKey < bKey)
-
+    if (aKey < bKey) {
+      return -1;
+    }
+    if (bKey < aKey) {
+      return 1;
+    }
     return 0;
   });
 };
@@ -755,11 +775,12 @@ declare const index$u_oomWarningPrinted: typeof oomWarningPrinted;
 declare const index$u_parseCurrentSuite: typeof parseCurrentSuite;
 declare const index$u_parseCurrentVersion: typeof parseCurrentVersion;
 declare const index$u_resultToString: typeof resultToString;
+declare const index$u_safeAllocUint8Array: typeof safeAllocUint8Array;
 declare const index$u_seeThrough: typeof seeThrough;
 declare const index$u_trimStack: typeof trimStack;
 declare const index$u_workspacePathFix: typeof workspacePathFix;
 declare namespace index$u {
-  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
+  export { index$u_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$u_CURRENT_SUITE as CURRENT_SUITE, index$u_CURRENT_VERSION as CURRENT_VERSION, index$u_Compatibility as Compatibility, index$u_DEFAULT_SUITE as DEFAULT_SUITE, index$u_DEFAULT_VERSION as DEFAULT_VERSION, index$u_ErrorsCollector as ErrorsCollector, index$u_GpVersion as GpVersion, MAX_LENGTH$1 as MAX_LENGTH, Result$2 as Result, index$u_RichTaggedError as RichTaggedError, index$u_TEST_COMPARE_USING as TEST_COMPARE_USING, index$u_TestSuite as TestSuite, index$u_WithDebug as WithDebug, index$u___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$u_asOpaqueType as asOpaqueType, index$u_assertEmpty as assertEmpty, index$u_assertNever as assertNever, index$u_callCompareFunction as callCompareFunction, index$u_check as check, index$u_deepEqual as deepEqual, index$u_getAllKeysSorted as getAllKeysSorted, index$u_inspect as inspect, index$u_isBrowser as isBrowser, index$u_isResult as isResult, index$u_isTaggedError as isTaggedError, index$u_maybeTaggedErrorToString as maybeTaggedErrorToString, index$u_measure as measure, index$u_oomWarningPrinted as oomWarningPrinted, index$u_parseCurrentSuite as parseCurrentSuite, index$u_parseCurrentVersion as parseCurrentVersion, index$u_resultToString as resultToString, index$u_safeAllocUint8Array as safeAllocUint8Array, index$u_seeThrough as seeThrough, index$u_trimStack as trimStack, index$u_workspacePathFix as workspacePathFix };
   export type { index$u_DeepEqualOptions as DeepEqualOptions, index$u_EnumMapping as EnumMapping, index$u_ErrorResult as ErrorResult, index$u_OK as OK, index$u_OkResult as OkResult, index$u_Opaque as Opaque, index$u_StringLiteral as StringLiteral, index$u_TaggedError as TaggedError, index$u_TokenOf as TokenOf, index$u_Uninstantiable as Uninstantiable, index$u_WithOpaque as WithOpaque };
 }
 
@@ -929,7 +950,7 @@ declare class BytesBlob {
   static blobFromParts(v: Uint8Array | Uint8Array[], ...rest: Uint8Array[]) {
     const vArr = v instanceof Uint8Array ? [v] : v;
     const totalLength = vArr.reduce((a, v) => a + v.length, 0) + rest.reduce((a, v) => a + v.length, 0);
-    const buffer =
+    const buffer = safeAllocUint8Array(totalLength);
     let offset = 0;
     for (const r of vArr) {
       buffer.set(r, offset);
@@ -1012,7 +1033,7 @@ declare class Bytes<T extends number> extends BytesBlob {
 
   /** Create an empty [`Bytes<X>`] of given length. */
   static zero<X extends number>(len: X): Bytes<X> {
-    return new Bytes(
+    return new Bytes(safeAllocUint8Array(len), len);
   }
 
   // TODO [ToDr] `fill` should have the argments swapped to align with the rest.
@@ -1133,7 +1154,7 @@ declare class BitVec {
    * Create new [`BitVec`] with all values set to `false`.
    */
   static empty(bitLength: number) {
-    const data =
+    const data = safeAllocUint8Array(Math.ceil(bitLength / 8));
     return new BitVec(data, bitLength);
   }
 
@@ -2406,11 +2427,15 @@ type ClassConstructor<T> = {
   create: (o: CodecRecord<T>) => T;
 };
 
-/**
- * A full codec type, i.e. the `Encode` and `Decode`.
- */
+/** A full codec type, i.e. the `Encode` and `Decode`. */
 type Codec<T> = Encode<T> & Decode<T>;
 
+/** A codec descriptor with extra view. */
+type CodecWithView<T, V> = Codec<T> & {
+  /** encoded data view codec. */
+  View: Codec<V>;
+};
+
 /**
  * Type descriptor definition.
  *
@@ -2419,7 +2444,7 @@ type Codec<T> = Encode<T> & Decode<T>;
  *
  * Descriptors can be composed to form more complex typings.
  */
-declare class Descriptor<T, V = T> implements Codec<T>, Skip {
+declare class Descriptor<T, V = T> implements Codec<T>, Skip, CodecWithView<T, V> {
   /** A "lightweight" version of the object. */
   public readonly View: Descriptor<V>;
 
@@ -2665,6 +2690,10 @@ declare abstract class ObjectView<T> {
   toString() {
     return `View<${this.materializedConstructor.name}>(cache: ${this.cache.size})`;
   }
+
+  [TEST_COMPARE_USING]() {
+    return this.materialize();
+  }
 }
 
 /**
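The new `[TEST_COMPARE_USING]` method lets the package's `deepEqual` test helper compare a lazily-decoded `ObjectView` by its materialized value. A sketch of the hook contract, assuming `deepEqual` looks the symbol (exported from the utils namespace) up on compared objects:

```ts
// Any object exposing [TEST_COMPARE_USING] is compared via the hook's result,
// so a lazy view compares equal to the object it decodes to.
class ExampleView {
  constructor(private readonly value: { a: number }) {}
  materialize(): { a: number } {
    return this.value;
  }
  [TEST_COMPARE_USING]() {
    return this.materialize();
  }
}
```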
@@ -3216,15 +3245,25 @@ declare namespace codec$1 {
       sizeHint: SizeHint;
     },
     chooser: (ctx: unknown | null) => Descriptor<T, V>,
-  ): Descriptor<T, V> =>
-
+  ): Descriptor<T, V> => {
+    const Self = chooser(null);
+    return Descriptor.withView(
       name,
       sizeHint,
       (e, x) => chooser(e.getContext()).encode(e, x),
       (d) => chooser(d.getContext()).decode(d),
       (s) => chooser(s.decoder.getContext()).skip(s),
-
+      hasUniqueView(Self)
+        ? select(
+            {
+              name: Self.View.name,
+              sizeHint: Self.View.sizeHint,
+            },
+            (ctx) => chooser(ctx).View,
+          )
+        : Self.View,
     );
+  };
 
 /**
  * A descriptor for a more complex POJO.
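The reworked `select` combinator probes the chooser once with a `null` context; when every variant shares the same `View` codec (`hasUniqueView`), that view is reused directly, otherwise the `View` itself becomes context-sensitive via a nested `select`. A sketch of a context-dependent descriptor that benefits (the `isLegacyContext` predicate and the two gas descriptors are hypothetical):

```ts
// Hypothetical: a descriptor chosen per decoding context. After this change
// its View also dispatches on context, instead of being pinned to whichever
// variant the chooser happened to return for a null context.
const gasCodec = codec.select(
  { name: "gas", sizeHint: { bytes: 8, isExact: false } },
  (ctx) => (isLegacyContext(ctx) ? legacyGasDescriptor : currentGasDescriptor),
);
```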
@@ -3418,6 +3457,7 @@ declare function sequenceViewFixLen<T, V>(
 type index$q_ClassConstructor<T> = ClassConstructor<T>;
 type index$q_Codec<T> = Codec<T>;
 type index$q_CodecRecord<T> = CodecRecord<T>;
+type index$q_CodecWithView<T, V> = CodecWithView<T, V>;
 declare const index$q_DEFAULT_START_LENGTH: typeof DEFAULT_START_LENGTH;
 type index$q_Decode<T> = Decode<T>;
 type index$q_Decoder = Decoder;
@@ -3458,9 +3498,102 @@ declare const index$q_tryAsExactBytes: typeof tryAsExactBytes;
 declare const index$q_validateLength: typeof validateLength;
 declare namespace index$q {
   export { index$q_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$q_Decoder as Decoder, index$q_Descriptor as Descriptor, index$q_Encoder as Encoder, index$q_MASKS as MASKS, index$q_MAX_LENGTH as MAX_LENGTH, index$q_ObjectView as ObjectView, index$q_SequenceView as SequenceView, index$q_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$q_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$q_ViewField as ViewField, index$q_addSizeHints as addSizeHints, codec$1 as codec, index$q_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$q_exactHint as exactHint, index$q_forEachDescriptor as forEachDescriptor, index$q_hasUniqueView as hasUniqueView, index$q_objectView as objectView, index$q_readonlyArray as readonlyArray, index$q_sequenceViewFixLen as sequenceViewFixLen, index$q_sequenceViewVarLen as sequenceViewVarLen, index$q_tryAsExactBytes as tryAsExactBytes, index$q_validateLength as validateLength };
-  export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
+  export type { index$q_ClassConstructor as ClassConstructor, index$q_Codec as Codec, index$q_CodecRecord as CodecRecord, index$q_CodecWithView as CodecWithView, index$q_Decode as Decode, index$q_DescribedBy as DescribedBy, index$q_DescriptorRecord as DescriptorRecord, index$q_Encode as Encode, index$q_LengthRange as LengthRange, index$q_OptionalRecord as OptionalRecord, Options$1 as Options, index$q_PropertyKeys as PropertyKeys, index$q_SimpleDescriptorRecord as SimpleDescriptorRecord, index$q_SizeHint as SizeHint, index$q_ViewOf as ViewOf };
 }
 
+/**
+ * A utility class providing a readonly view over a portion of an array without copying it.
+ */
+declare class ArrayView<T> implements Iterable<T> {
+  private readonly source: T[];
+  public readonly length: number;
+
+  private constructor(
+    source: T[],
+    private readonly start: number,
+    private readonly end: number,
+  ) {
+    this.source = source;
+    this.length = end - start;
+  }
+
+  static from<T>(source: T[], start = 0, end = source.length): ArrayView<T> {
+    check`
+      ${start >= 0 && end <= source.length && start <= end}
+      Invalid start (${start})/end (${end}) for ArrayView
+    `;
+    return new ArrayView(source, start, end);
+  }
+
+  get(i: number): T {
+    check`
+      ${i >= 0 && i < this.length}
+      Index out of bounds: ${i} < ${this.length}
+    `;
+    return this.source[this.start + i];
+  }
+
+  subview(from: number, to: number = this.length): ArrayView<T> {
+    return ArrayView.from(this.source, this.start + from, this.start + to);
+  }
+
+  toArray(): T[] {
+    return this.source.slice(this.start, this.end);
+  }
+
+  *[Symbol.iterator](): Iterator<T> {
+    for (let i = this.start; i < this.end; i++) {
+      yield this.source[i];
+    }
+  }
+}
+
+type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
+type IDataType = string | Buffer | ITypedArray;
+
+type IHasher = {
+  /**
+   * Initializes hash state to default value
+   */
+  init: () => IHasher;
+  /**
+   * Updates the hash content with the given data
+   */
+  update: (data: IDataType) => IHasher;
+  /**
+   * Calculates the hash of all of the data passed to be hashed with hash.update().
+   * Defaults to hexadecimal string
+   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
+   * returns hexadecimal string
+   */
+  digest: {
+    (outputType: "binary"): Uint8Array;
+    (outputType?: "hex"): string;
+  };
+  /**
+   * Save the current internal state of the hasher for later resumption with load().
+   * Cannot be called before .init() or after .digest()
+   *
+   * Note that this state can include arbitrary information about the value being hashed (e.g.
+   * could include N plaintext bytes from the value), so needs to be treated as being as
+   * sensitive as the input value itself.
+   */
+  save: () => Uint8Array;
+  /**
+   * Resume a state that was created by save(). If this state was not created by a
+   * compatible build of hash-wasm, an exception will be thrown.
+   */
+  load: (state: Uint8Array) => IHasher;
+  /**
+   * Block size in bytes
+   */
+  blockSize: number;
+  /**
+   * Digest size in bytes
+   */
+  digestSize: number;
+};
+
 /**
  * Size of the output of the hash functions.
  *
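`ArrayView` (added above) is a read-only, zero-copy window over a slice of an array; `subview` re-slices without copying and only `toArray` copies. For example:

```ts
const data = [10, 20, 30, 40, 50];
const view = ArrayView.from(data, 1, 4); // window over 20, 30, 40
console.log(view.length);            // 3
console.log(view.get(0));            // 20
console.log([...view.subview(1)]);   // [30, 40] — still no copy
console.log(view.toArray());         // [20, 30, 40] — this one copies
```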
@@ -3516,144 +3649,46 @@ declare class WithHashAndBytes<THash extends OpaqueHash, TData> extends WithHash
   }
 }
 
-
-interface HashAllocator {
-  /** Return a new hash destination. */
-  emptyHash(): OpaqueHash;
-}
-
-/** The simplest allocator returning just a fresh copy of bytes each time. */
-declare class SimpleAllocator implements HashAllocator {
-  emptyHash(): OpaqueHash {
-    return Bytes.zero(HASH_SIZE);
-  }
-}
-
-/** An allocator that works by allocating larger (continuous) pages of memory. */
-declare class PageAllocator implements HashAllocator {
-  private page: Uint8Array = new Uint8Array(0);
-  private currentHash = 0;
+declare const zero$1 = Bytes.zero(HASH_SIZE);
 
-
-
-
-    this.resetPage();
+declare class Blake2b {
+  static async createHasher() {
+    return new Blake2b(await createBLAKE2b(HASH_SIZE * 8));
   }
 
-  private
-    const pageSizeBytes = this.hashesPerPage * HASH_SIZE;
-    this.currentHash = 0;
-    this.page = new Uint8Array(pageSizeBytes);
-  }
+  private constructor(private readonly hasher: IHasher) {}
 
-
-
-
-
-
-
-
+  /**
+   * Hash given collection of blobs.
+   *
+   * If empty array is given a zero-hash is returned.
+   */
+  hashBlobs<H extends Blake2bHash>(r: (BytesBlob | Uint8Array)[]): H {
+    if (r.length === 0) {
+      return zero.asOpaque();
     }
 
-
+    const hasher = this.hasher.init();
+    for (const v of r) {
+      hasher.update(v instanceof BytesBlob ? v.raw : v);
+    }
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
-  }
-
-declare const defaultAllocator = new SimpleAllocator();
 
-/**
-
-
-
-
-  r: (BytesBlob | Uint8Array)[],
-  allocator: HashAllocator = defaultAllocator,
-): H {
-  const out = allocator.emptyHash();
-  if (r.length === 0) {
-    return out.asOpaque();
+  /** Hash given blob of bytes. */
+  hashBytes(blob: BytesBlob | Uint8Array): Blake2bHash {
+    const hasher = this.hasher.init();
+    const bytes = blob instanceof BytesBlob ? blob.raw : blob;
+    hasher.update(bytes);
+    return Bytes.fromBlob(hasher.digest("binary"), HASH_SIZE).asOpaque();
   }
 
-
-
+  /** Convert given string into bytes and hash it. */
+  hashString(str: string) {
+    return this.hashBytes(BytesBlob.blobFromString(str));
   }
-  hasher?.digest(out.raw);
-  return out.asOpaque();
-}
-
-/** Hash given blob of bytes. */
-declare function hashBytes(blob: BytesBlob | Uint8Array, allocator: HashAllocator = defaultAllocator): Blake2bHash {
-  const hasher = blake2b(HASH_SIZE);
-  const bytes = blob instanceof BytesBlob ? blob.raw : blob;
-  hasher?.update(bytes);
-  const out = allocator.emptyHash();
-  hasher?.digest(out.raw);
-  return out;
-}
-
-/** Convert given string into bytes and hash it. */
-declare function hashString(str: string, allocator: HashAllocator = defaultAllocator) {
-  return hashBytes(BytesBlob.blobFromString(str), allocator);
 }
 
-declare const blake2b_hashBytes: typeof hashBytes;
-declare const blake2b_hashString: typeof hashString;
-declare namespace blake2b {
-  export {
-    hashBlobs$1 as hashBlobs,
-    blake2b_hashBytes as hashBytes,
-    blake2b_hashString as hashString,
-  };
-}
-
-type ITypedArray = Uint8Array | Uint16Array | Uint32Array;
-type IDataType = string | Buffer | ITypedArray;
-
-type IHasher = {
-  /**
-   * Initializes hash state to default value
-   */
-  init: () => IHasher;
-  /**
-   * Updates the hash content with the given data
-   */
-  update: (data: IDataType) => IHasher;
-  /**
-   * Calculates the hash of all of the data passed to be hashed with hash.update().
-   * Defaults to hexadecimal string
-   * @param outputType If outputType is "binary", it returns Uint8Array. Otherwise it
-   * returns hexadecimal string
-   */
-  digest: {
-    (outputType: "binary"): Uint8Array;
-    (outputType?: "hex"): string;
-  };
-  /**
-   * Save the current internal state of the hasher for later resumption with load().
-   * Cannot be called before .init() or after .digest()
-   *
-   * Note that this state can include arbitrary information about the value being hashed (e.g.
-   * could include N plaintext bytes from the value), so needs to be treated as being as
-   * sensitive as the input value itself.
-   */
-  save: () => Uint8Array;
-  /**
-   * Resume a state that was created by save(). If this state was not created by a
-   * compatible build of hash-wasm, an exception will be thrown.
-   */
-  load: (state: Uint8Array) => IHasher;
-  /**
-   * Block size in bytes
-   */
-  blockSize: number;
-  /**
-   * Digest size in bytes
-   */
-  digestSize: number;
-};
-
 declare class KeccakHasher {
   static async create(): Promise<KeccakHasher> {
     return new KeccakHasher(await createKeccak(256));
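Blake2b hashing moves from module-level functions with a `HashAllocator` parameter to an instance-based `Blake2b` wrapper over a hash-wasm `IHasher`: the WASM hasher is created once (asynchronously) and then reused synchronously. A usage sketch:

```ts
// Inside an async context: one-time WASM setup, then synchronous hashing.
const blake2b = await Blake2b.createHasher();
const nameHash = blake2b.hashString("hello");
const combined = blake2b.hashBlobs([nameHash.raw, new Uint8Array([1, 2, 3])]);
// hashBlobs([]) short-circuits to the exported `zero` hash constant.
```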
@@ -3681,15 +3716,15 @@ declare namespace keccak {
   };
 }
 
+// TODO [ToDr] (#213) this should most likely be moved to a separate
+// package to avoid pulling in unnecessary deps.
+
+type index$p_Blake2b = Blake2b;
+declare const index$p_Blake2b: typeof Blake2b;
 type index$p_Blake2bHash = Blake2bHash;
 type index$p_HASH_SIZE = HASH_SIZE;
-type index$p_HashAllocator = HashAllocator;
 type index$p_KeccakHash = KeccakHash;
 type index$p_OpaqueHash = OpaqueHash;
-type index$p_PageAllocator = PageAllocator;
-declare const index$p_PageAllocator: typeof PageAllocator;
-type index$p_SimpleAllocator = SimpleAllocator;
-declare const index$p_SimpleAllocator: typeof SimpleAllocator;
 type index$p_TRUNCATED_HASH_SIZE = TRUNCATED_HASH_SIZE;
 type index$p_TruncatedHash = TruncatedHash;
 type index$p_WithHash<THash extends OpaqueHash, TData> = WithHash<THash, TData>;
@@ -3697,12 +3732,10 @@ declare const index$p_WithHash: typeof WithHash;
 type index$p_WithHashAndBytes<THash extends OpaqueHash, TData> = WithHashAndBytes<THash, TData>;
 declare const index$p_WithHashAndBytes: typeof WithHashAndBytes;
 declare const index$p_ZERO_HASH: typeof ZERO_HASH;
-declare const index$p_blake2b: typeof blake2b;
-declare const index$p_defaultAllocator: typeof defaultAllocator;
 declare const index$p_keccak: typeof keccak;
 declare namespace index$p {
-  export { index$
-  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$
+  export { index$p_Blake2b as Blake2b, index$p_WithHash as WithHash, index$p_WithHashAndBytes as WithHashAndBytes, index$p_ZERO_HASH as ZERO_HASH, index$p_keccak as keccak, zero$1 as zero };
+  export type { index$p_Blake2bHash as Blake2bHash, index$p_HASH_SIZE as HASH_SIZE, index$p_KeccakHash as KeccakHash, index$p_OpaqueHash as OpaqueHash, index$p_TRUNCATED_HASH_SIZE as TRUNCATED_HASH_SIZE, index$p_TruncatedHash as TruncatedHash };
 }
 
 /** Immutable view of the `HashDictionary`. */
@@ -4479,6 +4512,8 @@ declare class TruncatedHashDictionary<T extends OpaqueHash, V> {
   }
 }
 
+type index$o_ArrayView<T> = ArrayView<T>;
+declare const index$o_ArrayView: typeof ArrayView;
 type index$o_FixedSizeArray<T, N extends number> = FixedSizeArray<T, N>;
 declare const index$o_FixedSizeArray: typeof FixedSizeArray;
 type index$o_HashDictionary<K extends OpaqueHash, V> = HashDictionary<K, V>;
@@ -4506,7 +4541,7 @@ type index$o_TruncatedHashDictionary<T extends OpaqueHash, V> = TruncatedHashDic
 declare const index$o_TruncatedHashDictionary: typeof TruncatedHashDictionary;
 declare const index$o_asKnownSize: typeof asKnownSize;
 declare namespace index$o {
-  export { index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
+  export { index$o_ArrayView as ArrayView, index$o_FixedSizeArray as FixedSizeArray, index$o_HashDictionary as HashDictionary, index$o_HashSet as HashSet, index$o_MultiMap as MultiMap, index$o_SortedArray as SortedArray, index$o_SortedSet as SortedSet, index$o_TruncatedHashDictionary as TruncatedHashDictionary, index$o_asKnownSize as asKnownSize };
   export type { index$o_HashWithZeroedBit as HashWithZeroedBit, index$o_ImmutableHashDictionary as ImmutableHashDictionary, index$o_ImmutableHashSet as ImmutableHashSet, index$o_ImmutableSortedArray as ImmutableSortedArray, index$o_ImmutableSortedSet as ImmutableSortedSet, index$o_KeyMapper as KeyMapper, index$o_KeyMappers as KeyMappers, index$o_KnownSize as KnownSize, index$o_KnownSizeArray as KnownSizeArray, index$o_KnownSizeId as KnownSizeId, index$o_NestedMaps as NestedMaps };
 }
 
@@ -4735,7 +4770,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
     (acc, { message, key, signature }) => acc + key.length + signature.length + message.length + 1,
     0,
   );
-  const data =
+  const data = safeAllocUint8Array(dataLength);
 
   let offset = 0;
 
@@ -4825,22 +4860,16 @@ declare function trivialSeed(s: U32): KeySeed {
  * Derives a Ed25519 secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveEd25519SecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): Ed25519SecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveEd25519SecretKey(seed: KeySeed, blake2b: Blake2b): Ed25519SecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([ED25519_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
  * Derives a Bandersnatch secret key from a seed.
  * https://github.com/polkadot-fellows/JIPs/blob/7048f79edf4f4eb8bfe6fb42e6bbf61900f44c65/JIP-5.md#derivation-method
  */
-declare function deriveBandersnatchSecretKey(
-
-  allocator: SimpleAllocator = new SimpleAllocator(),
-): BandersnatchSecretSeed {
-  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw]), allocator).asOpaque();
+declare function deriveBandersnatchSecretKey(seed: KeySeed, blake2b: Blake2b): BandersnatchSecretSeed {
+  return blake2b.hashBytes(BytesBlob.blobFromParts([BANDERSNATCH_SECRET_KEY.raw, seed.raw])).asOpaque();
 }
 
 /**
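The JIP-5 derivation functions follow the same refactor: the implicit `SimpleAllocator` default is replaced by an explicit `Blake2b` instance, so the async WASM setup happens once at the call site. A sketch (`trivialSeed` is the test-only seed helper from this same file; `tryAsU32` is assumed to be the package's u32 cast helper):

```ts
const blake2b = await Blake2b.createHasher();
const seed = trivialSeed(tryAsU32(42)); // test-only seed
const ed25519Secret = deriveEd25519SecretKey(seed, blake2b);
const bandersnatchSecret = deriveBandersnatchSecretKey(seed, blake2b);
```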
@@ -6907,6 +6936,17 @@ declare function emptyBlock(slot: TimeSlot = tryAsTimeSlot(0)) {
   });
 }
 
+/**
+ * Take an input data and re-encode that data as view.
+ *
+ * NOTE: this function should NEVER be used in any production code,
+ * it's only a test helper.
+ */
+declare function reencodeAsView<T, V>(codec: Descriptor<T, V>, object: T, chainSpec?: ChainSpec): V {
+  const encoded = Encoder.encodeObject(codec, object, chainSpec);
+  return Decoder.decodeObject(codec.View, encoded, chainSpec);
+}
+
 type index$l_Block = Block;
 declare const index$l_Block: typeof Block;
 type index$l_BlockView = BlockView;
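`reencodeAsView` (explicitly a test-only helper per its doc comment) is the shortest path from a materialized object to its lazily-decoded view: encode with the descriptor, decode with the descriptor's `View`. Sketch:

```ts
// Round-trip a block header into its view form for a test.
const headerView = reencodeAsView(Header.Codec, header);
// Combined with ObjectView's [TEST_COMPARE_USING] hook from the earlier hunk,
// the view can be asserted deep-equal to the original object.
deepEqual(headerView, header);
```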
@@ -6956,6 +6996,7 @@ declare const index$l_guarantees: typeof guarantees;
 declare const index$l_headerViewWithHashCodec: typeof headerViewWithHashCodec;
 declare const index$l_legacyDescriptor: typeof legacyDescriptor;
 declare const index$l_preimage: typeof preimage;
+declare const index$l_reencodeAsView: typeof reencodeAsView;
 declare const index$l_refineContext: typeof refineContext;
 declare const index$l_tickets: typeof tickets;
 declare const index$l_tryAsCoreIndex: typeof tryAsCoreIndex;
@@ -6972,7 +7013,7 @@ declare const index$l_workPackage: typeof workPackage;
 declare const index$l_workReport: typeof workReport;
 declare const index$l_workResult: typeof workResult;
 declare namespace index$l {
-  export { index$l_Block as Block, index$l_EpochMarker as EpochMarker, index$l_Extrinsic as Extrinsic, index$l_Header as Header, index$l_HeaderViewWithHash as HeaderViewWithHash, index$l_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$l_TicketsMarker as TicketsMarker, index$l_ValidatorKeys as ValidatorKeys, index$l_W_E as W_E, index$l_W_S as W_S, index$l_assurances as assurances, index$l_codecPerEpochBlock as codecPerEpochBlock, index$l_codecPerValidator as codecPerValidator, codec as codecUtils, index$l_disputes as disputes, index$l_emptyBlock as emptyBlock, index$l_encodeUnsealedHeader as encodeUnsealedHeader, index$l_guarantees as guarantees, index$l_headerViewWithHashCodec as headerViewWithHashCodec, index$l_legacyDescriptor as legacyDescriptor, index$l_preimage as preimage, index$l_refineContext as refineContext, index$l_tickets as tickets, index$l_tryAsCoreIndex as tryAsCoreIndex, index$l_tryAsEpoch as tryAsEpoch, index$l_tryAsPerEpochBlock as tryAsPerEpochBlock, index$l_tryAsPerValidator as tryAsPerValidator, index$l_tryAsSegmentIndex as tryAsSegmentIndex, index$l_tryAsServiceGas as tryAsServiceGas, index$l_tryAsServiceId as tryAsServiceId, index$l_tryAsTimeSlot as tryAsTimeSlot, index$l_tryAsValidatorIndex as tryAsValidatorIndex, index$l_workItem as workItem, index$l_workPackage as workPackage, index$l_workReport as workReport, index$l_workResult as workResult };
+  export { index$l_Block as Block, index$l_EpochMarker as EpochMarker, index$l_Extrinsic as Extrinsic, index$l_Header as Header, index$l_HeaderViewWithHash as HeaderViewWithHash, index$l_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$l_TicketsMarker as TicketsMarker, index$l_ValidatorKeys as ValidatorKeys, index$l_W_E as W_E, index$l_W_S as W_S, index$l_assurances as assurances, index$l_codecPerEpochBlock as codecPerEpochBlock, index$l_codecPerValidator as codecPerValidator, codec as codecUtils, index$l_disputes as disputes, index$l_emptyBlock as emptyBlock, index$l_encodeUnsealedHeader as encodeUnsealedHeader, index$l_guarantees as guarantees, index$l_headerViewWithHashCodec as headerViewWithHashCodec, index$l_legacyDescriptor as legacyDescriptor, index$l_preimage as preimage, index$l_reencodeAsView as reencodeAsView, index$l_refineContext as refineContext, index$l_tickets as tickets, index$l_tryAsCoreIndex as tryAsCoreIndex, index$l_tryAsEpoch as tryAsEpoch, index$l_tryAsPerEpochBlock as tryAsPerEpochBlock, index$l_tryAsPerValidator as tryAsPerValidator, index$l_tryAsSegmentIndex as tryAsSegmentIndex, index$l_tryAsServiceGas as tryAsServiceGas, index$l_tryAsServiceId as tryAsServiceId, index$l_tryAsTimeSlot as tryAsTimeSlot, index$l_tryAsValidatorIndex as tryAsValidatorIndex, index$l_workItem as workItem, index$l_workPackage as workPackage, index$l_workReport as workReport, index$l_workResult as workResult };
   export type { index$l_BlockView as BlockView, index$l_CodeHash as CodeHash, index$l_CoreIndex as CoreIndex, index$l_EntropyHash as EntropyHash, index$l_Epoch as Epoch, index$l_EpochMarkerView as EpochMarkerView, index$l_ExtrinsicHash as ExtrinsicHash, index$l_ExtrinsicView as ExtrinsicView, index$l_HeaderHash as HeaderHash, index$l_HeaderView as HeaderView, index$l_PerEpochBlock as PerEpochBlock, index$l_PerValidator as PerValidator, index$l_SEGMENT_BYTES as SEGMENT_BYTES, index$l_Segment as Segment, index$l_SegmentIndex as SegmentIndex, index$l_ServiceGas as ServiceGas, index$l_ServiceId as ServiceId, index$l_StateRootHash as StateRootHash, index$l_TicketsMarkerView as TicketsMarkerView, index$l_TimeSlot as TimeSlot, index$l_ValidatorIndex as ValidatorIndex, index$l_WorkReportHash as WorkReportHash };
 }
 
@@ -8373,7 +8414,7 @@ declare enum NodeType {
 declare class TrieNode {
   constructor(
     /** Exactly 512 bits / 64 bytes */
-    public readonly raw: Uint8Array =
+    public readonly raw: Uint8Array = safeAllocUint8Array(TRIE_NODE_BYTES),
   ) {}
 
   /** Returns the type of the node */
@@ -9111,49 +9152,62 @@ declare function accumulationOutputComparator(a: AccumulationOutput, b: Accumula
   return Ordering.Equal;
 }
 
-declare const codecWithHash = <T, V, H extends OpaqueHash>(val: Descriptor<T, V>): Descriptor<WithHash<H, T>, V> =>
-  Descriptor.withView(
-    val.name,
-    val.sizeHint,
-    (e, elem) => val.encode(e, elem.data),
-    (d): WithHash<H, T> => {
-      const decoder2 = d.clone();
-      const encoded = val.skipEncoded(decoder2);
-      const hash = blake2b.hashBytes(encoded);
-      return new WithHash(hash.asOpaque(), val.decode(d));
-    },
-    val.skip,
-    val.View,
-  );
-
 /**
- *
+ * `J`: The maximum sum of dependency items in a work-report.
  *
- *
-
+ * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
+ */
+declare const MAX_REPORT_DEPENDENCIES = 8;
+type MAX_REPORT_DEPENDENCIES = typeof MAX_REPORT_DEPENDENCIES;
+
+/**
+ * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
  *
- * https://graypaper.fluffylabs.dev/#/
+ * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
  */
-declare class
-  static Codec = codec.Class(
-
-
+declare class NotYetAccumulatedReport extends WithDebug {
+  static Codec = codec.Class(NotYetAccumulatedReport, {
+    report: WorkReport.Codec,
+    dependencies: codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(), {
+      typicalLength: MAX_REPORT_DEPENDENCIES / 2,
+      maxLength: MAX_REPORT_DEPENDENCIES,
+      minLength: 0,
+    }),
   });
 
-  static create({
-    return new
+  static create({ report, dependencies }: CodecRecord<NotYetAccumulatedReport>) {
+    return new NotYetAccumulatedReport(report, dependencies);
   }
 
   private constructor(
-    /**
-
-
-
+    /**
+     * Each of these were made available at most one epoch ago
+     * but have or had unfulfilled dependencies.
+     */
+    readonly report: WorkReport,
+    /**
+     * Alongside the work-report itself, we retain its un-accumulated
+     * dependencies, a set of work-package hashes.
+     *
+     * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
+     */
+    readonly dependencies: KnownSizeArray<WorkPackageHash, `[0..${MAX_REPORT_DEPENDENCIES})`>,
   ) {
     super();
   }
 }
 
+/**
+ * Accumulation queue state entry.
+ */
+type AccumulationQueue = PerEpochBlock<readonly NotYetAccumulatedReport[]>;
+
+declare const accumulationQueueCodec = codecPerEpochBlock(
+  readonlyArray(codec.sequenceVarLen(NotYetAccumulatedReport.Codec)),
+);
+
+type AccumulationQueueView = DescribedBy<typeof accumulationQueueCodec.View>;
+
 /** One entry of kind `T` for each core. */
 type PerCore<T> = KnownSizeArray<T, "number of cores">;
 /** Check if given array has correct length before casting to the opaque type. */
@@ -9169,6 +9223,68 @@ declare const codecPerCore = <T, V>(val: Descriptor<T, V>): Descriptor<PerCore<T
   return codecKnownSizeArray(val, { fixedLength: context.coresCount });
 });
 
+/**
+ * Assignment of particular work report to a core.
+ *
+ * Used by "Assurances" and "Disputes" subsystem, denoted by `rho`
+ * in state.
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/135800135800
+ */
+declare class AvailabilityAssignment extends WithDebug {
+  static Codec = codec.Class(AvailabilityAssignment, {
+    workReport: WorkReport.Codec,
+    timeout: codec.u32.asOpaque<TimeSlot>(),
+  });
+
+  static create({ workReport, timeout }: CodecRecord<AvailabilityAssignment>) {
+    return new AvailabilityAssignment(workReport, timeout);
+  }
+
+  private constructor(
+    /** Work report assigned to a core. */
+    public readonly workReport: WorkReport,
+    /** Time slot at which the report becomes obsolete. */
+    public readonly timeout: TimeSlot,
+  ) {
+    super();
+  }
+}
+
+declare const availabilityAssignmentsCodec = codecPerCore(codec.optional(AvailabilityAssignment.Codec));
+
+type AvailabilityAssignmentsView = DescribedBy<typeof availabilityAssignmentsCodec.View>;
+
+/** `O`: Maximal authorization pool size. */
+declare const MAX_AUTH_POOL_SIZE = O;
+type MAX_AUTH_POOL_SIZE = typeof MAX_AUTH_POOL_SIZE;
+
+/** `Q`: Size of the authorization queue. */
+declare const AUTHORIZATION_QUEUE_SIZE = Q;
+type AUTHORIZATION_QUEUE_SIZE = typeof AUTHORIZATION_QUEUE_SIZE;
+
+/** A pool of authorization hashes that is filled from the queue. */
+type AuthorizationPool = KnownSizeArray<AuthorizerHash, `At most ${typeof MAX_AUTH_POOL_SIZE}`>;
+
+/**
+ * A fixed-size queue of authorization hashes used to fill up the pool.
+ *
+ * Can be set using `ASSIGN` host call in batches of `AUTHORIZATION_QUEUE_SIZE`.
+ */
+type AuthorizationQueue = FixedSizeArray<AuthorizerHash, AUTHORIZATION_QUEUE_SIZE>;
+
+declare const authPoolsCodec = codecPerCore<AuthorizationPool, SequenceView<AuthorizerHash>>(
+  codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), {
+    minLength: 0,
+    maxLength: MAX_AUTH_POOL_SIZE,
+    typicalLength: MAX_AUTH_POOL_SIZE,
+  }),
+);
+
+declare const authQueuesCodec = codecPerCore<AuthorizationQueue, SequenceView<AuthorizerHash>>(
+  codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), AUTHORIZATION_QUEUE_SIZE),
+);
+
 declare const sortedSetCodec = <T extends OpaqueHash>() =>
   readonlyArray(codec.sequenceVarLen(codec.bytes(HASH_SIZE))).convert<ImmutableSortedSet<T>>(
     (input) => input.array,
@@ -9250,112 +9366,6 @@ declare function hashComparator<V extends OpaqueHash>(a: V, b: V) {
   return a.compare(b);
 }
 
-// TODO [ToDr] Not sure where these should live yet :(
-
-/**
- * `J`: The maximum sum of dependency items in a work-report.
- *
- * https://graypaper.fluffylabs.dev/#/5f542d7/416a00416a00?v=0.6.2
- */
-declare const MAX_REPORT_DEPENDENCIES = 8;
-type MAX_REPORT_DEPENDENCIES = typeof MAX_REPORT_DEPENDENCIES;
-
-/** `Q`: Size of the authorization queue. */
-declare const AUTHORIZATION_QUEUE_SIZE = Q;
-type AUTHORIZATION_QUEUE_SIZE = typeof AUTHORIZATION_QUEUE_SIZE;
-
-/** `O`: Maximal authorization pool size. */
-declare const MAX_AUTH_POOL_SIZE = O;
-type MAX_AUTH_POOL_SIZE = typeof MAX_AUTH_POOL_SIZE;
-
-/**
- * Ready (i.e. available and/or audited) but not-yet-accumulated work-reports.
- *
- * https://graypaper.fluffylabs.dev/#/5f542d7/165300165400
- */
-declare class NotYetAccumulatedReport extends WithDebug {
-  static Codec = codec.Class(NotYetAccumulatedReport, {
-    report: WorkReport.Codec,
-    dependencies: codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(), {
-      typicalLength: MAX_REPORT_DEPENDENCIES / 2,
-      maxLength: MAX_REPORT_DEPENDENCIES,
-      minLength: 0,
-    }),
-  });
-
-  static create({ report, dependencies }: CodecRecord<NotYetAccumulatedReport>) {
-    return new NotYetAccumulatedReport(report, dependencies);
-  }
-
-  private constructor(
-    /**
-     * Each of these were made available at most one epoch ago
-     * but have or had unfulfilled dependencies.
-     */
-    readonly report: WorkReport,
-    /**
-     * Alongside the work-report itself, we retain its un-accumulated
-     * dependencies, a set of work-package hashes.
-     *
-     * https://graypaper.fluffylabs.dev/#/5f542d7/165800165800
-     */
-    readonly dependencies: KnownSizeArray<WorkPackageHash, `[0..${MAX_REPORT_DEPENDENCIES})`>,
-  ) {
-    super();
-  }
-}
-
-/** Dictionary entry of services that auto-accumulate every block. */
-declare class AutoAccumulate {
-  static Codec = codec.Class(AutoAccumulate, {
-    service: codec.u32.asOpaque<ServiceId>(),
-    gasLimit: codec.u64.asOpaque<ServiceGas>(),
-  });
-
-  static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
-    return new AutoAccumulate(service, gasLimit);
-  }
-
-  private constructor(
-    /** Service id that auto-accumulates. */
-    readonly service: ServiceId,
-    /** Gas limit for auto-accumulation. */
-    readonly gasLimit: ServiceGas,
-  ) {}
-}
-
-/**
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/11da0111da01?v=0.6.7
- */
-declare class PrivilegedServices {
-  static Codec = codec.Class(PrivilegedServices, {
-    manager: codec.u32.asOpaque<ServiceId>(),
-    authManager: codecPerCore(codec.u32.asOpaque<ServiceId>()),
-    validatorsManager: codec.u32.asOpaque<ServiceId>(),
-    autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
-  });
-
-  static create({ manager, authManager, validatorsManager, autoAccumulateServices }: CodecRecord<PrivilegedServices>) {
-    return new PrivilegedServices(manager, authManager, validatorsManager, autoAccumulateServices);
-  }
-
-  private constructor(
-    /**
-     * `chi_m`: The first, χm, is the index of the manager service which is
-     * the service able to effect an alteration of χ from block to block,
-     * as well as bestow services with storage deposit credits.
-     * https://graypaper.fluffylabs.dev/#/7e6ff6a/11a40111a801?v=0.6.7
-     */
-    readonly manager: ServiceId,
-    /** `chi_a`: Manages authorization queue one for each core. */
-    readonly authManager: PerCore<ServiceId>,
-    /** `chi_v`: Managers validator keys. */
-    readonly validatorsManager: ServiceId,
-    /** `chi_g`: Dictionary of services that auto-accumulate every block with their gas limit. */
-    readonly autoAccumulateServices: readonly AutoAccumulate[],
-  ) {}
-}
-
 declare const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
 
 /** Merkle Mountain Range peaks. */
@@ -9552,6 +9562,11 @@ declare class BlockState extends WithDebug {
   }
 }
 
+/**
+ * Recent history of blocks.
+ *
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
+ */
 declare class RecentBlocks extends WithDebug {
   static Codec = codec.Class(RecentBlocks, {
     blocks: codecKnownSizeArray(BlockState.Codec, {
@@ -9564,6 +9579,12 @@ declare class RecentBlocks extends WithDebug {
     }),
   });
 
+  static empty() {
+    return new RecentBlocks(asKnownSize([]), {
+      peaks: [],
+    });
+  }
+
   static create(a: CodecRecord<RecentBlocks>) {
     return new RecentBlocks(a.blocks, a.accumulationLog);
   }
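`RecentBlocks.empty()` gives genesis/initialization code a direct factory, replacing the empty-history constructor that previously lived on the `RecentBlocksHistory` wrapper (removed in the next hunk):

```ts
// Before: RecentBlocksHistory.create(RecentBlocks.create({ blocks: asKnownSize([]), accumulationLog: { peaks: [] } }))
const recent = RecentBlocks.empty();
console.log(recent.blocks.length); // 0 — empty history with an empty accumulation log
```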
@@ -9584,78 +9605,21 @@ declare class RecentBlocks extends WithDebug {
   }
 }
 
-
- * Recent history of blocks.
- *
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
- */
-declare class RecentBlocksHistory extends WithDebug {
-  static Codec = Descriptor.new<RecentBlocksHistory>(
-    "RecentBlocksHistory",
-    RecentBlocks.Codec.sizeHint,
-    (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()),
-    (decoder) => {
-      const recentBlocks = RecentBlocks.Codec.decode(decoder);
-      return RecentBlocksHistory.create(recentBlocks);
-    },
-    (skip) => {
-      return RecentBlocks.Codec.skip(skip);
-    },
-  );
-
-  static create(recentBlocks: RecentBlocks) {
-    return new RecentBlocksHistory(recentBlocks);
-  }
+type RecentBlocksView = DescribedBy<typeof RecentBlocks.Codec.View>;
 
-
-    return RecentBlocksHistory.create(
-      RecentBlocks.create({
-        blocks: asKnownSize([]),
-        accumulationLog: { peaks: [] },
-      }),
-    );
-  }
+type RecentlyAccumulated = PerEpochBlock<ImmutableHashSet<WorkPackageHash>>;
 
-
-
-
-
-
-
-
-
-
-  }
-
-  /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
-  get blocks(): readonly BlockState[] {
-    if (this.current !== null) {
-      return this.current.blocks;
-    }
-
-    throw new Error("RecentBlocksHistory is in invalid state");
-  }
-
-  asCurrent() {
-    if (this.current === null) {
-      throw new Error("Cannot access current RecentBlocks format");
-    }
-    return this.current;
-  }
-
-  updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
-    if (this.current !== null) {
-      return RecentBlocksHistory.create(
-        RecentBlocks.create({
-          ...this.current,
-          blocks: asOpaqueType(blocks as BlockState[]),
-        }),
-      );
-    }
+declare const recentlyAccumulatedCodec = codecPerEpochBlock<
+  ImmutableHashSet<WorkPackageHash>,
+  SequenceView<WorkPackageHash>
+>(
+  codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>()).convert(
+    (x) => Array.from(x),
+    (x) => HashSet.from(x),
+  ),
+);
 
-
-  }
-}
+type RecentlyAccumulatedView = DescribedBy<typeof recentlyAccumulatedCodec.View>;
 
 /**
  * Fixed size of validator metadata.
@@ -9696,6 +9660,10 @@ declare class ValidatorData extends WithDebug {
   }
 }
 
+type ValidatorDataView = DescribedBy<typeof ValidatorData.Codec.View>;
+
+declare const validatorsDataCodec = codecPerValidator(ValidatorData.Codec);
+
 declare enum SafroleSealingKeysKind {
   Tickets = 0,
   Keys = 1,
@@ -9800,6 +9768,8 @@ declare class SafroleData {
   ) {}
 }
 
+type SafroleDataView = DescribedBy<typeof SafroleData.Codec.View>;
+
 /**
  * `B_S`: The basic minimum balance which all services require.
  *
@@ -9834,6 +9804,31 @@ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
     (_s) => {},
   );
 
+/** Encode and decode object with leading version number. */
+declare const codecWithVersion = <T>(val: Descriptor<T>): Descriptor<T> =>
+  Descriptor.new<T>(
+    "withVersion",
+    {
+      bytes: val.sizeHint.bytes + 8,
+      isExact: false,
+    },
+    (e, v) => {
+      e.varU64(0n);
+      val.encode(e, v);
+    },
+    (d) => {
+      const version = d.varU64();
+      if (version !== 0n) {
+        throw new Error("Non-zero version is not supported!");
+      }
+      return val.decode(d);
+    },
+    (s) => {
+      s.varU64();
+      val.skip(s);
+    },
+  );
+
 /**
  * Service account details.
 *
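`codecWithVersion` wraps a descriptor with a leading var-length u64 version number, pinned to 0 for now; decoding rejects anything else, which reserves room for future state-format migrations. A sketch of the wire effect (`someCodec` and `someValue` are placeholders):

```ts
const versioned = codecWithVersion(someCodec);
const bytes = Encoder.encodeObject(versioned, someValue);
// The encoding starts with varU64(0) — the version prefix — followed by
// someCodec's normal encoding. A non-zero prefix makes decoding throw.
const roundTripped = Decoder.decodeObject(versioned, bytes);
```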
@@ -9913,6 +9908,8 @@ declare class ServiceAccountInfo extends WithDebug {
   }
 }
 
+type ServiceAccountInfoView = DescribedBy<typeof ServiceAccountInfo.Codec.View>;
+
 declare class PreimageItem extends WithDebug {
   static Codec = codec.Class(PreimageItem, {
     hash: codec.bytes(HASH_SIZE).asOpaque<PreimageHash>(),
@@ -9984,6 +9981,66 @@ declare class LookupHistoryItem {
   }
 }
 
+/** Dictionary entry of services that auto-accumulate every block. */
+declare class AutoAccumulate {
+  static Codec = codec.Class(AutoAccumulate, {
+    service: codec.u32.asOpaque<ServiceId>(),
+    gasLimit: codec.u64.asOpaque<ServiceGas>(),
+  });
+
+  static create({ service, gasLimit }: CodecRecord<AutoAccumulate>) {
+    return new AutoAccumulate(service, gasLimit);
+  }
+
+  private constructor(
+    /** Service id that auto-accumulates. */
+    readonly service: ServiceId,
+    /** Gas limit for auto-accumulation. */
+    readonly gasLimit: ServiceGas,
+  ) {}
+}
+
+/**
+ * https://graypaper.fluffylabs.dev/#/ab2cdbd/114402114402?v=0.7.2
+ */
+declare class PrivilegedServices {
+  /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
+  static Codec = codec.Class(PrivilegedServices, {
+    manager: codec.u32.asOpaque<ServiceId>(),
+    assigners: codecPerCore(codec.u32.asOpaque<ServiceId>()),
+    delegator: codec.u32.asOpaque<ServiceId>(),
+    registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
+      ? codec.u32.asOpaque<ServiceId>()
+      : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
+    autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
+  });
+
+  static create(a: CodecRecord<PrivilegedServices>) {
+    return new PrivilegedServices(a.manager, a.delegator, a.registrar, a.assigners, a.autoAccumulateServices);
+  }
+
+  private constructor(
+    /**
+     * `χ_M`: Manages alteration of χ from block to block,
+     * as well as bestowing services with storage deposit credits.
+     * https://graypaper.fluffylabs.dev/#/ab2cdbd/111502111902?v=0.7.2
+     */
+    readonly manager: ServiceId,
+    /** `χ_V`: Manages validator keys. */
+    readonly delegator: ServiceId,
+    /**
+     * `χ_R`: Manages the creation of services in a protected range.
+     *
+     * https://graypaper.fluffylabs.dev/#/ab2cdbd/111b02111d02?v=0.7.2
+     */
+    readonly registrar: ServiceId,
+    /** `χ_A`: Manages the authorization queue, one for each core. */
+    readonly assigners: PerCore<ServiceId>,
+    /** `χ_Z`: Dictionary of services that auto-accumulate every block with their gas limit. */
+    readonly autoAccumulateServices: readonly AutoAccumulate[],
+  ) {}
+}
+
 declare const codecServiceId: Descriptor<ServiceId> =
   Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
     ? codec.u32.asOpaque<ServiceId>()
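For orientation, a sketch of constructing the new privileged-services record; `spec` is a hypothetical `ChainSpec` and all service ids are made up. Note that below GP 0.7.1 the `registrar` field is not encoded and decodes to the `2 ** 32 - 1` default:

  const privileged = PrivilegedServices.create({
    manager: tryAsServiceId(0),
    assigners: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
    delegator: tryAsServiceId(0),
    registrar: tryAsServiceId(2 ** 32 - 1),
    autoAccumulateServices: [
      // service 7 accumulates every block with a 1M gas allowance (illustrative)
      AutoAccumulate.create({ service: tryAsServiceId(7), gasLimit: tryAsServiceGas(1_000_000) }),
    ],
  });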
@@ -10124,12 +10181,26 @@ declare class CoreStatistics {
  * Service statistics.
  * Updated per block, based on available work reports (`W`).
  *
- * https://graypaper.fluffylabs.dev/#/
- * https://github.com/gavofyork/graypaper/blob/9bffb08f3ea7b67832019176754df4fb36b9557d/text/statistics.tex#L77
+ * https://graypaper.fluffylabs.dev/#/1c979cb/199802199802?v=0.7.1
  */
 declare class ServiceStatistics {
-  static Codec = Compatibility.
-
+  static Codec = Compatibility.selectIfGreaterOrEqual({
+    fallback: codec.Class(ServiceStatistics, {
+      providedCount: codecVarU16,
+      providedSize: codec.varU32,
+      refinementCount: codec.varU32,
+      refinementGasUsed: codecVarGas,
+      imports: codecVarU16,
+      exports: codecVarU16,
+      extrinsicSize: codec.varU32,
+      extrinsicCount: codecVarU16,
+      accumulateCount: codec.varU32,
+      accumulateGasUsed: codecVarGas,
+      onTransfersCount: codec.varU32,
+      onTransfersGasUsed: codecVarGas,
+    }),
+    versions: {
+      [GpVersion.V0_7_0]: codec.Class(ServiceStatistics, {
       providedCount: codecVarU16,
       providedSize: codec.varU32,
       refinementCount: codec.varU32,
@@ -10142,21 +10213,23 @@ declare class ServiceStatistics {
       accumulateGasUsed: codecVarGas,
       onTransfersCount: codec.varU32,
       onTransfersGasUsed: codecVarGas,
-  })
-
+      }),
+      [GpVersion.V0_7_1]: codec.Class(ServiceStatistics, {
       providedCount: codecVarU16,
       providedSize: codec.varU32,
       refinementCount: codec.varU32,
       refinementGasUsed: codecVarGas,
       imports: codecVarU16,
-      exports: codecVarU16,
-      extrinsicSize: codec.varU32,
       extrinsicCount: codecVarU16,
+      extrinsicSize: codec.varU32,
+      exports: codecVarU16,
       accumulateCount: codec.varU32,
       accumulateGasUsed: codecVarGas,
-      onTransfersCount:
-      onTransfersGasUsed:
-  })
+      onTransfersCount: ignoreValueWithDefault(tryAsU32(0)),
+      onTransfersGasUsed: ignoreValueWithDefault(tryAsServiceGas(0)),
+      }),
+    },
+  });
 
   static create(v: CodecRecord<ServiceStatistics>) {
     return new ServiceStatistics(
@@ -10196,9 +10269,9 @@ declare class ServiceStatistics {
     public accumulateCount: U32,
     /** `a.1` */
     public accumulateGasUsed: ServiceGas,
-    /** `t.0` */
+    /** `t.0` @deprecated since 0.7.1 */
     public onTransfersCount: U32,
-    /** `t.1` */
+    /** `t.1` @deprecated since 0.7.1 */
     public onTransfersGasUsed: ServiceGas,
   ) {}
 
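The codec is now picked once per configured Gray Paper version: as the helper's name suggests, `fallback` appears to cover versions below those listed, while each entry in `versions` takes effect from that release upwards. A sketch of the observable effect; `bytes` and `spec` are hypothetical:

  // Under GP >= 0.7.1 the onTransfers* fields are absent from the wire;
  // ignoreValueWithDefault fills them in while decoding.
  const stats = Decoder.decodeObject(ServiceStatistics.Codec, bytes, spec);
  stats.onTransfersCount;   // always tryAsU32(0)
  stats.onTransfersGasUsed; // always tryAsServiceGas(0)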
@@ -10246,6 +10319,8 @@ declare class StatisticsData {
   ) {}
 }
 
+type StatisticsDataView = DescribedBy<typeof StatisticsData.Codec.View>;
+
 /**
  * In addition to the entropy accumulator η_0, we retain
  * three additional historical values of the accumulator at
@@ -10297,7 +10372,7 @@ type State = {
   /**
    * `γₖ gamma_k`: The keys for the validators of the next epoch, equivalent to those keys which constitute γ_z.
    */
-  readonly nextValidatorData:
+  readonly nextValidatorData: PerValidator<ValidatorData>;
 
   /**
    * `κ kappa`: Validators, who are the set of economic actors uniquely
@@ -10343,7 +10418,7 @@ type State = {
    *
    * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
    */
-  readonly authPools: PerCore<
+  readonly authPools: PerCore<AuthorizationPool>;
 
   /**
    * `φ phi`: A queue of authorizers for each core used to fill up the pool.
@@ -10352,14 +10427,14 @@ type State = {
    *
    * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
    */
-  readonly authQueues: PerCore<
+  readonly authQueues: PerCore<AuthorizationQueue>;
 
   /**
    * `β beta`: State of the blocks from recent history.
    *
    * https://graypaper.fluffylabs.dev/#/579bd12/0fb7010fb701
    */
-  readonly recentBlocks:
+  readonly recentBlocks: RecentBlocks;
 
   /**
    * `π pi`: Previous and current statistics of each validator,
@@ -10376,7 +10451,7 @@ type State = {
    *
    * https://graypaper.fluffylabs.dev/#/5f542d7/165300165500
    */
-  readonly accumulationQueue:
+  readonly accumulationQueue: AccumulationQueue;
 
   /**
    * `ξ xi`: In order to know which work-packages have been
@@ -10386,7 +10461,7 @@ type State = {
    *
    * https://graypaper.fluffylabs.dev/#/5f542d7/161a00161d00
    */
-  readonly recentlyAccumulated:
+  readonly recentlyAccumulated: RecentlyAccumulated;
 
   /*
    * `γₐ gamma_a`: The ticket accumulator - a series of highest-scoring ticket identifiers to be
@@ -10459,6 +10534,113 @@ interface Service {
   getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null;
 }
 
+/** Additional marker interface, for when a state view is supported/required. */
+type WithStateView<V = StateView> = {
+  /** Get a view of the state. */
+  view(): V;
+};
+
+/**
+ * A non-decoding version of the `State`.
+ *
+ * Note we don't require all fields to have view accessors, since
+ * it's only beneficial for large collections to be read via views.
+ *
+ * https://graypaper.fluffylabs.dev/#/579bd12/08f10008f100
+ */
+type StateView = {
+  /**
+   * `ρ rho`: work-reports which have been reported but are not yet known to be
+   * available to a super-majority of validators, together with the time
+   * at which each was reported.
+   *
+   * https://graypaper.fluffylabs.dev/#/579bd12/135800135800
+   */
+  availabilityAssignmentView(): AvailabilityAssignmentsView;
+
+  /**
+   * `ι iota`: The validator keys and metadata to be drawn from next.
+   */
+  designatedValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
+
+  /**
+   * `κ kappa`: Validators, who are the set of economic actors uniquely
+   * privileged to help build and maintain the Jam chain, are
+   * identified within κ, archived in λ and enqueued from ι.
+   *
+   * https://graypaper.fluffylabs.dev/#/579bd12/080201080601
+   */
+  currentValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
+
+  /**
+   * `λ lambda`: Validators, who are the set of economic actors uniquely
+   * privileged to help build and maintain the Jam chain, are
+   * identified within κ, archived in λ and enqueued from ι.
+   *
+   * https://graypaper.fluffylabs.dev/#/579bd12/080201080601
+   */
+  previousValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView>;
+
+  /**
+   * `α alpha`: Authorizers available for each core (authorizer pool).
+   *
+   * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
+   */
+  authPoolsView(): SequenceView<AuthorizationPool, SequenceView<AuthorizerHash>>;
+
+  /**
+   * `φ phi`: A queue of authorizers for each core used to fill up the pool.
+   *
+   * Only updated by `accumulate` calls using the `assign` host call.
+   *
+   * https://graypaper-reader.netlify.app/#/6e1c0cd/102400102400
+   */
+  authQueuesView(): SequenceView<AuthorizationQueue, SequenceView<AuthorizerHash>>;
+
+  /**
+   * `β beta`: State of the blocks from recent history.
+   *
+   * https://graypaper.fluffylabs.dev/#/579bd12/0fb7010fb701
+   */
+  recentBlocksView(): RecentBlocksView;
+
+  /**
+   * `π pi`: Previous and current statistics of each validator,
+   * cores statistics and services statistics.
+   *
+   * https://graypaper.fluffylabs.dev/#/68eaa1f/18f60118f601?v=0.6.4
+   */
+  statisticsView(): StatisticsDataView;
+
+  /**
+   * `ϑ theta`: We also maintain knowledge of ready (i.e. available
+   * and/or audited) but not-yet-accumulated work-reports in
+   * the state item ϑ.
+   *
+   * https://graypaper.fluffylabs.dev/#/5f542d7/165300165500
+   */
+  accumulationQueueView(): AccumulationQueueView;
+
+  /**
+   * `ξ xi`: In order to know which work-packages have been
+   * accumulated already, we maintain a history of what has
+   * been accumulated. This history, ξ, is sufficiently large
+   * for an epoch's worth of work-reports.
+   *
+   * https://graypaper.fluffylabs.dev/#/5f542d7/161a00161d00
+   */
+  recentlyAccumulatedView(): RecentlyAccumulatedView;
+
+  /*
+   * `γ gamma`: Safrole data.
+   */
+  safroleDataView(): SafroleDataView;
+
+  /** Retrieve details about a single service. */
+  getServiceInfoView(id: ServiceId): ServiceAccountInfoView | null;
+};
+
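Views let callers inspect large state collections without decoding them eagerly. A minimal consumption sketch, assuming some `state` object that implements `WithStateView`; `serviceId` is hypothetical:

  const view: StateView = state.view();
  const stats = view.statisticsView();                // lazily-decoded StatisticsDataView
  const validators = view.currentValidatorDataView(); // SequenceView over ValidatorData
  const info = view.getServiceInfoView(serviceId);    // null when the service does not exist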
 declare enum UpdatePreimageKind {
   /** Insert a new preimage and optionally update its lookup history. */
   Provide = 0,
@@ -10781,10 +10963,10 @@ declare class InMemoryService extends WithDebug implements Service {
 /**
  * A special version of state, stored fully in-memory.
  */
-declare class InMemoryState extends WithDebug implements State, EnumerableState {
+declare class InMemoryState extends WithDebug implements State, WithStateView, EnumerableState {
   /** Create a new `InMemoryState` by providing all required fields. */
-  static
-    return new InMemoryState(state);
+  static new(chainSpec: ChainSpec, state: InMemoryStateFields) {
+    return new InMemoryState(chainSpec, state);
   }
 
   /**
@@ -10802,7 +10984,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
   /**
    * Create a new `InMemoryState` from some other state object.
    */
-  static copyFrom(other: State, servicesData: Map<ServiceId, ServiceEntries>) {
+  static copyFrom(chainSpec: ChainSpec, other: State, servicesData: Map<ServiceId, ServiceEntries>) {
     const services = new Map<ServiceId, InMemoryService>();
     for (const [id, entries] of servicesData.entries()) {
       const service = other.getService(id);
@@ -10813,7 +10995,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
       services.set(id, inMemService);
     }
 
-    return InMemoryState.
+    return InMemoryState.new(chainSpec, {
       availabilityAssignment: other.availabilityAssignment,
       accumulationQueue: other.accumulationQueue,
       designatedValidatorData: other.designatedValidatorData,
@@ -11008,12 +11190,12 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
   disputesRecords: DisputesRecords;
   timeslot: TimeSlot;
   entropy: FixedSizeArray<EntropyHash, ENTROPY_ENTRIES>;
-  authPools: PerCore<
-  authQueues: PerCore<
-  recentBlocks:
+  authPools: PerCore<AuthorizationPool>;
+  authQueues: PerCore<AuthorizationQueue>;
+  recentBlocks: RecentBlocks;
   statistics: StatisticsData;
-  accumulationQueue:
-  recentlyAccumulated:
+  accumulationQueue: AccumulationQueue;
+  recentlyAccumulated: RecentlyAccumulated;
   ticketsAccumulator: KnownSizeArray<Ticket, "0...EpochLength">;
   sealingKeySeries: SafroleSealingKeys;
   epochRoot: BandersnatchRingRoot;
@@ -11029,7 +11211,10 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
     return this.services.get(id) ?? null;
   }
 
-
+  protected constructor(
+    private readonly chainSpec: ChainSpec,
+    s: InMemoryStateFields,
+  ) {
     super();
     this.availabilityAssignment = s.availabilityAssignment;
     this.designatedValidatorData = s.designatedValidatorData;
@@ -11053,11 +11238,15 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
     this.services = s.services;
   }
 
+  view(): StateView {
+    return new InMemoryStateView(this.chainSpec, this);
+  }
+
   /**
    * Create an empty and possibly incoherent `InMemoryState`.
    */
   static empty(spec: ChainSpec) {
-    return new InMemoryState({
+    return new InMemoryState(spec, {
       availabilityAssignment: tryAsPerCore(
         Array.from({ length: spec.coresCount }, () => null),
         spec,
@@ -11124,7 +11313,7 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
       ),
       spec,
     ),
-      recentBlocks:
+    recentBlocks: RecentBlocks.empty(),
     statistics: StatisticsData.create({
       current: tryAsPerValidator(
         Array.from({ length: spec.validatorsCount }, () => ValidatorStatistics.empty()),
@@ -11158,8 +11347,9 @@ declare class InMemoryState extends WithDebug implements State, EnumerableState
       epochRoot: Bytes.zero(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
       privilegedServices: PrivilegedServices.create({
         manager: tryAsServiceId(0),
-
-
+        assigners: tryAsPerCore(new Array(spec.coresCount).fill(tryAsServiceId(0)), spec),
+        delegator: tryAsServiceId(0),
+        registrar: tryAsServiceId(MAX_VALUE),
         autoAccumulateServices: [],
       }),
       accumulationOutputLog: SortedArray.fromArray(accumulationOutputComparator, []),
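`InMemoryState` construction now threads the chain spec through every factory (`new`, `copyFrom`, `empty`), which is what allows it to implement `WithStateView`. A sketch; `spec` is a hypothetical `ChainSpec`:

  const state = InMemoryState.empty(spec); // possibly incoherent, fine for tests
  const view = state.view();               // InMemoryStateView backed by `spec` and `state`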
@@ -11209,12 +11399,18 @@ type FieldNames<T> = {
   [K in keyof T]: T[K] extends Function ? never : K;
 }[keyof T];
 
+type index$e_AUTHORIZATION_QUEUE_SIZE = AUTHORIZATION_QUEUE_SIZE;
 type index$e_AccumulationOutput = AccumulationOutput;
 declare const index$e_AccumulationOutput: typeof AccumulationOutput;
+type index$e_AccumulationQueue = AccumulationQueue;
+type index$e_AccumulationQueueView = AccumulationQueueView;
+type index$e_AuthorizationPool = AuthorizationPool;
+type index$e_AuthorizationQueue = AuthorizationQueue;
 type index$e_AutoAccumulate = AutoAccumulate;
 declare const index$e_AutoAccumulate: typeof AutoAccumulate;
 type index$e_AvailabilityAssignment = AvailabilityAssignment;
 declare const index$e_AvailabilityAssignment: typeof AvailabilityAssignment;
+type index$e_AvailabilityAssignmentsView = AvailabilityAssignmentsView;
 declare const index$e_BASE_SERVICE_BALANCE: typeof BASE_SERVICE_BALANCE;
 type index$e_BlockState = BlockState;
 declare const index$e_BlockState: typeof BlockState;
@@ -11236,8 +11432,11 @@ type index$e_InMemoryStateFields = InMemoryStateFields;
 type index$e_LookupHistoryItem = LookupHistoryItem;
 declare const index$e_LookupHistoryItem: typeof LookupHistoryItem;
 type index$e_LookupHistorySlots = LookupHistorySlots;
+type index$e_MAX_AUTH_POOL_SIZE = MAX_AUTH_POOL_SIZE;
 declare const index$e_MAX_LOOKUP_HISTORY_SLOTS: typeof MAX_LOOKUP_HISTORY_SLOTS;
 type index$e_MAX_RECENT_HISTORY = MAX_RECENT_HISTORY;
+type index$e_NotYetAccumulatedReport = NotYetAccumulatedReport;
+declare const index$e_NotYetAccumulatedReport: typeof NotYetAccumulatedReport;
 type index$e_PerCore<T> = PerCore<T>;
 type index$e_PreimageItem = PreimageItem;
 declare const index$e_PreimageItem: typeof PreimageItem;
@@ -11245,10 +11444,12 @@ type index$e_PrivilegedServices = PrivilegedServices;
 declare const index$e_PrivilegedServices: typeof PrivilegedServices;
 type index$e_RecentBlocks = RecentBlocks;
 declare const index$e_RecentBlocks: typeof RecentBlocks;
-type index$
-
+type index$e_RecentBlocksView = RecentBlocksView;
+type index$e_RecentlyAccumulated = RecentlyAccumulated;
+type index$e_RecentlyAccumulatedView = RecentlyAccumulatedView;
 type index$e_SafroleData = SafroleData;
 declare const index$e_SafroleData: typeof SafroleData;
+type index$e_SafroleDataView = SafroleDataView;
 type index$e_SafroleSealingKeys = SafroleSealingKeys;
 type index$e_SafroleSealingKeysData = SafroleSealingKeysData;
 declare const index$e_SafroleSealingKeysData: typeof SafroleSealingKeysData;
@@ -11257,14 +11458,17 @@ declare const index$e_SafroleSealingKeysKind: typeof SafroleSealingKeysKind;
 type index$e_Service = Service;
 type index$e_ServiceAccountInfo = ServiceAccountInfo;
 declare const index$e_ServiceAccountInfo: typeof ServiceAccountInfo;
+type index$e_ServiceAccountInfoView = ServiceAccountInfoView;
 type index$e_ServiceData = ServiceData;
 type index$e_ServiceEntries = ServiceEntries;
 type index$e_ServiceStatistics = ServiceStatistics;
 declare const index$e_ServiceStatistics: typeof ServiceStatistics;
 type index$e_ServicesUpdate = ServicesUpdate;
 type index$e_State = State;
+type index$e_StateView = StateView;
 type index$e_StatisticsData = StatisticsData;
 declare const index$e_StatisticsData: typeof StatisticsData;
+type index$e_StatisticsDataView = StatisticsDataView;
 type index$e_StorageItem = StorageItem;
 declare const index$e_StorageItem: typeof StorageItem;
 type index$e_StorageKey = StorageKey;
@@ -11285,27 +11489,35 @@ declare const index$e_UpdateStorageKind: typeof UpdateStorageKind;
 type index$e_VALIDATOR_META_BYTES = VALIDATOR_META_BYTES;
 type index$e_ValidatorData = ValidatorData;
 declare const index$e_ValidatorData: typeof ValidatorData;
+type index$e_ValidatorDataView = ValidatorDataView;
 type index$e_ValidatorStatistics = ValidatorStatistics;
 declare const index$e_ValidatorStatistics: typeof ValidatorStatistics;
+type index$e_WithStateView<V = StateView> = WithStateView<V>;
 declare const index$e_accumulationOutputComparator: typeof accumulationOutputComparator;
+declare const index$e_accumulationQueueCodec: typeof accumulationQueueCodec;
+declare const index$e_authPoolsCodec: typeof authPoolsCodec;
+declare const index$e_authQueuesCodec: typeof authQueuesCodec;
+declare const index$e_availabilityAssignmentsCodec: typeof availabilityAssignmentsCodec;
 declare const index$e_codecBandersnatchKey: typeof codecBandersnatchKey;
 declare const index$e_codecPerCore: typeof codecPerCore;
 declare const index$e_codecServiceId: typeof codecServiceId;
 declare const index$e_codecVarGas: typeof codecVarGas;
 declare const index$e_codecVarU16: typeof codecVarU16;
-declare const index$
+declare const index$e_codecWithVersion: typeof codecWithVersion;
 declare const index$e_hashComparator: typeof hashComparator;
 declare const index$e_ignoreValueWithDefault: typeof ignoreValueWithDefault;
+declare const index$e_recentlyAccumulatedCodec: typeof recentlyAccumulatedCodec;
 declare const index$e_serviceDataCodec: typeof serviceDataCodec;
 declare const index$e_serviceEntriesCodec: typeof serviceEntriesCodec;
 declare const index$e_sortedSetCodec: typeof sortedSetCodec;
 declare const index$e_tryAsLookupHistorySlots: typeof tryAsLookupHistorySlots;
 declare const index$e_tryAsPerCore: typeof tryAsPerCore;
+declare const index$e_validatorsDataCodec: typeof validatorsDataCodec;
 declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
 declare const index$e_zeroSizeHint: typeof zeroSizeHint;
 declare namespace index$e {
-  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$
-  export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
+  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_NotYetAccumulatedReport as NotYetAccumulatedReport, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_accumulationOutputComparator as accumulationOutputComparator, index$e_accumulationQueueCodec as accumulationQueueCodec, index$e_authPoolsCodec as authPoolsCodec, index$e_authQueuesCodec as authQueuesCodec, index$e_availabilityAssignmentsCodec as availabilityAssignmentsCodec, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithVersion as codecWithVersion, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_recentlyAccumulatedCodec as recentlyAccumulatedCodec, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_validatorsDataCodec as validatorsDataCodec, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
+  export type { index$e_AUTHORIZATION_QUEUE_SIZE as AUTHORIZATION_QUEUE_SIZE, index$e_AccumulationQueue as AccumulationQueue, index$e_AccumulationQueueView as AccumulationQueueView, index$e_AuthorizationPool as AuthorizationPool, index$e_AuthorizationQueue as AuthorizationQueue, index$e_AvailabilityAssignmentsView as AvailabilityAssignmentsView, index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_AUTH_POOL_SIZE as MAX_AUTH_POOL_SIZE, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_RecentBlocksView as RecentBlocksView, index$e_RecentlyAccumulated as RecentlyAccumulated, index$e_RecentlyAccumulatedView as RecentlyAccumulatedView, index$e_SafroleDataView as SafroleDataView, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceAccountInfoView as ServiceAccountInfoView, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StateView as StateView, index$e_StatisticsDataView as StatisticsDataView, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES, index$e_ValidatorDataView as ValidatorDataView, index$e_WithStateView as WithStateView };
 }
 
 type StateKey = Opaque<OpaqueHash, "stateKey">;
@@ -11372,7 +11584,7 @@ declare namespace stateKeys {
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bba033bba03?v=0.7.1 */
-  export function serviceStorage(serviceId: ServiceId, key: StorageKey): StateKey {
+  export function serviceStorage(blake2b: Blake2b, serviceId: ServiceId, key: StorageKey): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 1)), 0);
@@ -11380,11 +11592,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 1), key);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 1), key);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3bd7033bd703?v=0.7.1 */
-  export function servicePreimage(serviceId: ServiceId, hash: PreimageHash): StateKey {
+  export function servicePreimage(blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const out = Bytes.zero(HASH_SIZE);
       out.raw.set(u32AsLeBytes(tryAsU32(2 ** 32 - 2)), 0);
@@ -11392,11 +11604,16 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, tryAsU32(2 ** 32 - 2), hash);
+    return serviceNested(blake2b, serviceId, tryAsU32(2 ** 32 - 2), hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b0a043b0a04?v=0.7.1 */
-  export function serviceLookupHistory(
+  export function serviceLookupHistory(
+    blake2b: Blake2b,
+    serviceId: ServiceId,
+    hash: PreimageHash,
+    preimageLength: U32,
+  ): StateKey {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const doubleHash = blake2b.hashBytes(hash);
       const out = Bytes.zero(HASH_SIZE);
@@ -11405,11 +11622,11 @@ declare namespace stateKeys {
       return legacyServiceNested(serviceId, out);
     }
 
-    return serviceNested(serviceId, preimageLength, hash);
+    return serviceNested(blake2b, serviceId, preimageLength, hash);
   }
 
   /** https://graypaper.fluffylabs.dev/#/1c979cb/3b88003b8800?v=0.7.1 */
-  export function serviceNested(serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
+  export function serviceNested(blake2b: Blake2b, serviceId: ServiceId, numberPrefix: U32, hash: BytesBlob): StateKey {
     const inputToHash = BytesBlob.blobFromParts(u32AsLeBytes(numberPrefix), hash.raw);
     const newHash = blake2b.hashBytes(inputToHash).raw.subarray(0, 28);
     const key = Bytes.zero(HASH_SIZE);
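All service-scoped key derivations now take the `Blake2b` hasher explicitly instead of relying on a shared instance. A sketch of deriving a storage key; `blake2b` and `storageKey` are hypothetical:

  const key: StateKey = stateKeys.serviceStorage(blake2b, tryAsServiceId(42), storageKey);
  // For GP >= 0.6.7 this hashes u32LE(2 ** 32 - 1) ++ storageKey with blake2b,
  // truncates the digest to 28 bytes, and combines it with the service id into
  // the final 32-byte state key (exact layout per the graypaper links above).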
@@ -11441,33 +11658,25 @@ declare function legacyServiceNested(serviceId: ServiceId, hash: OpaqueHash): St
   return key.asOpaque();
 }
 
-type StateCodec<T> = {
+type StateCodec<T, V = T> = {
   key: StateKey;
-  Codec: Descriptor<T>;
+  Codec: Descriptor<T, V>;
   extract: (s: State) => T;
 };
 
 /** Serialization for particular state entries. */
 declare namespace serialize {
   /** C(1): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b15013b1501?v=0.6.7 */
-  export const authPools: StateCodec<State["authPools"]
+  export const authPools: StateCodec<State["authPools"], ReturnType<StateView["authPoolsView"]>> = {
     key: stateKeys.index(StateKeyIdx.Alpha),
-    Codec:
-      codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), {
-        minLength: 0,
-        maxLength: MAX_AUTH_POOL_SIZE,
-        typicalLength: MAX_AUTH_POOL_SIZE,
-      }),
-    ),
+    Codec: authPoolsCodec,
     extract: (s) => s.authPools,
   };
 
   /** C(2): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b31013b3101?v=0.6.7 */
-  export const authQueues: StateCodec<State["authQueues"]
+  export const authQueues: StateCodec<State["authQueues"], ReturnType<StateView["authQueuesView"]>> = {
     key: stateKeys.index(StateKeyIdx.Phi),
-    Codec:
-      codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(), AUTHORIZATION_QUEUE_SIZE),
-    ),
+    Codec: authQueuesCodec,
     extract: (s) => s.authQueues,
   };
 
|
@@ -11475,14 +11684,14 @@ declare namespace serialize {
|
|
|
11475
11684
|
* C(3): Recent blocks with compatibility
|
|
11476
11685
|
* https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e013b3e01?v=0.6.7
|
|
11477
11686
|
*/
|
|
11478
|
-
export const recentBlocks: StateCodec<
|
|
11687
|
+
export const recentBlocks: StateCodec<RecentBlocks, RecentBlocksView> = {
|
|
11479
11688
|
key: stateKeys.index(StateKeyIdx.Beta),
|
|
11480
|
-
Codec:
|
|
11689
|
+
Codec: RecentBlocks.Codec,
|
|
11481
11690
|
extract: (s) => s.recentBlocks,
|
|
11482
11691
|
};
|
|
11483
11692
|
|
|
11484
11693
|
/** C(4): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b63013b6301?v=0.6.7 */
|
|
11485
|
-
export const safrole: StateCodec<SafroleData> = {
|
|
11694
|
+
export const safrole: StateCodec<SafroleData, SafroleDataView> = {
|
|
11486
11695
|
key: stateKeys.index(StateKeyIdx.Gamma),
|
|
11487
11696
|
Codec: SafroleData.Codec,
|
|
11488
11697
|
extract: (s) =>
|
|
@@ -11495,7 +11704,7 @@ declare namespace serialize {
|
|
|
11495
11704
|
};
|
|
11496
11705
|
|
|
11497
11706
|
/** C(5): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bba013bba01?v=0.6.7 */
|
|
11498
|
-
export const disputesRecords: StateCodec<
|
|
11707
|
+
export const disputesRecords: StateCodec<DisputesRecords> = {
|
|
11499
11708
|
key: stateKeys.index(StateKeyIdx.Psi),
|
|
11500
11709
|
Codec: DisputesRecords.Codec,
|
|
11501
11710
|
extract: (s) => s.disputesRecords,
|
|
@@ -11509,30 +11718,42 @@ declare namespace serialize {
|
|
|
11509
11718
|
};
|
|
11510
11719
|
|
|
11511
11720
|
/** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
|
|
11512
|
-
export const designatedValidators: StateCodec<
|
|
11721
|
+
export const designatedValidators: StateCodec<
|
|
11722
|
+
State["designatedValidatorData"],
|
|
11723
|
+
ReturnType<StateView["designatedValidatorDataView"]>
|
|
11724
|
+
> = {
|
|
11513
11725
|
key: stateKeys.index(StateKeyIdx.Iota),
|
|
11514
|
-
Codec:
|
|
11726
|
+
Codec: validatorsDataCodec,
|
|
11515
11727
|
extract: (s) => s.designatedValidatorData,
|
|
11516
11728
|
};
|
|
11517
11729
|
|
|
11518
11730
|
/** C(8): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b0d023b0d02?v=0.6.7 */
|
|
11519
|
-
export const currentValidators: StateCodec<
|
|
11731
|
+
export const currentValidators: StateCodec<
|
|
11732
|
+
State["currentValidatorData"],
|
|
11733
|
+
ReturnType<StateView["currentValidatorDataView"]>
|
|
11734
|
+
> = {
|
|
11520
11735
|
key: stateKeys.index(StateKeyIdx.Kappa),
|
|
11521
|
-
Codec:
|
|
11736
|
+
Codec: validatorsDataCodec,
|
|
11522
11737
|
extract: (s) => s.currentValidatorData,
|
|
11523
11738
|
};
|
|
11524
11739
|
|
|
11525
11740
|
/** C(9): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b1a023b1a02?v=0.6.7 */
|
|
11526
|
-
export const previousValidators: StateCodec<
|
|
11741
|
+
export const previousValidators: StateCodec<
|
|
11742
|
+
State["previousValidatorData"],
|
|
11743
|
+
ReturnType<StateView["previousValidatorDataView"]>
|
|
11744
|
+
> = {
|
|
11527
11745
|
key: stateKeys.index(StateKeyIdx.Lambda),
|
|
11528
|
-
Codec:
|
|
11746
|
+
Codec: validatorsDataCodec,
|
|
11529
11747
|
extract: (s) => s.previousValidatorData,
|
|
11530
11748
|
};
|
|
11531
11749
|
|
|
11532
11750
|
/** C(10): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b27023b2702?v=0.6.7 */
|
|
11533
|
-
export const availabilityAssignment: StateCodec<
|
|
11751
|
+
export const availabilityAssignment: StateCodec<
|
|
11752
|
+
State["availabilityAssignment"],
|
|
11753
|
+
ReturnType<StateView["availabilityAssignmentView"]>
|
|
11754
|
+
> = {
|
|
11534
11755
|
key: stateKeys.index(StateKeyIdx.Rho),
|
|
11535
|
-
Codec:
|
|
11756
|
+
Codec: availabilityAssignmentsCodec,
|
|
11536
11757
|
extract: (s) => s.availabilityAssignment,
|
|
11537
11758
|
};
|
|
11538
11759
|
|
|
@@ -11551,28 +11772,29 @@ declare namespace serialize {
|
|
|
11551
11772
|
};
|
|
11552
11773
|
|
|
11553
11774
|
/** C(13): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b5e023b5e02?v=0.6.7 */
|
|
11554
|
-
export const statistics: StateCodec<
|
|
11775
|
+
export const statistics: StateCodec<StatisticsData, StatisticsDataView> = {
|
|
11555
11776
|
key: stateKeys.index(StateKeyIdx.Pi),
|
|
11556
11777
|
Codec: StatisticsData.Codec,
|
|
11557
11778
|
extract: (s) => s.statistics,
|
|
11558
11779
|
};
|
|
11559
11780
|
|
|
11560
11781
|
/** C(14): https://graypaper.fluffylabs.dev/#/1c979cb/3bf0023bf002?v=0.7.1 */
|
|
11561
|
-
export const accumulationQueue: StateCodec<
|
|
11782
|
+
export const accumulationQueue: StateCodec<
|
|
11783
|
+
State["accumulationQueue"],
|
|
11784
|
+
ReturnType<StateView["accumulationQueueView"]>
|
|
11785
|
+
> = {
|
|
11562
11786
|
key: stateKeys.index(StateKeyIdx.Omega),
|
|
11563
|
-
Codec:
|
|
11787
|
+
Codec: accumulationQueueCodec,
|
|
11564
11788
|
extract: (s) => s.accumulationQueue,
|
|
11565
11789
|
};
|
|
11566
11790
|
|
|
11567
11791
|
/** C(15): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b96023b9602?v=0.6.7 */
|
|
11568
|
-
export const recentlyAccumulated: StateCodec<
|
|
11792
|
+
export const recentlyAccumulated: StateCodec<
|
|
11793
|
+
State["recentlyAccumulated"],
|
|
11794
|
+
ReturnType<StateView["recentlyAccumulatedView"]>
|
|
11795
|
+
> = {
|
|
11569
11796
|
key: stateKeys.index(StateKeyIdx.Xi),
|
|
11570
|
-
Codec:
|
|
11571
|
-
codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>()).convert(
|
|
11572
|
-
(x) => Array.from(x),
|
|
11573
|
-
(x) => HashSet.from(x),
|
|
11574
|
-
),
|
|
11575
|
-
),
|
|
11797
|
+
Codec: recentlyAccumulatedCodec,
|
|
11576
11798
|
extract: (s) => s.recentlyAccumulated,
|
|
11577
11799
|
};
|
|
11578
11800
|
|
|
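Each `serialize` entry now also carries the view type produced when the stored bytes are read back lazily. A round-trip sketch for one entry; `state` and `spec` are hypothetical:

  const { key, Codec, extract } = serialize.statistics;
  const bytes = Encoder.encodeObject(Codec, extract(state), spec); // value stored under `key`
  const view = Decoder.decodeObject(Codec.View, bytes, spec);      // StatisticsDataView, no eager decode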
@@ -11589,24 +11811,26 @@ declare namespace serialize {
   /** C(255, s): https://graypaper.fluffylabs.dev/#/85129da/383103383103?v=0.6.3 */
   export const serviceData = (serviceId: ServiceId) => ({
     key: stateKeys.serviceInfo(serviceId),
-    Codec:
+    Codec: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
+      ? codecWithVersion(ServiceAccountInfo.Codec)
+      : ServiceAccountInfo.Codec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/384803384803?v=0.6.3 */
-  export const serviceStorage = (serviceId: ServiceId, key: StorageKey) => ({
-    key: stateKeys.serviceStorage(serviceId, key),
+  export const serviceStorage = (blake2b: Blake2b, serviceId: ServiceId, key: StorageKey) => ({
+    key: stateKeys.serviceStorage(blake2b, serviceId, key),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/385b03385b03?v=0.6.3 */
-  export const servicePreimages = (serviceId: ServiceId, hash: PreimageHash) => ({
-    key: stateKeys.servicePreimage(serviceId, hash),
+  export const servicePreimages = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash) => ({
+    key: stateKeys.servicePreimage(blake2b, serviceId, hash),
     Codec: dumpCodec,
   });
 
   /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
-  export const serviceLookupHistory = (serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
-    key: stateKeys.serviceLookupHistory(serviceId, hash, len),
+  export const serviceLookupHistory = (blake2b: Blake2b, serviceId: ServiceId, hash: PreimageHash, len: U32) => ({
+    key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
     Codec: readonlyArray(codec.sequenceVarLen(codec.u32)),
   });
 }
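From GP 0.7.1 the service account entry is stored with the `codecWithVersion` wrapper, so its blob gains the leading `varU64(0)` tag, while the per-service key helpers all require the injected hasher. A sketch; `blake2b`, `info`, `storageKey`, and `spec` are hypothetical:

  const infoEntry = serialize.serviceData(tryAsServiceId(1));
  const blob = Encoder.encodeObject(infoEntry.Codec, info, spec); // version-prefixed on 0.7.1+
  const storageEntry = serialize.serviceStorage(blake2b, tryAsServiceId(1), storageKey);
  storageEntry.key; // StateKey derived via the injected blake2b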
@@ -11626,6 +11850,107 @@ declare const dumpCodec = Descriptor.new<BytesBlob>(
   (s) => s.bytes(s.decoder.source.length - s.decoder.bytesRead()),
 );
 
+/**
+ * Abstraction over some backend containing serialized state entries.
+ *
+ * This may be backed by an on-disk database or stored purely in memory.
+ */
+interface SerializedStateBackend {
+  /** Retrieve the given state key. */
+  get(key: StateKey): BytesBlob | null;
+}
+
+declare class SerializedStateView<T extends SerializedStateBackend> implements StateView {
+  constructor(
+    private readonly spec: ChainSpec,
+    public backend: T,
+    /** Best-effort list of recently active services. */
+    private readonly recentlyUsedServices: ServiceId[],
+    private readonly viewCache: HashDictionary<StateKey, unknown>,
+  ) {}
+
+  private retrieveView<A, B>({ key, Codec }: KeyAndCodecWithView<A, B>, description: string): B {
+    const cached = this.viewCache.get(key);
+    if (cached !== undefined) {
+      return cached as B;
+    }
+    const bytes = this.backend.get(key);
+    if (bytes === null) {
+      throw new Error(`Required state entry for ${description} is missing! Accessing view of key: ${key}`);
+    }
+    // NOTE [ToDr] we are not using `Decoder.decodeObject` here because
+    // it needs to get to the end of the data (skip), yet that's expensive.
+    // We assume that the state data is correct and coherent anyway, so
+    // for performance reasons we simply create the view here.
+    const d = Decoder.fromBytesBlob(bytes);
+    d.attachContext(this.spec);
+    const view = Codec.View.decode(d);
+    this.viewCache.set(key, view);
+    return view;
+  }
+
+  availabilityAssignmentView(): AvailabilityAssignmentsView {
+    return this.retrieveView(serialize.availabilityAssignment, "availabilityAssignmentView");
+  }
+
+  designatedValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
+    return this.retrieveView(serialize.designatedValidators, "designatedValidatorsView");
+  }
+
+  currentValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
+    return this.retrieveView(serialize.currentValidators, "currentValidatorsView");
+  }
+
+  previousValidatorDataView(): SequenceView<ValidatorData, ValidatorDataView> {
+    return this.retrieveView(serialize.previousValidators, "previousValidatorsView");
+  }
+
+  authPoolsView(): SequenceView<AuthorizationPool, SequenceView<AuthorizerHash>> {
+    return this.retrieveView(serialize.authPools, "authPoolsView");
+  }
+
+  authQueuesView(): SequenceView<AuthorizationQueue, SequenceView<AuthorizerHash>> {
+    return this.retrieveView(serialize.authQueues, "authQueuesView");
+  }
+
+  recentBlocksView(): RecentBlocksView {
+    return this.retrieveView(serialize.recentBlocks, "recentBlocksView");
+  }
+
+  statisticsView(): StatisticsDataView {
+    return this.retrieveView(serialize.statistics, "statisticsView");
+  }
+
+  accumulationQueueView(): AccumulationQueueView {
+    return this.retrieveView(serialize.accumulationQueue, "accumulationQueueView");
+  }
+
+  recentlyAccumulatedView(): RecentlyAccumulatedView {
+    return this.retrieveView(serialize.recentlyAccumulated, "recentlyAccumulatedView");
+  }
+
+  safroleDataView(): SafroleDataView {
+    return this.retrieveView(serialize.safrole, "safroleDataView");
+  }
+
+  getServiceInfoView(id: ServiceId): ServiceAccountInfoView | null {
+    const serviceData = serialize.serviceData(id);
+    const bytes = this.backend.get(serviceData.key);
+    if (bytes === null) {
+      return null;
+    }
+    if (!this.recentlyUsedServices.includes(id)) {
+      this.recentlyUsedServices.push(id);
+    }
+    return Decoder.decodeObject(serviceData.Codec.View, bytes, this.spec);
+  }
+}
+
+type KeyAndCodecWithView<T, V> = {
+  key: StateKey;
+  Codec: CodecWithView<T, V>;
+};
+
 /** What should be done with that key? */
 declare enum StateEntryUpdateAction {
   /** Insert an entry. */
@@ -11641,6 +11966,7 @@ declare const EMPTY_BLOB = BytesBlob.empty();
 /** Serialize given state update into a series of key-value pairs. */
 declare function* serializeStateUpdate(
   spec: ChainSpec,
+  blake2b: Blake2b,
   update: Partial<State & ServicesUpdate>,
 ): Generator<StateEntryUpdate> {
   // first let's serialize all of the simple entries (if present!)
@@ -11649,9 +11975,9 @@ declare function* serializeStateUpdate(
   const encode = <T>(codec: Encode<T>, val: T) => Encoder.encodeObject(codec, val, spec);
 
   // then let's proceed with service updates
-  yield* serializeServiceUpdates(update.servicesUpdates, encode);
-  yield* serializePreimages(update.preimages, encode);
-  yield* serializeStorage(update.storage);
+  yield* serializeServiceUpdates(update.servicesUpdates, encode, blake2b);
+  yield* serializePreimages(update.preimages, encode, blake2b);
+  yield* serializeStorage(update.storage, blake2b);
   yield* serializeRemovedServices(update.servicesRemoved);
 }
 
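The serializer yields `[action, key, value]` tuples, so applying an update to storage is a single loop. A sketch; `db` (with hypothetical `set`/`delete` methods), `update`, `spec`, and `blake2b` are stand-ins:

  for (const [action, key, value] of serializeStateUpdate(spec, blake2b, update)) {
    if (action === StateEntryUpdateAction.Insert) {
      db.set(key, value);
    } else {
      db.delete(key); // StateEntryUpdateAction.Remove carries EMPTY_BLOB as its value
    }
  }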
@@ -11663,18 +11989,18 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
   }
 }
 
-declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
+declare function* serializeStorage(storage: UpdateStorage[] | undefined, blake2b: Blake2b): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of storage ?? []) {
     switch (action.kind) {
       case UpdateStorageKind.Set: {
         const key = action.storage.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
         break;
       }
       case UpdateStorageKind.Remove: {
         const key = action.key;
-        const codec = serialize.serviceStorage(serviceId, key);
+        const codec = serialize.serviceStorage(blake2b, serviceId, key);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
         break;
       }
@@ -11684,16 +12010,20 @@ declare function* serializeStorage(storage: UpdateStorage[] | undefined): Genera
   }
 }
 
-declare function* serializePreimages(
+declare function* serializePreimages(
+  preimages: UpdatePreimage[] | undefined,
+  encode: EncodeFun,
+  blake2b: Blake2b,
+): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of preimages ?? []) {
     switch (action.kind) {
       case UpdatePreimageKind.Provide: {
         const { hash, blob } = action.preimage;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Insert, codec.key, blob];
 
         if (action.slot !== null) {
-          const codec2 = serialize.serviceLookupHistory(serviceId, hash, tryAsU32(blob.length));
+          const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, tryAsU32(blob.length));
           yield [
             StateEntryUpdateAction.Insert,
             codec2.key,
@@ -11704,16 +12034,16 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
       }
       case UpdatePreimageKind.UpdateOrAdd: {
         const { hash, length, slots } = action.item;
-        const codec = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Insert, codec.key, encode(codec.Codec, slots)];
         break;
       }
       case UpdatePreimageKind.Remove: {
         const { hash, length } = action;
-        const codec = serialize.servicePreimages(serviceId, hash);
+        const codec = serialize.servicePreimages(blake2b, serviceId, hash);
         yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
 
-        const codec2 = serialize.serviceLookupHistory(serviceId, hash, length);
+        const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, hash, length);
         yield [StateEntryUpdateAction.Remove, codec2.key, EMPTY_BLOB];
         break;
       }
@@ -11725,6 +12055,7 @@ declare function* serializePreimages(preimages: UpdatePreimage[] | undefined, en
 declare function* serializeServiceUpdates(
   servicesUpdates: UpdateService[] | undefined,
   encode: EncodeFun,
+  blake2b: Blake2b,
 ): Generator<StateEntryUpdate> {
   for (const { action, serviceId } of servicesUpdates ?? []) {
     // new service being created or updated
@@ -11734,7 +12065,7 @@ declare function* serializeServiceUpdates(
     // additional lookup history update
     if (action.kind === UpdateServiceKind.Create && action.lookupHistory !== null) {
       const { lookupHistory } = action;
-      const codec2 = serialize.serviceLookupHistory(serviceId, lookupHistory.hash, lookupHistory.length);
+      const codec2 = serialize.serviceLookupHistory(blake2b, serviceId, lookupHistory.hash, lookupHistory.length);
       yield [StateEntryUpdateAction.Insert, codec2.key, encode(codec2.Codec, lookupHistory.slots)];
     }
   }
@@ -11868,8 +12199,8 @@ declare class StateEntries {
   );
 
   /** Turn in-memory state into its serialized form. */
-  static serializeInMemory(spec: ChainSpec, state: InMemoryState) {
-    return new StateEntries(convertInMemoryStateToDictionary(spec, state));
+  static serializeInMemory(spec: ChainSpec, blake2b: Blake2b, state: InMemoryState) {
+    return new StateEntries(convertInMemoryStateToDictionary(spec, blake2b, state));
   }
 
   /**
@@ -11924,7 +12255,8 @@ declare class StateEntries {
   }
 
   /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
-  getRootHash(): StateRootHash {
+  getRootHash(blake2b: Blake2b): StateRootHash {
+    const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
     const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
     for (const [key, value] of this) {
       leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
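Computing a state root now takes the hasher explicitly as well, mirroring the rest of the API. A sketch; `spec`, `blake2b`, and `state` are hypothetical:

  const entries = StateEntries.serializeInMemory(spec, blake2b, state);
  const root: StateRootHash = entries.getRootHash(blake2b); // merklizes all entries via the blake2b trie hasher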
@@ -11937,6 +12269,7 @@ declare class StateEntries {
 /** https://graypaper.fluffylabs.dev/#/68eaa1f/38a50038a500?v=0.6.4 */
 declare function convertInMemoryStateToDictionary(
   spec: ChainSpec,
+  blake2b: Blake2b,
   state: InMemoryState,
 ): TruncatedHashDictionary<StateKey, BytesBlob> {
   const serialized = TruncatedHashDictionary.fromEntries<StateKey, BytesBlob>([]);
@@ -11969,20 +12302,25 @@ declare function convertInMemoryStateToDictionary(
 
     // preimages
     for (const preimage of service.data.preimages.values()) {
-      const { key, Codec } = serialize.servicePreimages(serviceId, preimage.hash);
+      const { key, Codec } = serialize.servicePreimages(blake2b, serviceId, preimage.hash);
       serialized.set(key, Encoder.encodeObject(Codec, preimage.blob));
     }
 
     // storage
     for (const storage of service.data.storage.values()) {
-      const { key, Codec } = serialize.serviceStorage(serviceId, storage.key);
+      const { key, Codec } = serialize.serviceStorage(blake2b, serviceId, storage.key);
       serialized.set(key, Encoder.encodeObject(Codec, storage.value));
     }
 
     // lookup history
    for (const lookupHistoryList of service.data.lookupHistory.values()) {
       for (const lookupHistory of lookupHistoryList) {
-        const { key, Codec } = serialize.serviceLookupHistory(
+        const { key, Codec } = serialize.serviceLookupHistory(
+          blake2b,
+          serviceId,
+          lookupHistory.hash,
+          lookupHistory.length,
+        );
         serialized.set(key, Encoder.encodeObject(Codec, lookupHistory.slots.slice()));
       }
     }
@@ -11991,16 +12329,6 @@ declare function convertInMemoryStateToDictionary(
|
|
|
11991
12329
|
return serialized;
|
|
11992
12330
|
}
|
|
11993
12331
|
|
|
11994
|
-
/**
|
|
11995
|
-
* Abstraction over some backend containing serialized state entries.
|
|
11996
|
-
*
|
|
11997
|
-
* This may or may not be backed by some on-disk database or can be just stored in memory.
|
|
11998
|
-
*/
|
|
11999
|
-
interface SerializedStateBackend {
|
|
12000
|
-
/** Retrieve given state key. */
|
|
12001
|
-
get(key: StateKey): BytesBlob | null;
|
|
12002
|
-
}
|
|
12003
|
-
|
|
12004
12332
|
/**
|
|
12005
12333
|
* State object which reads it's entries from some backend.
|
|
12006
12334
|
*
|
|
@@ -12010,27 +12338,32 @@ interface SerializedStateBackend {
  * in the backend layer, so it MAY fail during runtime.
  */
 declare class SerializedState<T extends SerializedStateBackend = SerializedStateBackend>
-  implements State, EnumerableState
+  implements State, WithStateView, EnumerableState
 {
   /** Create a state-like object from collection of serialized entries. */
-  static fromStateEntries(spec: ChainSpec, state: StateEntries, recentServices: ServiceId[] = []) {
-    return new SerializedState(spec, state, recentServices);
+  static fromStateEntries(spec: ChainSpec, blake2b: Blake2b, state: StateEntries, recentServices: ServiceId[] = []) {
+    return new SerializedState(spec, blake2b, state, recentServices);
   }

   /** Create a state-like object backed by some DB. */
   static new<T extends SerializedStateBackend>(
     spec: ChainSpec,
+    blake2b: Blake2b,
     db: T,
     recentServices: ServiceId[] = [],
   ): SerializedState<T> {
-    return new SerializedState(spec, db, recentServices);
+    return new SerializedState(spec, blake2b, db, recentServices);
   }

+  private dataCache: HashDictionary<StateKey, unknown> = HashDictionary.new();
+  private viewCache: HashDictionary<StateKey, unknown> = HashDictionary.new();
+
   private constructor(
     private readonly spec: ChainSpec,
+    private readonly blake2b: Blake2b,
     public backend: T,
     /** Best-effort list of recently active services. */
-    private readonly recentServices: ServiceId[],
+    private readonly recentlyUsedServices: ServiceId[],
   ) {}

   /** Comparing the serialized states, just means comparing their backends. */
@@ -12038,14 +12371,21 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
     return this.backend;
   }

+  /** Return a non-decoding version of the state. */
+  view(): StateView {
+    return new SerializedStateView(this.spec, this.backend, this.recentlyUsedServices, this.viewCache);
+  }
+
   // TODO [ToDr] Temporary method to update the state,
   // without changing references.
   public updateBackend(newBackend: T) {
     this.backend = newBackend;
+    this.dataCache = HashDictionary.new();
+    this.viewCache = HashDictionary.new();
   }

   recentServiceIds(): readonly ServiceId[] {
-    return this.recentServices;
+    return this.recentlyUsedServices;
   }

   getService(id: ServiceId): SerializedService | null {
@@ -12054,27 +12394,33 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
       return null;
     }

-    if (!this.recentServices.includes(id)) {
-      this.recentServices.push(id);
+    if (!this.recentlyUsedServices.includes(id)) {
+      this.recentlyUsedServices.push(id);
     }

-    return new SerializedService(id, serviceData, (key) => this.retrieveOptional(key));
+    return new SerializedService(this.blake2b, id, serviceData, (key) => this.retrieveOptional(key));
   }

-  private retrieve<T>(
-  const
-  if (
-      throw new Error(`Required state entry for ${description} is missing!. Accessing key: ${key}`);
+  private retrieve<T>(k: KeyAndCodec<T>, description: string): T {
+    const data = this.retrieveOptional(k);
+    if (data === undefined) {
+      throw new Error(`Required state entry for ${description} is missing!. Accessing key: ${k.key}`);
     }
-    return
+    return data;
   }

   private retrieveOptional<T>({ key, Codec }: KeyAndCodec<T>): T | undefined {
+    const cached = this.dataCache.get(key);
+    if (cached !== undefined) {
+      return cached as T;
+    }
     const bytes = this.backend.get(key);
     if (bytes === null) {
       return undefined;
     }
-    return Decoder.decodeObject(Codec, bytes, this.spec);
+    const data = Decoder.decodeObject(Codec, bytes, this.spec);
+    this.dataCache.set(key, data);
+    return data;
   }

   get availabilityAssignment(): State["availabilityAssignment"] {
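
`SerializedState` now memoizes decoded entries in `dataCache`, keyed by state key, and drops both caches whenever `updateBackend` swaps the underlying storage. The pattern, reduced to a self-contained sketch (illustrative names only, not the package API):

  // Illustrative cache-aside lookup mirroring retrieveOptional above.
  function cachedGet<K, V>(cache: Map<K, V>, key: K, load: (key: K) => V | undefined): V | undefined {
    const hit = cache.get(key);
    if (hit !== undefined) {
      return hit;
    }
    const value = load(key);
    if (value !== undefined) {
      cache.set(key, value);
    }
    return value;
  }

The cache is only sound because decoded entries are treated as immutable; mutating a returned object would poison subsequent reads.
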
@@ -12157,6 +12503,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
 /** Service data representation on a serialized state. */
 declare class SerializedService implements Service {
   constructor(
+    public readonly blake2b: Blake2b,
     /** Service id */
     public readonly serviceId: ServiceId,
     private readonly accountInfo: ServiceAccountInfo,
@@ -12172,14 +12519,14 @@ declare class SerializedService implements Service {
   getStorage(rawKey: StorageKey): BytesBlob | null {
     if (Compatibility.isLessThan(GpVersion.V0_6_7)) {
       const SERVICE_ID_BYTES = 4;
-      const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
+      const serviceIdAndKey = safeAllocUint8Array(SERVICE_ID_BYTES + rawKey.length);
       serviceIdAndKey.set(u32AsLeBytes(this.serviceId));
       serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
-      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
-      return this.retrieveOptional(serialize.serviceStorage(this.serviceId, key)) ?? null;
+      const key: StorageKey = asOpaqueType(BytesBlob.blobFrom(this.blake2b.hashBytes(serviceIdAndKey).raw));
+      return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, key)) ?? null;
     }

-    return this.retrieveOptional(serialize.serviceStorage(this.serviceId, rawKey)) ?? null;
+    return this.retrieveOptional(serialize.serviceStorage(this.blake2b, this.serviceId, rawKey)) ?? null;
   }

   /**
@@ -12189,17 +12536,17 @@ declare class SerializedService implements Service {
   */
   hasPreimage(hash: PreimageHash): boolean {
     // TODO [ToDr] consider optimizing to avoid fetching the whole data.
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) !== undefined;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) !== undefined;
   }

   /** Retrieve preimage from the DB. */
   getPreimage(hash: PreimageHash): BytesBlob | null {
-    return this.retrieveOptional(serialize.servicePreimages(this.serviceId, hash)) ?? null;
+    return this.retrieveOptional(serialize.servicePreimages(this.blake2b, this.serviceId, hash)) ?? null;
   }

   /** Retrieve preimage lookup history. */
   getLookupHistory(hash: PreimageHash, len: U32): LookupHistorySlots | null {
-    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.serviceId, hash, len));
+    const rawSlots = this.retrieveOptional(serialize.serviceLookupHistory(this.blake2b, this.serviceId, hash, len));
     if (rawSlots === undefined) {
       return null;
     }
@@ -12212,9 +12559,9 @@ type KeyAndCodec<T> = {
   Codec: Decode<T>;
 };

-declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
+declare function loadState(spec: ChainSpec, blake2b: Blake2b, entries: Iterable<[StateKey | TruncatedHash, BytesBlob]>) {
   const stateEntries = StateEntries.fromEntriesUnsafe(entries);
-  return SerializedState.fromStateEntries(spec, stateEntries);
+  return SerializedState.fromStateEntries(spec, blake2b, stateEntries);
 }

 /**
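
`loadState` is a breaking change for 0.1.x callers: the hasher argument was added in the middle of the parameter list. A migration sketch (assumes an async context; `stateEntries` stands for any `Iterable<[StateKey | TruncatedHash, BytesBlob]>`):

  // before (0.1.x): loadState(spec, stateEntries)
  // after  (0.2.x): the Blake2b instance is passed explicitly
  const blake2b = await Blake2b.createHasher();
  const state = loadState(spec, blake2b, stateEntries);
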
@@ -12246,12 +12593,15 @@ declare function loadState(spec: ChainSpec, entries: Iterable<[StateKey | Trunca
 declare const index$d_EMPTY_BLOB: typeof EMPTY_BLOB;
 type index$d_EncodeFun = EncodeFun;
 type index$d_KeyAndCodec<T> = KeyAndCodec<T>;
+type index$d_KeyAndCodecWithView<T, V> = KeyAndCodecWithView<T, V>;
 type index$d_SerializedService = SerializedService;
 declare const index$d_SerializedService: typeof SerializedService;
 type index$d_SerializedState<T extends SerializedStateBackend = SerializedStateBackend> = SerializedState<T>;
 declare const index$d_SerializedState: typeof SerializedState;
 type index$d_SerializedStateBackend = SerializedStateBackend;
-type index$
+type index$d_SerializedStateView<T extends SerializedStateBackend> = SerializedStateView<T>;
+declare const index$d_SerializedStateView: typeof SerializedStateView;
+type index$d_StateCodec<T, V = T> = StateCodec<T, V>;
 type index$d_StateEntries = StateEntries;
 declare const index$d_StateEntries: typeof StateEntries;
 type index$d_StateEntryUpdate = StateEntryUpdate;
@@ -12279,8 +12629,8 @@ declare const index$d_serializeStorage: typeof serializeStorage;
 declare const index$d_stateEntriesSequenceCodec: typeof stateEntriesSequenceCodec;
 import index$d_stateKeys = stateKeys;
 declare namespace index$d {
-  export { index$d_EMPTY_BLOB as EMPTY_BLOB, index$d_SerializedService as SerializedService, index$d_SerializedState as SerializedState, index$d_StateEntries as StateEntries, index$d_StateEntryUpdateAction as StateEntryUpdateAction, index$d_StateKeyIdx as StateKeyIdx, index$d_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$d_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$d_U32_BYTES as U32_BYTES, index$d_binaryMerkleization as binaryMerkleization, index$d_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$d_dumpCodec as dumpCodec, index$d_getSafroleData as getSafroleData, index$d_legacyServiceNested as legacyServiceNested, index$d_loadState as loadState, index$d_serialize as serialize, index$d_serializeBasicKeys as serializeBasicKeys, index$d_serializePreimages as serializePreimages, index$d_serializeRemovedServices as serializeRemovedServices, index$d_serializeServiceUpdates as serializeServiceUpdates, index$d_serializeStateUpdate as serializeStateUpdate, index$d_serializeStorage as serializeStorage, index$d_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$d_stateKeys as stateKeys };
-  export type { index$d_EncodeFun as EncodeFun, index$d_KeyAndCodec as KeyAndCodec, index$d_SerializedStateBackend as SerializedStateBackend, index$d_StateCodec as StateCodec, index$d_StateEntryUpdate as StateEntryUpdate, index$d_StateKey as StateKey };
+  export { index$d_EMPTY_BLOB as EMPTY_BLOB, index$d_SerializedService as SerializedService, index$d_SerializedState as SerializedState, index$d_SerializedStateView as SerializedStateView, index$d_StateEntries as StateEntries, index$d_StateEntryUpdateAction as StateEntryUpdateAction, index$d_StateKeyIdx as StateKeyIdx, index$d_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$d_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$d_U32_BYTES as U32_BYTES, index$d_binaryMerkleization as binaryMerkleization, index$d_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$d_dumpCodec as dumpCodec, index$d_getSafroleData as getSafroleData, index$d_legacyServiceNested as legacyServiceNested, index$d_loadState as loadState, index$d_serialize as serialize, index$d_serializeBasicKeys as serializeBasicKeys, index$d_serializePreimages as serializePreimages, index$d_serializeRemovedServices as serializeRemovedServices, index$d_serializeServiceUpdates as serializeServiceUpdates, index$d_serializeStateUpdate as serializeStateUpdate, index$d_serializeStorage as serializeStorage, index$d_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$d_stateKeys as stateKeys };
+  export type { index$d_EncodeFun as EncodeFun, index$d_KeyAndCodec as KeyAndCodec, index$d_KeyAndCodecWithView as KeyAndCodecWithView, index$d_SerializedStateBackend as SerializedStateBackend, index$d_StateCodec as StateCodec, index$d_StateEntryUpdate as StateEntryUpdate, index$d_StateKey as StateKey };
 }

 /** Error during `LeafDb` creation. */
@@ -12370,7 +12720,8 @@ declare class LeafDb implements SerializedStateBackend {
     assertNever(val);
   }

-  getStateRoot(): StateRootHash {
+  getStateRoot(blake2b: Blake2b): StateRootHash {
+    const blake2bTrieHasher = getBlake2bTrieHasher(blake2b);
     return InMemoryTrie.computeStateRoot(blake2bTrieHasher, this.leaves).asOpaque();
   }

@@ -12468,12 +12819,13 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
   }

   async getStateRoot(state: InMemoryState): Promise<StateRootHash> {
-    return StateEntries.serializeInMemory(this.spec, state).getRootHash();
+    const blake2b = await Blake2b.createHasher();
+    return StateEntries.serializeInMemory(this.spec, blake2b, state).getRootHash(blake2b);
   }

   /** Insert a full state into the database. */
   async insertState(headerHash: HeaderHash, state: InMemoryState): Promise<Result$2<OK, StateUpdateError>> {
-    const encoded = Encoder.encodeObject(inMemoryStateCodec, state, this.spec);
+    const encoded = Encoder.encodeObject(inMemoryStateCodec(this.spec), state, this.spec);
     this.db.set(headerHash, encoded);
     return Result.ok(OK);
   }
@@ -12484,7 +12836,7 @@ declare class InMemoryStates implements StatesDb<InMemoryState> {
       return null;
     }

-    return Decoder.decodeObject(inMemoryStateCodec, encodedState, this.spec);
+    return Decoder.decodeObject(inMemoryStateCodec(this.spec), encodedState, this.spec);
   }
 }

@@ -12573,7 +12925,7 @@ declare function padAndEncodeData(input: BytesBlob) {
   const paddedLength = Math.ceil(input.length / PIECE_SIZE) * PIECE_SIZE;
   let padded = input;
   if (input.length !== paddedLength) {
-    padded = BytesBlob.blobFrom(new Uint8Array(paddedLength));
+    padded = BytesBlob.blobFrom(safeAllocUint8Array(paddedLength));
     padded.raw.set(input.raw, 0);
   }
   return chunkingFunction(padded);
@@ -12629,7 +12981,7 @@ declare function decodeData(input: FixedSizeArray<[number, BytesBlob], N_CHUNKS_
  */
 declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<POINT_LENGTH>, N_CHUNKS_TOTAL> {
   const result: Bytes<POINT_LENGTH>[] = [];
-  const data = new Uint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);
+  const data = safeAllocUint8Array(POINT_ALIGNMENT * N_CHUNKS_REQUIRED);

   // add original shards to the result
   for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12649,7 +13001,7 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
   for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
     const pointIndex = i * POINT_ALIGNMENT;

-    const redundancyPoint = new Uint8Array(POINT_LENGTH);
+    const redundancyPoint = safeAllocUint8Array(POINT_LENGTH);
     for (let j = 0; j < POINT_LENGTH; j++) {
       redundancyPoint[j] = encodedData[pointIndex + j * HALF_POINT_SIZE];
     }
@@ -12669,7 +13021,7 @@ declare function decodePiece(
 ): Bytes<PIECE_SIZE> {
   const result = Bytes.zero(PIECE_SIZE);

-  const data = new Uint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
+  const data = safeAllocUint8Array(N_CHUNKS_REQUIRED * POINT_ALIGNMENT);
   const indices = new Uint16Array(input.length);

   for (let i = 0; i < N_CHUNKS_REQUIRED; i++) {
@@ -12796,7 +13148,7 @@ declare function lace<N extends number, K extends number>(input: FixedSizeArray<
     return BytesBlob.empty();
   }
   const n = input[0].length;
-  const result = BytesBlob.blobFrom(new Uint8Array(k * n));
+  const result = BytesBlob.blobFrom(safeAllocUint8Array(k * n));
   for (let i = 0; i < k; i++) {
     const entry = input[i].raw;
     for (let j = 0; j < n; j++) {
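
All the fixed-size buffer allocations in the erasure-coding path above now go through `safeAllocUint8Array`, the bounds-checked allocation helper defined near the top of this file, instead of constructing typed arrays directly. The call-site change is mechanical; sketched with a placeholder `length`:

  // before (0.1.x)
  const data = new Uint8Array(length);
  // after (0.2.x): allocation goes through the bounds-checked helper
  const data2 = safeAllocUint8Array(length);
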
@@ -13639,6 +13991,8 @@ declare enum NewServiceError {
   InsufficientFunds = 0,
   /** Service is not privileged to set gratis storage. */
   UnprivilegedService = 1,
+  /** Registrar attempting to create a service with already existing id. */
+  RegistrarServiceIdAlreadyTaken = 2,
 }

 declare enum UpdatePrivilegesError {
@@ -13704,14 +14058,18 @@ interface PartialState {
   ): Result$2<OK, TransferError>;

   /**
-   * Create a new service with given codeHash, length, gas, allowance and
+   * Create a new service with given codeHash, length, gas, allowance, gratisStorage and wantedServiceId.
+   *
+   * Returns a newly assigned id
+   * or `wantedServiceId` if it's lower than `S`
+   * and parent of that service is `Registrar`.
    *
-   *
-   * https://graypaper.fluffylabs.dev/#/7e6ff6a/2f4c022f4c02?v=0.6.7
+   * https://graypaper.fluffylabs.dev/#/ab2cdbd/2fa9042fc304?v=0.7.2
    *
    * An error can be returned in case the account does not
    * have the required balance
-   * or tries to set gratis storage without being
+   * or tries to set gratis storage without being `Manager`
+   * or `Registrar` tries to set service id thats already taken.
    */
   newService(
     codeHash: CodeHash,
@@ -13719,6 +14077,7 @@ interface PartialState {
     gas: ServiceGas,
     allowance: ServiceGas,
     gratisStorage: U64,
+    wantedServiceId: U64,
   ): Result$2<ServiceId, NewServiceError>;

   /** Upgrade code of currently running service. */
@@ -13739,8 +14098,8 @@ interface PartialState {
   /** Update authorization queue for given core and authorize a service for this core. */
   updateAuthorizationQueue(
     coreIndex: CoreIndex,
-    authQueue:
-
+    authQueue: AuthorizationQueue,
+    assigner: ServiceId | null,
   ): Result$2<OK, UpdatePrivilegesError>;

   /**
@@ -13749,14 +14108,16 @@ interface PartialState {
   * `m`: manager service (can change privileged services)
   * `a`: manages authorization queue
   * `v`: manages validator keys
-   * `
+   * `r`: manages create new services in protected id range.
+   * `z`: collection of serviceId -> gas that auto-accumulate every block
   *
   */
   updatePrivilegedServices(
     m: ServiceId | null,
     a: PerCore<ServiceId>,
     v: ServiceId | null,
-
+    r: ServiceId | null,
+    z: [ServiceId, ServiceGas][],
   ): Result$2<OK, UpdatePrivilegesError>;

   /** Yield accumulation trie result hash. */
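
`newService` now takes a `wantedServiceId`, and the matching `NewServiceError.RegistrarServiceIdAlreadyTaken` variant reports a collision when a registrar-created service asks for an id in the protected (low) range that is already occupied. A hedged call sketch (argument values are placeholders, `partialState` is any `PartialState` implementation):

  const created = partialState.newService(codeHash, length, gas, allowance, gratisStorage, wantedServiceId);
  if (created.isError && created.error === NewServiceError.RegistrarServiceIdAlreadyTaken) {
    // the requested low-range id is taken; pick another id or fall back
  }
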
@@ -13868,7 +14229,7 @@ declare class Mask {
   }

   private buildLookupTableForward(mask: BitVec) {
-    const table = new Uint8Array(mask.bitLength);
+    const table = safeAllocUint8Array(mask.bitLength);
     let lastInstructionOffset = 0;
     for (let i = mask.bitLength - 1; i >= 0; i--) {
       if (mask.isSet(i)) {
@@ -14012,7 +14373,7 @@ declare class Registers {
   private asSigned: BigInt64Array;
   private asUnsigned: BigUint64Array;

-  constructor(private readonly bytes = new Uint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
+  constructor(private readonly bytes = safeAllocUint8Array(NO_OF_REGISTERS << REGISTER_SIZE_SHIFT)) {
     check`${bytes.length === NO_OF_REGISTERS << REGISTER_SIZE_SHIFT} Invalid size of registers array.`;
     this.asSigned = new BigInt64Array(bytes.buffer, bytes.byteOffset);
     this.asUnsigned = new BigUint64Array(bytes.buffer, bytes.byteOffset);
@@ -17464,6 +17825,29 @@ declare class Interpreter {
   getMemoryPage(pageNumber: number): null | Uint8Array {
     return this.memory.getPageDump(tryAsPageNumber(pageNumber));
   }
+
+  calculateBlockGasCost(): Map<string, number> {
+    const codeLength = this.code.length;
+    const blocks: Map<string, number> = new Map();
+    let currentBlock = "0";
+    let gasCost = 0;
+    const getNextIstructionIndex = (index: number) => index + 1 + this.mask.getNoOfBytesToNextInstruction(index + 1);
+
+    for (let index = 0; index < codeLength; index = getNextIstructionIndex(index)) {
+      const instruction = this.code[index];
+      if (this.basicBlocks.isBeginningOfBasicBlock(index)) {
+        blocks.set(currentBlock, gasCost);
+        currentBlock = index.toString();
+        gasCost = 0;
+      }
+
+      gasCost += instructionGasMap[instruction];
+    }
+
+    blocks.set(currentBlock, gasCost);
+
+    return blocks;
+  }
 }

 type index$8_BigGas = BigGas;
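
The new `calculateBlockGasCost` produces a static per-basic-block gas profile: it walks the code using the skip mask to find instruction boundaries, sums `instructionGasMap` entries, and flushes the running total whenever a new basic block begins, keyed by the block's starting offset ("0" for the entry block). Usage sketch, assuming an initialized `Interpreter`:

  // Sketch: static gas per basic block, keyed by starting instruction offset.
  const gasByBlock: Map<string, number> = interpreter.calculateBlockGasCost();
  for (const [blockStart, gas] of gasByBlock) {
    console.log(`block @${blockStart}: ${gas} gas`);
  }
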
@@ -17674,7 +18058,7 @@ declare class AccumulationStateUpdate {
     /** Services state updates. */
     public readonly services: ServicesUpdate,
     /** Pending transfers. */
-    public readonly transfers: PendingTransfer[],
+    public transfers: PendingTransfer[],
     /** Yielded accumulation root. */
     public readonly yieldedRoots: Map<ServiceId, OpaqueHash> = new Map(),
   ) {}
@@ -17725,11 +18109,18 @@ declare class AccumulationStateUpdate {
     if (from.privilegedServices !== null) {
       update.privilegedServices = PrivilegedServices.create({
         ...from.privilegedServices,
-
+        assigners: asKnownSize([...from.privilegedServices.assigners]),
       });
     }
     return update;
   }
+
+  /** Retrieve and clear pending transfers. */
+  takeTransfers() {
+    const transfers = this.transfers;
+    this.transfers = [];
+    return transfers;
+  }
 }

 type StateSlice = Pick<State, "getService" | "privilegedServices">;
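
Dropping `readonly` from `transfers` is what enables the new `takeTransfers`, which hands the pending transfers to the caller exactly once and leaves the update empty. Sketch:

  // Drain-and-clear semantics.
  const pending = stateUpdate.takeTransfers(); // PendingTransfer[]
  // stateUpdate.transfers is now []; a second call returns an empty array.
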
@@ -17996,7 +18387,7 @@ declare const HostCallResult = {
   OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
   /** Index unknown. */
   WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
-  /** Storage full. */
+  /** Storage full or resource already allocated. */
   FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
   /** Core index unknown. */
   CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
@@ -18004,7 +18395,7 @@ declare const HostCallResult = {
   CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
   /** Gas limit too low. */
   LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
-  /** The item is already solicited
+  /** The item is already solicited, cannot be forgotten or the operation is invalid due to privilege level. */
   HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
   /** The return value indicating general success. */
   OK: tryAsU64(0n),
@@ -18239,7 +18630,7 @@ declare class HostCalls {
     const maybeAddress = regs.getLowerU32(7);
     const maybeLength = regs.getLowerU32(8);

-    const result = new Uint8Array(maybeLength);
+    const result = safeAllocUint8Array(maybeLength);
     const startAddress = tryAsMemoryIndex(maybeAddress);
     const loadResult = memory.loadInto(result, startAddress);

@@ -18678,7 +19069,7 @@ declare class DebuggerAdapter {

     if (page === null) {
       // page wasn't allocated so we return an empty page
-      return new Uint8Array(PAGE_SIZE);
+      return safeAllocUint8Array(PAGE_SIZE);
     }

     if (page.length === PAGE_SIZE) {
@@ -18687,7 +19078,7 @@ declare class DebuggerAdapter {
     }

     // page was allocated but it is shorter than PAGE_SIZE so we have to extend it
-    const fullPage = new Uint8Array(PAGE_SIZE);
+    const fullPage = safeAllocUint8Array(PAGE_SIZE);
     fullPage.set(page);
     return fullPage;
   }
@@ -18880,10 +19271,10 @@ type ENTROPY_BYTES = typeof ENTROPY_BYTES;
  *
  * https://graypaper.fluffylabs.dev/#/579bd12/3b9a013b9a01
  */
-declare function fisherYatesShuffle<T>(arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
+declare function fisherYatesShuffle<T>(blake2b: Blake2b, arr: T[], entropy: Bytes<ENTROPY_BYTES>): T[] {
   check`${entropy.length === ENTROPY_BYTES} Expected entropy of length ${ENTROPY_BYTES}, got ${entropy.length}`;
   const n = arr.length;
-  const randomNumbers = hashToNumberSequence(entropy, arr.length);
+  const randomNumbers = hashToNumberSequence(blake2b, entropy, arr.length);
   const result: T[] = new Array<T>(n);

   let itemsLeft = n;
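
`fisherYatesShuffle` follows the same hasher-injection pattern: the `Blake2b` instance now arrives as the first argument and is forwarded to `hashToNumberSequence`, so the entropy-driven shuffle stays deterministic for a given hasher and entropy. Sketch (assumes an async context and a `Bytes<ENTROPY_BYTES>` value `entropy`; `validators` is a placeholder array):

  const blake2b = await Blake2b.createHasher();
  const shuffled = fisherYatesShuffle(blake2b, [...validators], entropy);
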
@@ -18909,6 +19300,7 @@ declare namespace index$2 {
 declare class JsonServiceInfo {
   static fromJson = json.object<JsonServiceInfo, ServiceAccountInfo>(
     {
+      ...(Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) ? { version: "number" } : {}),
       code_hash: fromJson.bytes32(),
       balance: json.fromNumber((x) => tryAsU64(x)),
       min_item_gas: json.fromNumber((x) => tryAsServiceGas(x)),
@@ -18947,6 +19339,7 @@ declare class JsonServiceInfo {
     },
   );

+  version?: number;
   code_hash!: CodeHash;
   balance!: U64;
   min_item_gas!: ServiceGas;
@@ -18993,6 +19386,19 @@ declare const lookupMetaFromJson = json.object<JsonLookupMeta, LookupHistoryItem
   ({ key, value }) => new LookupHistoryItem(key.hash, key.length, value),
 );

+declare const preimageStatusFromJson = json.object<JsonPreimageStatus, LookupHistoryItem>(
+  {
+    hash: fromJson.bytes32(),
+    status: json.array("number"),
+  },
+  ({ hash, status }) => new LookupHistoryItem(hash, tryAsU32(0), status),
+);
+
+type JsonPreimageStatus = {
+  hash: PreimageHash;
+  status: LookupHistorySlots;
+};
+
 type JsonLookupMeta = {
   key: {
     hash: PreimageHash;
@@ -19005,21 +19411,34 @@ declare class JsonService {
   static fromJson = json.object<JsonService, InMemoryService>(
     {
       id: "number",
-      data: {
-        service: JsonServiceInfo.fromJson,
-        preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
-        storage: json.optional(json.array(JsonStorageItem.fromJson)),
-        lookup_meta: json.optional(json.array(lookupMetaFromJson)),
-      },
+      data: Compatibility.isLessThan(GpVersion.V0_7_1)
+        ? {
+            service: JsonServiceInfo.fromJson,
+            preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
+            storage: json.optional(json.array(JsonStorageItem.fromJson)),
+            lookup_meta: json.optional(json.array(lookupMetaFromJson)),
+          }
+        : {
+            service: JsonServiceInfo.fromJson,
+            storage: json.optional(json.array(JsonStorageItem.fromJson)),
+            preimages_blob: json.optional(json.array(JsonPreimageItem.fromJson)),
+            preimages_status: json.optional(json.array(preimageStatusFromJson)),
+          },
     },
     ({ id, data }) => {
+      const preimages = HashDictionary.fromEntries(
+        (data.preimages ?? data.preimages_blob ?? []).map((x) => [x.hash, x]),
+      );
+
      const lookupHistory = HashDictionary.new<PreimageHash, LookupHistoryItem[]>();
-      for (const item of data.lookup_meta ?? []) {
+
+      for (const item of data.lookup_meta ?? data.preimages_status ?? []) {
        const data = lookupHistory.get(item.hash) ?? [];
-        data.push(new LookupHistoryItem(item.hash, item.length, item.slots));
+        const length = tryAsU32(preimages.get(item.hash)?.blob.length ?? item.length);
+        data.push(new LookupHistoryItem(item.hash, length, item.slots));
        lookupHistory.set(item.hash, data);
      }
-
+
      const storage = new Map<string, StorageItem>();

      const entries = (data.storage ?? []).map(({ key, value }) => {
@@ -19046,6 +19465,8 @@ declare class JsonService {
     preimages?: JsonPreimageItem[];
     storage?: JsonStorageItem[];
     lookup_meta?: LookupHistoryItem[];
+    preimages_blob?: JsonPreimageItem[];
+    preimages_status?: LookupHistoryItem[];
   };
 }

@@ -19055,8 +19476,7 @@ declare const availabilityAssignmentFromJson = json.object<JsonAvailabilityAssig
     timeout: "number",
   },
   ({ report, timeout }) => {
-
-    return AvailabilityAssignment.create({ workReport: new WithHash(workReportHash, report), timeout });
+    return AvailabilityAssignment.create({ workReport: report, timeout });
   },
 );
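
`JsonService.fromJson` now accepts two fixture layouts: the pre-0.7.1 shape (`preimages` plus `lookup_meta`) and the 0.7.1+ shape (`preimages_blob` plus `preimages_status`), with each lookup-history length recovered from the referenced preimage blob when one is present. The two accepted `data` shapes, sketched as types (field names and element types as declared in the class above):

  // pre-0.7.1 fixtures
  type LegacyServiceData = { service: JsonServiceInfo; preimages?: JsonPreimageItem[]; storage?: JsonStorageItem[]; lookup_meta?: LookupHistoryItem[] };
  // 0.7.1+ fixtures
  type V071ServiceData = { service: JsonServiceInfo; storage?: JsonStorageItem[]; preimages_blob?: JsonPreimageItem[]; preimages_status?: LookupHistoryItem[] };
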
@@ -19144,7 +19564,7 @@ type JsonRecentBlockState = {
   reported: WorkPackageInfo[];
 };

-declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks,
+declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocks>(
   {
     history: json.array(recentBlockStateFromJson),
     mmr: {
@@ -19152,12 +19572,10 @@ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, Recent
     },
   },
   ({ history, mmr }) => {
-    return
-
-
-
-    }),
-  );
+    return RecentBlocks.create({
+      blocks: history,
+      accumulationLog: mmr,
+    });
   },
 );

@@ -19277,8 +19695,12 @@ declare class JsonServiceStatistics {
       extrinsic_count: "number",
       accumulate_count: "number",
       accumulate_gas_used: json.fromNumber(tryAsServiceGas),
-      on_transfers_count: "number",
-      on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
+      ...(Compatibility.isLessThan(GpVersion.V0_7_1)
+        ? {
+            on_transfers_count: "number",
+            on_transfers_gas_used: json.fromNumber(tryAsServiceGas),
+          }
+        : {}),
     },
     ({
       provided_count,
@@ -19305,8 +19727,8 @@ declare class JsonServiceStatistics {
         extrinsicCount: extrinsic_count,
         accumulateCount: accumulate_count,
         accumulateGasUsed: accumulate_gas_used,
-        onTransfersCount: on_transfers_count,
-        onTransfersGasUsed: on_transfers_gas_used,
+        onTransfersCount: on_transfers_count ?? tryAsU32(0),
+        onTransfersGasUsed: on_transfers_gas_used ?? tryAsServiceGas(0),
       });
     },
   );
@@ -19321,8 +19743,8 @@ declare class JsonServiceStatistics {
   extrinsic_count!: U16;
   accumulate_count!: U32;
   accumulate_gas_used!: ServiceGas;
-  on_transfers_count!: U32;
-  on_transfers_gas_used!: ServiceGas;
+  on_transfers_count?: U32;
+  on_transfers_gas_used?: ServiceGas;
 }

 type ServiceStatisticsEntry = {
@@ -19394,8 +19816,9 @@ type JsonStateDump = {
   tau: State["timeslot"];
   chi: {
     chi_m: PrivilegedServices["manager"];
-    chi_a: PrivilegedServices["
-    chi_v: PrivilegedServices["
+    chi_a: PrivilegedServices["assigners"];
+    chi_v: PrivilegedServices["delegator"];
+    chi_r?: PrivilegedServices["registrar"];
     chi_g: PrivilegedServices["autoAccumulateServices"] | null;
   };
   pi: JsonStatisticsData;
@@ -19428,6 +19851,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
       chi_m: "number",
       chi_a: json.array("number"),
       chi_v: "number",
+      chi_r: json.optional("number"),
       chi_g: json.nullable(
         json.array({
           service: "number",
@@ -19460,7 +19884,10 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
       theta,
       accounts,
     }): InMemoryState => {
-
+      if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_1) && chi.chi_r === undefined) {
+        throw new Error("Registrar is required in Privileges GP ^0.7.1");
+      }
+      return InMemoryState.new(spec, {
        authPools: tryAsPerCore(
          alpha.map((perCore) => {
            if (perCore.length > MAX_AUTH_POOL_SIZE) {
@@ -19479,7 +19906,7 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
          }),
          spec,
        ),
-        recentBlocks: beta ??
+        recentBlocks: beta ?? RecentBlocks.empty(),
        nextValidatorData: gamma.gamma_k,
        epochRoot: gamma.gamma_z,
        sealingKeySeries: TicketsOrKeys.toSafroleSealingKeys(gamma.gamma_s, spec),
@@ -19493,8 +19920,9 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
        timeslot: tau,
        privilegedServices: PrivilegedServices.create({
          manager: chi.chi_m,
-
-
+          assigners: chi.chi_a,
+          delegator: chi.chi_v,
+          registrar: chi.chi_r ?? tryAsServiceId(2 ** 32 - 1),
          autoAccumulateServices: chi.chi_g ?? [],
        }),
        statistics: JsonStatisticsData.toStatisticsData(spec, pi),
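
State dumps gain the registrar slot: `chi_r` is optional in the JSON type, required when parsing under GP >= 0.7.1 (the loader throws), and defaulted for older dumps to the maximal service id. Sketch of the fallback used by the loader above:

  // Registrar resolution when loading a pre-0.7.1 state dump.
  const registrar = chi.chi_r ?? tryAsServiceId(2 ** 32 - 1);
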
@@ -19517,6 +19945,7 @@ declare const index$1_JsonDisputesRecords: typeof JsonDisputesRecords;
 type index$1_JsonLookupMeta = JsonLookupMeta;
 type index$1_JsonPreimageItem = JsonPreimageItem;
 declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
+type index$1_JsonPreimageStatus = JsonPreimageStatus;
 type index$1_JsonRecentBlockState = JsonRecentBlockState;
 type index$1_JsonRecentBlocks = JsonRecentBlocks;
 type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
@@ -19541,6 +19970,7 @@ declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
 declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
 declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
 declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
+declare const index$1_preimageStatusFromJson: typeof preimageStatusFromJson;
 declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
 declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
 declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
@@ -19548,8 +19978,8 @@ declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEn
 declare const index$1_ticketFromJson: typeof ticketFromJson;
 declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
 declare namespace index$1 {
-  export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
-  export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
+  export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_preimageStatusFromJson as preimageStatusFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
+  export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonPreimageStatus as JsonPreimageStatus, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
 }

 /** Helper function to create most used hashes in the block */
@@ -19557,7 +19987,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   constructor(
     private readonly context: ChainSpec,
     private readonly keccakHasher: KeccakHasher,
-
+    public readonly blake2b: Blake2b,
   ) {}

   /** Concatenates two hashes and hash this concatenation */
@@ -19571,7 +20001,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {

   /** Creates hash from the block header view */
   header(header: HeaderView): WithHash<HeaderHash, HeaderView> {
-    return new WithHash(blake2b.hashBytes(header.encoded()
+    return new WithHash(this.blake2b.hashBytes(header.encoded()).asOpaque(), header);
   }

   /**
@@ -19581,29 +20011,31 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   */
   extrinsic(extrinsicView: ExtrinsicView): WithHashAndBytes<ExtrinsicHash, ExtrinsicView> {
     // https://graypaper.fluffylabs.dev/#/cc517d7/0cfb000cfb00?v=0.6.5
-    const
+    const guaranteesCount = tryAsU32(extrinsicView.guarantees.view().length);
+    const countEncoded = Encoder.encodeObject(codec.varU32, guaranteesCount);
+    const guaranteesBlobs = extrinsicView.guarantees
       .view()
       .map((g) => g.view())
-      .
-
-
-      reportHash.raw
-      guarantee.slot.encoded().raw
-      guarantee.credentials.encoded().raw
-
-
-
-
+      .reduce(
+        (aggregated, guarantee) => {
+          const reportHash = this.blake2b.hashBytes(guarantee.report.encoded()).asOpaque<WorkReportHash>();
+          aggregated.push(reportHash.raw);
+          aggregated.push(guarantee.slot.encoded().raw);
+          aggregated.push(guarantee.credentials.encoded().raw);
+          return aggregated;
+        },
+        [countEncoded.raw],
+      );

-    const et = blake2b.hashBytes(extrinsicView.tickets.encoded()
-    const ep = blake2b.hashBytes(extrinsicView.preimages.encoded()
-    const eg = blake2b.
-    const ea = blake2b.hashBytes(extrinsicView.assurances.encoded()
-    const ed = blake2b.hashBytes(extrinsicView.disputes.encoded()
+    const et = this.blake2b.hashBytes(extrinsicView.tickets.encoded()).asOpaque<ExtrinsicHash>();
+    const ep = this.blake2b.hashBytes(extrinsicView.preimages.encoded()).asOpaque<ExtrinsicHash>();
+    const eg = this.blake2b.hashBlobs(guaranteesBlobs).asOpaque<ExtrinsicHash>();
+    const ea = this.blake2b.hashBytes(extrinsicView.assurances.encoded()).asOpaque<ExtrinsicHash>();
+    const ed = this.blake2b.hashBytes(extrinsicView.disputes.encoded()).asOpaque<ExtrinsicHash>();

     const encoded = BytesBlob.blobFromParts([et.raw, ep.raw, eg.raw, ea.raw, ed.raw]);

-    return new WithHashAndBytes(blake2b.hashBytes(encoded
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), extrinsicView, encoded);
   }

   /** Creates hash for given WorkPackage */
@@ -19614,7 +20046,7 @@ declare class TransitionHasher implements MmrHasher<KeccakHash> {
   private encode<T, THash extends OpaqueHash>(codec: Codec<T>, data: T): WithHashAndBytes<THash, T> {
     // TODO [ToDr] Use already allocated encoding destination and hash bytes from some arena.
     const encoded = Encoder.encodeObject(codec, data, this.context);
-    return new WithHashAndBytes(blake2b.hashBytes(encoded
+    return new WithHashAndBytes(this.blake2b.hashBytes(encoded).asOpaque(), data, encoded);
   }
 }

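
`TransitionHasher` now owns its `Blake2b` instance (the third constructor argument), every former module-level `blake2b.hashBytes` call becomes `this.blake2b.hashBytes`, and the guarantees hash `eg` is produced by hashing the collected per-guarantee blobs with `hashBlobs`. Construction sketch (assumes `spec` and a `keccakHasher` already exist; only the constructor shape below is shown in this diff):

  const blake2b = await Blake2b.createHasher();
  const hasher = new TransitionHasher(spec, keccakHasher, blake2b);
  const headerWithHash = hasher.header(headerView); // WithHash<HeaderHash, HeaderView>
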
@@ -19635,7 +20067,10 @@ declare enum PreimagesErrorCode {

 // TODO [SeKo] consider whether this module is the right place to remove expired preimages
 declare class Preimages {
-  constructor(public readonly state: PreimagesState) {}
+  constructor(
+    public readonly state: PreimagesState,
+    public readonly blake2b: Blake2b,
+  ) {}

   integrate(input: PreimagesInput): Result$2<PreimagesStateUpdate, PreimagesErrorCode> {
     // make sure lookup extrinsics are sorted and unique
@@ -19664,7 +20099,7 @@ declare class Preimages {
     // select preimages for integration
     for (const preimage of preimages) {
       const { requester, blob } = preimage;
-      const hash: PreimageHash = blake2b.hashBytes(blob).asOpaque();
+      const hash: PreimageHash = this.blake2b.hashBytes(blob).asOpaque();

       const service = this.state.getService(requester);
       if (service === null) {
@@ -19695,156 +20130,6 @@ declare class Preimages {
     }
   }
 }

-declare enum ServiceExecutorError {
-  NoLookup = 0,
-  NoState = 1,
-  NoServiceCode = 2,
-  ServiceCodeMismatch = 3,
-}
-
-declare class WorkPackageExecutor {
-  constructor(
-    private readonly blocks: BlocksDb,
-    private readonly state: StatesDb,
-    private readonly hasher: TransitionHasher,
-  ) {}
-
-  // TODO [ToDr] this while thing should be triple-checked with the GP.
-  // I'm currently implementing some dirty version for the demo.
-  async executeWorkPackage(pack: WorkPackage): Promise<WorkReport> {
-    const headerHash = pack.context.lookupAnchor;
-    // execute authorisation first or is it already executed and we just need to check it?
-    const authExec = this.getServiceExecutor(
-      // TODO [ToDr] should this be anchor or lookupAnchor?
-      headerHash,
-      pack.authCodeHost,
-      pack.authCodeHash,
-    );
-
-    if (authExec.isError) {
-      // TODO [ToDr] most likely shouldn't be throw.
-      throw new Error(`Could not get authorization executor: ${authExec.error}`);
-    }
-
-    const pvm = authExec.ok;
-    const authGas = tryAsGas(15_000n);
-    const result = await pvm.run(pack.parametrization, authGas);
-
-    if (!result.isEqualTo(pack.authorization)) {
-      throw new Error("Authorization is invalid.");
-    }
-
-    const results: WorkResult[] = [];
-    for (const item of pack.items) {
-      const exec = this.getServiceExecutor(headerHash, item.service, item.codeHash);
-      if (exec.isError) {
-        throw new Error(`Could not get item executor: ${exec.error}`);
-      }
-      const pvm = exec.ok;
-
-      const gasRatio = tryAsServiceGas(3_000n);
-      const ret = await pvm.run(item.payload, tryAsGas(item.refineGasLimit)); // or accumulateGasLimit?
-      results.push(
-        WorkResult.create({
-          serviceId: item.service,
-          codeHash: item.codeHash,
-          payloadHash: blake2b.hashBytes(item.payload),
-          gas: gasRatio,
-          result: new WorkExecResult(WorkExecResultKind.ok, ret),
-          load: WorkRefineLoad.create({
-            gasUsed: tryAsServiceGas(5),
-            importedSegments: tryAsU32(0),
-            exportedSegments: tryAsU32(0),
-            extrinsicSize: tryAsU32(0),
-            extrinsicCount: tryAsU32(0),
-          }),
-        }),
-      );
-    }
-
-    const workPackage = this.hasher.workPackage(pack);
-    const workPackageSpec = WorkPackageSpec.create({
-      hash: workPackage.hash,
-      length: tryAsU32(workPackage.encoded.length),
-      erasureRoot: Bytes.zero(HASH_SIZE),
-      exportsRoot: Bytes.zero(HASH_SIZE).asOpaque(),
-      exportsCount: tryAsU16(0),
-    });
-    const coreIndex = tryAsCoreIndex(0);
-    const authorizerHash = Bytes.fill(HASH_SIZE, 5).asOpaque();
-
-    const workResults = FixedSizeArray.new(results, tryAsWorkItemsCount(results.length));
-
-    return Promise.resolve(
-      WorkReport.create({
-        workPackageSpec,
-        context: pack.context,
-        coreIndex,
-        authorizerHash,
-        authorizationOutput: pack.authorization,
-        segmentRootLookup: [],
-        results: workResults,
-        authorizationGasUsed: tryAsServiceGas(0),
-      }),
-    );
-  }
-
-  getServiceExecutor(
-    lookupAnchor: HeaderHash,
-    serviceId: ServiceId,
-    expectedCodeHash: CodeHash,
-  ): Result$2<PvmExecutor, ServiceExecutorError> {
-    const header = this.blocks.getHeader(lookupAnchor);
-    if (header === null) {
-      return Result.error(ServiceExecutorError.NoLookup);
-    }
-
-    const state = this.state.getState(lookupAnchor);
-    if (state === null) {
-      return Result.error(ServiceExecutorError.NoState);
-    }
-
-    const service = state.getService(serviceId);
-    const serviceCodeHash = service?.getInfo().codeHash ?? null;
-    if (serviceCodeHash === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    if (!serviceCodeHash.isEqualTo(expectedCodeHash)) {
-      return Result.error(ServiceExecutorError.ServiceCodeMismatch);
-    }
-
-    const serviceCode = service?.getPreimage(serviceCodeHash.asOpaque()) ?? null;
-    if (serviceCode === null) {
-      return Result.error(ServiceExecutorError.NoServiceCode);
-    }
-
-    return Result.ok(new PvmExecutor(serviceCode));
-  }
-}
-
-declare class PvmExecutor {
-  private readonly pvm: HostCalls;
-  private hostCalls = new HostCallsManager({ missing: new Missing() });
-  private pvmInstanceManager = new PvmInstanceManager(4);
-
-  constructor(private serviceCode: BytesBlob) {
-    this.pvm = new PvmHostCallExtension(this.pvmInstanceManager, this.hostCalls);
-  }
-
-  async run(args: BytesBlob, gas: Gas): Promise<BytesBlob> {
-    const program = Program.fromSpi(this.serviceCode.raw, args.raw, true);
-
-    const result = await this.pvm.runProgram(program.code, 5, gas, program.registers, program.memory);
-
-    if (result.hasMemorySlice()) {
-      return BytesBlob.blobFrom(result.memorySlice);
-    }
-
-    return BytesBlob.empty();
-  }
-}
-
 type index_Preimages = Preimages;
 declare const index_Preimages: typeof Preimages;
 type index_PreimagesErrorCode = PreimagesErrorCode;
@@ -19854,10 +20139,8 @@ type index_PreimagesState = PreimagesState;
 type index_PreimagesStateUpdate = PreimagesStateUpdate;
 type index_TransitionHasher = TransitionHasher;
 declare const index_TransitionHasher: typeof TransitionHasher;
-type index_WorkPackageExecutor = WorkPackageExecutor;
-declare const index_WorkPackageExecutor: typeof WorkPackageExecutor;
 declare namespace index {
-  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher
+  export { index_Preimages as Preimages, index_PreimagesErrorCode as PreimagesErrorCode, index_TransitionHasher as TransitionHasher };
   export type { index_PreimagesInput as PreimagesInput, index_PreimagesState as PreimagesState, index_PreimagesStateUpdate as PreimagesStateUpdate };
 }
