@typeberry/lib 0.0.1-5e4911c → 0.0.1-96e327b
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.d.ts +244 -407
- package/index.js +302 -437
- package/package.json +1 -1
package/index.d.ts
CHANGED
@@ -59,8 +59,6 @@ declare namespace index$s {
 }
 
 declare enum GpVersion {
-  V0_6_5 = "0.6.5",
-  V0_6_6 = "0.6.6",
   V0_6_7 = "0.6.7",
   V0_7_0 = "0.7.0-preview",
   V0_7_1 = "0.7.1-preview",
@@ -73,13 +71,7 @@ declare enum TestSuite {
 
 declare const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
 
-declare const ALL_VERSIONS_IN_ORDER = [
-  GpVersion.V0_6_5,
-  GpVersion.V0_6_6,
-  GpVersion.V0_6_7,
-  GpVersion.V0_7_0,
-  GpVersion.V0_7_1,
-];
+declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1];
 
 declare const env = typeof process === "undefined" ? {} : process.env;
 declare const DEFAULT_VERSION = GpVersion.V0_6_7;
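
Illustrative sketch (not part of the package diff): with `GpVersion.V0_6_5` and `V0_6_6` removed from the enum and from `ALL_VERSIONS_IN_ORDER`, any persisted version string has to be mapped onto a still-supported value. The import path below assumes these constants are re-exported from the package root, which this diff does not show.

```ts
import { ALL_VERSIONS_IN_ORDER, DEFAULT_VERSION, GpVersion } from "@typeberry/lib";

// Map a stored version string onto a still-supported GpVersion,
// falling back to the library default (0.6.7) for retired values.
function resolveVersion(raw: string): GpVersion {
  const known = ALL_VERSIONS_IN_ORDER.find((v) => (v as string) === raw);
  return known ?? DEFAULT_VERSION;
}
```
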
@@ -2332,6 +2324,141 @@ declare class Skipper {
   }
 }
 
+/** Infer the type that is described by given descriptor `T` */
+type DescribedBy<T> = T extends Descriptor<infer V> ? V : never;
+
+/**
+ * Converts a class `T` into an object with the same fields as the class.
+ */
+type CodecRecord<T> = {
+  [K in PropertyKeys<T>]: T[K];
+};
+
+/**
+ * Same as `CodecRecord<T>`, but the fields are all optional.
+ */
+type OptionalRecord<T> = {
+  [K in PropertyKeys<T>]?: T[K];
+};
+
+/**
+ * `Descriptor` of a complex type of some class with a bunch of public fields.
+ */
+type DescriptorRecord<T> = {
+  [K in PropertyKeys<T>]: Descriptor<T[K], unknown>;
+};
+
+/**
+ * Simplified `DescriptorRecord`, where all keys must be used as descriptor keys.
+ */
+type SimpleDescriptorRecord<T> = {
+  [K in keyof T]: Descriptor<T[K], unknown>;
+};
+
+/** Only keys that contain properties, not methods. */
+type PropertyKeys<T> = {
+  // biome-ignore lint/complexity/noBannedTypes: We want to skip any function-like types here.
+  [K in Extract<keyof T, string>]: T[K] extends Function ? never : K;
+}[Extract<keyof T, string>];
+
+/** A constructor of basic data object that takes a `Record<T>`. */
+type ClassConstructor<T> = {
+  name: string;
+  create: (o: CodecRecord<T>) => T;
+};
+
+/**
+ * A full codec type, i.e. the `Encode` and `Decode`.
+ */
+type Codec<T> = Encode<T> & Decode<T>;
+
+/**
+ * Type descriptor definition.
+ *
+ * The type descriptor can encode & decode given type `T`, but
+ * also have a `name` and a byte-size hint.
+ *
+ * Descriptors can be composed to form more complex typings.
+ */
+declare class Descriptor<T, V = T> implements Codec<T>, Skip {
+  /** A "lightweight" version of the object. */
+  public readonly View: Descriptor<V>;
+
+  /** New descriptor with specialized `View`. */
+  public static withView<T, V>(
+    name: string,
+    sizeHint: SizeHint,
+    encode: Descriptor<T, V>["encode"],
+    decode: Descriptor<T, V>["decode"],
+    skip: Descriptor<T, V>["skip"],
+    view: Descriptor<V>,
+  ) {
+    return new Descriptor(name, sizeHint, encode, decode, skip, view);
+  }
+
+  /** Create a new descriptor without a specialized `View`. */
+  public static new<T>(
+    name: string,
+    sizeHint: SizeHint,
+    encode: Descriptor<T>["encode"],
+    decode: Descriptor<T>["decode"],
+    skip: Descriptor<T>["skip"],
+  ) {
+    return new Descriptor(name, sizeHint, encode, decode, skip, null);
+  }
+
+  private constructor(
+    /** Descriptive name of the coded data. */
+    public readonly name: string,
+    /** A byte size hint for encoded data. */
+    public readonly sizeHint: SizeHint,
+    /** Encoding function. */
+    public readonly encode: (e: Encoder, elem: T) => void,
+    /** Decoding function. */
+    public readonly decode: (d: Decoder) => T,
+    /** Skipping function. */
+    public readonly skip: (s: Skipper) => void,
+    /** view object. It can be `null` iff T===V. */
+    view: Descriptor<V> | null,
+  ) {
+    // We cast here to make sure that the field is always set.
+    this.View = view ?? (this as unknown as Descriptor<V>);
+  }
+
+  /**
+   * Extract an encoded version of this type from the decoder.
+   *
+   * This function skips the object instead of decoding it,
+   * allowing to retrieve the encoded portion of the object from `Decoder`.
+   */
+  public skipEncoded(decoder: Decoder) {
+    const initBytes = decoder.bytesRead();
+    this.skip(new Skipper(decoder));
+    const endBytes = decoder.bytesRead();
+    return BytesBlob.blobFrom(decoder.source.subarray(initBytes, endBytes));
+  }
+
+  /** Return a new descriptor that converts data into some other type. */
+  public convert<F>(input: (i: F) => T, output: (i: T) => F): Descriptor<F, V> {
+    return new Descriptor(
+      this.name,
+      this.sizeHint,
+      (e: Encoder, elem: F) => this.encode(e, input(elem)),
+      (d: Decoder) => output(this.decode(d)),
+      this.skip,
+      this.View,
+    );
+  }
+
+  /** Safely cast the descriptor value to a opaque type. */
+  public asOpaque<R>(): Descriptor<Opaque<T, TokenOf<R, T>>, V> {
+    return this.convert(
+      (i) => seeThrough(i),
+      (o) => asOpaqueType<T, TokenOf<R, T>>(o),
+    );
+  }
+}
+
 type LengthRange = {
   /** Inclusive value of minimal length of the sequence. */
   minLength: number;
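
Illustrative sketch (not part of the package diff): the descriptor helpers added above compose as shown below. `codec.u32`, `asOpaque`, `DescribedBy`, and the `ServiceId` opaque type all appear elsewhere in this diff; the root-level re-exports assumed in the imports are not verified here.

```ts
import { codec, Descriptor } from "@typeberry/lib"; // assumed re-export path
import type { DescribedBy, ServiceId } from "@typeberry/lib";

// Derive an opaque ServiceId descriptor from the plain u32 codec.
// `asOpaque` is `convert` specialised to opaque-token casting, so the
// size hint and the lightweight `View` of `codec.u32` are reused as-is.
const serviceId: Descriptor<ServiceId> = codec.u32.asOpaque<ServiceId>();

// `DescribedBy` recovers the value type carried by a descriptor:
type ServiceIdAgain = DescribedBy<typeof serviceId>; // = ServiceId
```
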
@@ -2624,98 +2751,6 @@ declare const TYPICAL_SEQUENCE_LENGTH = 64;
  */
 declare const TYPICAL_DICTIONARY_LENGTH = 32;
 
-/**
- * A full codec type, i.e. the `Encode` and `Decode`.
- */
-type Codec<T> = Encode<T> & Decode<T>;
-
-/**
- * Type descriptor definition.
- *
- * The type descriptor can encode & decode given type `T`, but
- * also have a `name` and a byte-size hint.
- *
- * Descriptors can be composed to form more complex typings.
- */
-declare class Descriptor<T, V = T> implements Codec<T>, Skip {
-  /** A "lightweight" version of the object. */
-  public readonly View: Descriptor<V>;
-
-  /** New descriptor with specialized `View`. */
-  public static withView<T, V>(
-    name: string,
-    sizeHint: SizeHint,
-    encode: Descriptor<T, V>["encode"],
-    decode: Descriptor<T, V>["decode"],
-    skip: Descriptor<T, V>["skip"],
-    view: Descriptor<V>,
-  ) {
-    return new Descriptor(name, sizeHint, encode, decode, skip, view);
-  }
-
-  /** Create a new descriptor without a specialized `View`. */
-  public static new<T>(
-    name: string,
-    sizeHint: SizeHint,
-    encode: Descriptor<T>["encode"],
-    decode: Descriptor<T>["decode"],
-    skip: Descriptor<T>["skip"],
-  ) {
-    return new Descriptor(name, sizeHint, encode, decode, skip, null);
-  }
-
-  private constructor(
-    /** Descriptive name of the coded data. */
-    public readonly name: string,
-    /** A byte size hint for encoded data. */
-    public readonly sizeHint: SizeHint,
-    /** Encoding function. */
-    public readonly encode: (e: Encoder, elem: T) => void,
-    /** Decoding function. */
-    public readonly decode: (d: Decoder) => T,
-    /** Skipping function. */
-    public readonly skip: (s: Skipper) => void,
-    /** view object. It can be `null` iff T===V. */
-    view: Descriptor<V> | null,
-  ) {
-    // We cast here to make sure that the field is always set.
-    this.View = view ?? (this as unknown as Descriptor<V>);
-  }
-
-  /**
-   * Extract an encoded version of this type from the decoder.
-   *
-   * This function skips the object instead of decoding it,
-   * allowing to retrieve the encoded portion of the object from `Decoder`.
-   */
-  public skipEncoded(decoder: Decoder) {
-    const initBytes = decoder.bytesRead();
-    this.skip(new Skipper(decoder));
-    const endBytes = decoder.bytesRead();
-    return BytesBlob.blobFrom(decoder.source.subarray(initBytes, endBytes));
-  }
-
-  /** Return a new descriptor that converts data into some other type. */
-  public convert<F>(input: (i: F) => T, output: (i: T) => F): Descriptor<F, V> {
-    return new Descriptor(
-      this.name,
-      this.sizeHint,
-      (e: Encoder, elem: F) => this.encode(e, input(elem)),
-      (d: Decoder) => output(this.decode(d)),
-      this.skip,
-      this.View,
-    );
-  }
-
-  /** Safely cast the descriptor value to a opaque type. */
-  public asOpaque<R>(): Descriptor<Opaque<T, TokenOf<R, T>>, V> {
-    return this.convert(
-      (i) => seeThrough(i),
-      (o) => asOpaqueType<T, TokenOf<R, T>>(o),
-    );
-  }
-}
-
 /**
  * Convert a descriptor for regular array into readonly one.
  *
@@ -2736,49 +2771,6 @@ declare function readonlyArray<T, V>(desc: Descriptor<T[], V>): Descriptor<reado
   );
 }
 
-/** Infer the type that is described by given descriptor `T` */
-type DescribedBy<T> = T extends Descriptor<infer V> ? V : never;
-
-/**
- * Converts a class `T` into an object with the same fields as the class.
- */
-type CodecRecord<T> = {
-  [K in PropertyKeys<T>]: T[K];
-};
-
-/**
- * Same as `CodecRecord<T>`, but the fields are all optional.
- */
-type OptionalRecord<T> = {
-  [K in PropertyKeys<T>]?: T[K];
-};
-
-/**
- * `Descriptor` of a complex type of some class with a bunch of public fields.
- */
-type DescriptorRecord<T> = {
-  [K in PropertyKeys<T>]: Descriptor<T[K], unknown>;
-};
-
-/**
- * Simplified `DescriptorRecord`, where all keys must be used as descriptor keys.
- */
-type SimpleDescriptorRecord<T> = {
-  [K in keyof T]: Descriptor<T[K], unknown>;
-};
-
-/** Only keys that contain properties, not methods. */
-type PropertyKeys<T> = {
-  // biome-ignore lint/complexity/noBannedTypes: We want to skip any function-like types here.
-  [K in Extract<keyof T, string>]: T[K] extends Function ? never : K;
-}[Extract<keyof T, string>];
-
-/** A constructor of basic data object that takes a `Record<T>`. */
-type ClassConstructor<T> = {
-  name: string;
-  create: (o: CodecRecord<T>) => T;
-};
-
 declare function exactHint(bytes: number): SizeHint {
   return {
     bytes,
@@ -3399,6 +3391,9 @@ type index$o_SimpleDescriptorRecord<T> = SimpleDescriptorRecord<T>;
 type index$o_SizeHint = SizeHint;
 declare const index$o_TYPICAL_DICTIONARY_LENGTH: typeof TYPICAL_DICTIONARY_LENGTH;
 declare const index$o_TYPICAL_SEQUENCE_LENGTH: typeof TYPICAL_SEQUENCE_LENGTH;
+type index$o_ViewField<T, V> = ViewField<T, V>;
+declare const index$o_ViewField: typeof ViewField;
+type index$o_ViewOf<T, D extends DescriptorRecord<T>> = ViewOf<T, D>;
 declare const index$o_addSizeHints: typeof addSizeHints;
 declare const index$o_decodeVariableLengthExtraBytes: typeof decodeVariableLengthExtraBytes;
 declare const index$o_exactHint: typeof exactHint;
@@ -3411,8 +3406,8 @@ declare const index$o_sequenceViewVarLen: typeof sequenceViewVarLen;
 declare const index$o_tryAsExactBytes: typeof tryAsExactBytes;
 declare const index$o_validateLength: typeof validateLength;
 declare namespace index$o {
-  export { index$o_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$o_Decoder as Decoder, index$o_Descriptor as Descriptor, index$o_Encoder as Encoder, index$o_MASKS as MASKS, index$o_MAX_LENGTH as MAX_LENGTH, index$o_ObjectView as ObjectView, index$o_SequenceView as SequenceView, index$o_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$o_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$o_addSizeHints as addSizeHints, codec$1 as codec, index$o_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$o_exactHint as exactHint, index$o_forEachDescriptor as forEachDescriptor, index$o_hasUniqueView as hasUniqueView, index$o_objectView as objectView, index$o_readonlyArray as readonlyArray, index$o_sequenceViewFixLen as sequenceViewFixLen, index$o_sequenceViewVarLen as sequenceViewVarLen, index$o_tryAsExactBytes as tryAsExactBytes, index$o_validateLength as validateLength };
-  export type { index$o_ClassConstructor as ClassConstructor, index$o_Codec as Codec, index$o_CodecRecord as CodecRecord, index$o_Decode as Decode, index$o_DescribedBy as DescribedBy, index$o_DescriptorRecord as DescriptorRecord, index$o_Encode as Encode, index$o_LengthRange as LengthRange, index$o_OptionalRecord as OptionalRecord, Options$1 as Options, index$o_PropertyKeys as PropertyKeys, index$o_SimpleDescriptorRecord as SimpleDescriptorRecord, index$o_SizeHint as SizeHint };
+  export { index$o_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$o_Decoder as Decoder, index$o_Descriptor as Descriptor, index$o_Encoder as Encoder, index$o_MASKS as MASKS, index$o_MAX_LENGTH as MAX_LENGTH, index$o_ObjectView as ObjectView, index$o_SequenceView as SequenceView, index$o_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$o_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$o_ViewField as ViewField, index$o_addSizeHints as addSizeHints, codec$1 as codec, index$o_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$o_exactHint as exactHint, index$o_forEachDescriptor as forEachDescriptor, index$o_hasUniqueView as hasUniqueView, index$o_objectView as objectView, index$o_readonlyArray as readonlyArray, index$o_sequenceViewFixLen as sequenceViewFixLen, index$o_sequenceViewVarLen as sequenceViewVarLen, index$o_tryAsExactBytes as tryAsExactBytes, index$o_validateLength as validateLength };
+  export type { index$o_ClassConstructor as ClassConstructor, index$o_Codec as Codec, index$o_CodecRecord as CodecRecord, index$o_Decode as Decode, index$o_DescribedBy as DescribedBy, index$o_DescriptorRecord as DescriptorRecord, index$o_Encode as Encode, index$o_LengthRange as LengthRange, index$o_OptionalRecord as OptionalRecord, Options$1 as Options, index$o_PropertyKeys as PropertyKeys, index$o_SimpleDescriptorRecord as SimpleDescriptorRecord, index$o_SizeHint as SizeHint, index$o_ViewOf as ViewOf };
 }
 
 /**
@@ -5550,6 +5545,39 @@ declare namespace disputes {
  */
 type BeefyHash = Opaque<OpaqueHash, "BeefyHash">;
 
+/** Authorizer hash. */
+type AuthorizerHash = Opaque<OpaqueHash, "AuthorizerHash">;
+
+/** Blake2B hash of a work package. */
+type WorkPackageHash = Opaque<OpaqueHash, "WorkPackageHash">;
+
+/** Work package exported segments merkle root hash. */
+type ExportsRootHash = Opaque<OpaqueHash, "ExportsRootHash">;
+
+/**
+ * Mapping between work package hash and root hash of it's exports.
+ *
+ * Used to construct a dictionary.
+ */
+declare class WorkPackageInfo extends WithDebug {
+  static Codec = codec.Class(WorkPackageInfo, {
+    workPackageHash: codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(),
+    segmentTreeRoot: codec.bytes(HASH_SIZE).asOpaque<ExportsRootHash>(),
+  });
+
+  private constructor(
+    /** Hash of the described work package. */
+    readonly workPackageHash: WorkPackageHash,
+    /** Exports root hash. */
+    readonly segmentTreeRoot: ExportsRootHash,
+  ) {
+    super();
+  }
+
+  static create({ workPackageHash, segmentTreeRoot }: CodecRecord<WorkPackageInfo>) {
+    return new WorkPackageInfo(workPackageHash, segmentTreeRoot);
+  }
+}
 /**
  * `X`: Refinement Context - state of the chain at the point
  * that the report's corresponding work-package was evaluated.
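
Illustrative sketch (not part of the package diff): `WorkPackageInfo` only exposes the `create` factory, with the record shape checked through `CodecRecord`. The import path and the pre-existing hash values below are assumptions.

```ts
import { WorkPackageInfo } from "@typeberry/lib"; // assumed re-export path
import type { ExportsRootHash, WorkPackageHash } from "@typeberry/lib";

// Hashes would normally come from a decoded work report; declared here for the sketch.
declare const packageHash: WorkPackageHash;
declare const exportsRoot: ExportsRootHash;

// Build the work-package-hash -> exports-root mapping entry.
const info = WorkPackageInfo.create({
  workPackageHash: packageHash,
  segmentTreeRoot: exportsRoot,
});
```
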
@@ -5595,12 +5623,17 @@ declare class RefineContext extends WithDebug {
   }
 }
 
+type refineContext_AuthorizerHash = AuthorizerHash;
 type refineContext_BeefyHash = BeefyHash;
+type refineContext_ExportsRootHash = ExportsRootHash;
 type refineContext_RefineContext = RefineContext;
 declare const refineContext_RefineContext: typeof RefineContext;
+type refineContext_WorkPackageHash = WorkPackageHash;
+type refineContext_WorkPackageInfo = WorkPackageInfo;
+declare const refineContext_WorkPackageInfo: typeof WorkPackageInfo;
 declare namespace refineContext {
-  export { refineContext_RefineContext as RefineContext };
-  export type { refineContext_BeefyHash as BeefyHash };
+  export { refineContext_RefineContext as RefineContext, refineContext_WorkPackageInfo as WorkPackageInfo };
+  export type { refineContext_AuthorizerHash as AuthorizerHash, refineContext_BeefyHash as BeefyHash, refineContext_ExportsRootHash as ExportsRootHash, refineContext_WorkPackageHash as WorkPackageHash };
 }
 
 /** `W_E`: The basic size of erasure-coded pieces in octets. See equation H.6. */
@@ -5925,17 +5958,13 @@ declare enum WorkExecResultKind {
   /** `☇`: unexpected program termination. */
   panic = 2,
   /** `⊚`: the number of exports made was invalidly reported. */
-
-  incorrectNumberOfExports = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 3 : -1,
+  incorrectNumberOfExports = 3,
   /** `⊖`: the size of the digest (refinement output) would cross the acceptable limit. */
-
-  digestTooBig = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 4 : -1,
+  digestTooBig = 4,
   /** `BAD`: service code was not available for lookup in state. */
-
-  badCode = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 5 : 3,
+  badCode = 5,
   /** `BIG`: the code was too big (beyond the maximum allowed size `W_C`) */
-
-  codeOversize = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 6 : 4,
+  codeOversize = 6,
 }
 
 /** The execution result of some work-package. */
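
Illustrative sketch (not part of the package diff): the `WorkExecResultKind` discriminants are now fixed numbers instead of being computed from `Compatibility.isGreaterOrEqual`, so a plain switch is stable across Gray Paper versions. The import path is an assumption.

```ts
import { WorkExecResultKind } from "@typeberry/lib"; // assumed re-export path

// Fixed discriminants: incorrectNumberOfExports=3, digestTooBig=4, badCode=5, codeOversize=6.
function describe(kind: WorkExecResultKind): string {
  switch (kind) {
    case WorkExecResultKind.incorrectNumberOfExports:
      return "invalid export count";
    case WorkExecResultKind.digestTooBig:
      return "digest over limit";
    case WorkExecResultKind.badCode:
      return "service code missing";
    case WorkExecResultKind.codeOversize:
      return "code too big";
    default:
      // Fall back to the enum's reverse mapping for the other kinds (ok, panic, ...).
      return WorkExecResultKind[kind] ?? "unknown";
  }
}
```
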
@@ -6093,14 +6122,6 @@ declare namespace workResult {
   };
 }
 
-/** Authorizer hash. */
-type AuthorizerHash = Opaque<OpaqueHash, "AuthorizerHash">;
-
-/** Blake2B hash of a work package. */
-type WorkPackageHash = Opaque<OpaqueHash, "WorkPackageHash">;
-/** Work package exported segments merkle root hash. */
-type ExportsRootHash = Opaque<OpaqueHash, "ExportsRootHash">;
-
 /**
  * Details about the work package being reported on.
  *
@@ -6135,31 +6156,6 @@ declare class WorkPackageSpec extends WithDebug {
   }
 }
 
-/**
- * Mapping between work package hash and root hash of it's exports.
- *
- * Used to construct a dictionary.
- */
-declare class WorkPackageInfo extends WithDebug {
-  static Codec = codec.Class(WorkPackageInfo, {
-    workPackageHash: codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(),
-    segmentTreeRoot: codec.bytes(HASH_SIZE).asOpaque<ExportsRootHash>(),
-  });
-
-  private constructor(
-    /** Hash of the described work package. */
-    readonly workPackageHash: WorkPackageHash,
-    /** Exports root hash. */
-    readonly segmentTreeRoot: ExportsRootHash,
-  ) {
-    super();
-  }
-
-  static create({ workPackageHash, segmentTreeRoot }: CodecRecord<WorkPackageInfo>) {
-    return new WorkPackageInfo(workPackageHash, segmentTreeRoot);
-  }
-}
-
 /**
  * A report of execution of some work package.
  *
@@ -6238,18 +6234,15 @@ declare const WorkReportCodec = codec.Class(WorkReportNoCodec, {
 declare const WorkReportCodecPre070 = codec.Class(WorkReportNoCodec, {
   workPackageSpec: WorkPackageSpec.Codec,
   context: RefineContext.Codec,
-  coreIndex:
-
-
-
-
-
-
-
-
-  },
-  )
-  : codec.u16.asOpaque<CoreIndex>(),
+  coreIndex: codec.varU32.convert(
+    (o) => tryAsU32(o),
+    (i) => {
+      if (!isU16(i)) {
+        throw new Error(`Core index exceeds U16: ${i}`);
+      }
+      return tryAsCoreIndex(i);
+    },
+  ),
   authorizerHash: codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(),
   authorizationOutput: codec.blob,
   segmentRootLookup: readonlyArray(codec.sequenceVarLen(WorkPackageInfo.Codec)),
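
Illustrative sketch (not part of the package diff): the `coreIndex` field above uses the varU32-with-bounds-check pattern; restating it standalone makes the two conversion directions explicit. `codec.varU32`, `tryAsU32`, `isU16`, and `tryAsCoreIndex` all appear in this diff; importing them from the package root is an assumption.

```ts
import { codec, tryAsU32, isU16, tryAsCoreIndex } from "@typeberry/lib"; // assumed re-exports

const coreIndexCodec = codec.varU32.convert(
  // Encode direction: widen the CoreIndex back to a plain U32 for varU32 encoding.
  (coreIndex) => tryAsU32(coreIndex),
  // Decode direction: varU32 can carry values a U16 core index cannot, so guard it.
  (decoded) => {
    if (!isU16(decoded)) {
      throw new Error(`Core index exceeds U16: ${decoded}`);
    }
    return tryAsCoreIndex(decoded);
  },
);
```
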
@@ -6266,11 +6259,6 @@ declare class WorkReport extends WorkReportNoCodec {
     : WorkReportCodecPre070;
 }
 
-type workReport_AuthorizerHash = AuthorizerHash;
-type workReport_ExportsRootHash = ExportsRootHash;
-type workReport_WorkPackageHash = WorkPackageHash;
-type workReport_WorkPackageInfo = WorkPackageInfo;
-declare const workReport_WorkPackageInfo: typeof WorkPackageInfo;
 type workReport_WorkPackageSpec = WorkPackageSpec;
 declare const workReport_WorkPackageSpec: typeof WorkPackageSpec;
 type workReport_WorkReport = WorkReport;
@@ -6280,8 +6268,13 @@ declare const workReport_WorkReportCodecPre070: typeof WorkReportCodecPre070;
 type workReport_WorkReportNoCodec = WorkReportNoCodec;
 declare const workReport_WorkReportNoCodec: typeof WorkReportNoCodec;
 declare namespace workReport {
-  export {
-
+  export {
+    workReport_WorkPackageSpec as WorkPackageSpec,
+    workReport_WorkReport as WorkReport,
+    workReport_WorkReportCodec as WorkReportCodec,
+    workReport_WorkReportCodecPre070 as WorkReportCodecPre070,
+    workReport_WorkReportNoCodec as WorkReportNoCodec,
+  };
 }
 
 /**
@@ -8432,60 +8425,6 @@ declare namespace index$f {
 declare const MAX_RECENT_HISTORY = 8;
 type MAX_RECENT_HISTORY = typeof MAX_RECENT_HISTORY;
 
-type LegacyBlocksState = KnownSizeArray<LegacyBlockState, `0..${typeof MAX_RECENT_HISTORY}`>;
-
-declare class LegacyBlockState extends WithDebug {
-  static Codec = codec.Class(LegacyBlockState, {
-    headerHash: codec.bytes(HASH_SIZE).asOpaque<HeaderHash>(),
-    mmr: codec.object({
-      peaks: readonlyArray(codec.sequenceVarLen(codec.optional(codec.bytes(HASH_SIZE)))),
-    }),
-    postStateRoot: codec.bytes(HASH_SIZE).asOpaque<StateRootHash>(),
-    reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
-  });
-
-  static create({ headerHash, mmr, postStateRoot, reported }: CodecRecord<LegacyBlockState>) {
-    return new LegacyBlockState(headerHash, mmr, postStateRoot, reported);
-  }
-
-  private constructor(
-    /** Header hash. */
-    public readonly headerHash: HeaderHash,
-    /** Merkle mountain range peaks. */
-    public readonly mmr: MmrPeaks<KeccakHash>,
-    /** Posterior state root filled in with a 1-block delay. */
-    public postStateRoot: StateRootHash,
-    /** Reported work packages (no more than number of cores). */
-    public readonly reported: HashDictionary<WorkPackageHash, WorkPackageInfo>,
-  ) {
-    super();
-  }
-}
-
-declare class LegacyRecentBlocks extends WithDebug {
-  static Codec = codec.Class(LegacyRecentBlocks, {
-    blocks: codecKnownSizeArray(LegacyBlockState.Codec, {
-      minLength: 0,
-      maxLength: MAX_RECENT_HISTORY,
-      typicalLength: MAX_RECENT_HISTORY,
-    }),
-  });
-
-  static create(a: CodecRecord<LegacyRecentBlocks>) {
-    return new LegacyRecentBlocks(a.blocks);
-  }
-
-  private constructor(
-    /**
-     * Most recent blocks.
-     * https://graypaper.fluffylabs.dev/#/85129da/0fb6010fb601?v=0.6.3
-     */
-    public readonly blocks: LegacyBlocksState,
-  ) {
-    super();
-  }
-}
-
 /** Array of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
 type BlocksState = KnownSizeArray<BlockState, `0..${typeof MAX_RECENT_HISTORY}`>;
 
@@ -8549,87 +8488,54 @@ declare class RecentBlocks extends WithDebug {
 }
 
 /**
- *
+ * Recent history of blocks.
  *
- * https://graypaper.fluffylabs.dev/#/85129da/38cb0138cb01?v=0.6.3
 * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
 */
 declare class RecentBlocksHistory extends WithDebug {
   static Codec = Descriptor.new<RecentBlocksHistory>(
     "RecentBlocksHistory",
-
-    (encoder, value) =>
-      Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
-        ? RecentBlocks.Codec.encode(encoder, value.asCurrent())
-        : LegacyRecentBlocks.Codec.encode(encoder, value.asLegacy()),
+    RecentBlocks.Codec.sizeHint,
+    (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()),
     (decoder) => {
-
-
-        return RecentBlocksHistory.create(recentBlocks);
-      }
-      const legacyBlocks = LegacyRecentBlocks.Codec.decode(decoder);
-      return RecentBlocksHistory.legacyCreate(legacyBlocks);
+      const recentBlocks = RecentBlocks.Codec.decode(decoder);
+      return RecentBlocksHistory.create(recentBlocks);
     },
     (_sizer) => {
-      return
-        ? RecentBlocks.Codec.sizeHint
-        : LegacyRecentBlocks.Codec.sizeHint;
+      return RecentBlocks.Codec.sizeHint;
     },
   );
 
   static create(recentBlocks: RecentBlocks) {
-    return new RecentBlocksHistory(recentBlocks
-  }
-
-  static legacyCreate(legacyRecentBlocks: LegacyRecentBlocks) {
-    return new RecentBlocksHistory(null, legacyRecentBlocks);
+    return new RecentBlocksHistory(recentBlocks);
  }
 
   static empty() {
-
-
-
-
-
-
-      );
-    }
-    return RecentBlocksHistory.legacyCreate(LegacyRecentBlocks.create({ blocks: asKnownSize([]) }));
+    return RecentBlocksHistory.create(
+      RecentBlocks.create({
+        blocks: asKnownSize([]),
+        accumulationLog: { peaks: [] },
+      }),
+    );
   }
 
   /**
   * Returns the block's BEEFY super peak.
-   *
-   * NOTE: The `hasher` parameter exists solely for backward compatibility with legacy block format.
   */
-  static accumulationResult(
-    block
-  {
-      hasher,
-    }: {
-      hasher: MmrHasher<KeccakHash>;
-    },
-  ): KeccakHash {
-    return Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
-      ? (block as BlockState).accumulationResult
-      : MerkleMountainRange.fromPeaks(hasher, (block as LegacyBlockState).mmr).getSuperPeakHash();
+  static accumulationResult(block: BlockState): KeccakHash {
+    return (block as BlockState).accumulationResult;
  }
 
-  private constructor(
-    private readonly current: RecentBlocks | null,
-    private readonly legacy: LegacyRecentBlocks | null,
-  ) {
+  private constructor(private readonly current: RecentBlocks | null) {
     super();
  }
 
   /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
-  get blocks(): readonly
-    if (
+  get blocks(): readonly BlockState[] {
+    if (this.current !== null) {
      return this.current.blocks;
    }
-
-    return this.legacy.blocks;
-  }
+
    throw new Error("RecentBlocksHistory is in invalid state");
  }
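
Illustrative sketch (not part of the package diff): with the `Legacy*` path gone, a history is always backed by `RecentBlocks`, and `accumulationResult` no longer needs an MMR hasher. `asKnownSize` and the `accumulationLog` field are taken from this diff; the import paths are assumptions.

```ts
import { RecentBlocksHistory } from "@typeberry/lib"; // assumed re-export path
import type { BlockState } from "@typeberry/lib";

// Start from an empty history (RecentBlocks with no blocks and empty accumulation log).
const history = RecentBlocksHistory.empty();

// `updateBlocks` now takes plain BlockState[] (no LegacyBlockState union any more).
declare const newBlocks: BlockState[];
const updated = history.updateBlocks(newBlocks);

// The BEEFY super peak is read directly from a block's accumulation result.
const first = updated.blocks.at(0);
if (first !== undefined) {
  const superPeak = RecentBlocksHistory.accumulationResult(first);
}
```
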
@@ -8640,15 +8546,8 @@ declare class RecentBlocksHistory extends WithDebug {
      return this.current;
    }
 
-
-    if (this.
-      throw new Error("Cannot access legacy RecentBlocks format");
-    }
-    return this.legacy;
-  }
-
-  updateBlocks(blocks: (BlockState | LegacyBlockState)[]): RecentBlocksHistory {
-    if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) && this.current !== null) {
+  updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
+    if (this.current !== null) {
      return RecentBlocksHistory.create(
        RecentBlocks.create({
          ...this.current,
@@ -8656,13 +8555,7 @@ declare class RecentBlocksHistory extends WithDebug {
        }),
      );
    }
-
-    return RecentBlocksHistory.legacyCreate(
-      LegacyRecentBlocks.create({
-        blocks: asOpaqueType(blocks as LegacyBlockState[]),
-      }),
-    );
-  }
+
    throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
  }
 }
@@ -9211,9 +9104,7 @@ type ServicesUpdate = {
 };
 
 declare const codecServiceId: Descriptor<ServiceId> =
-  Compatibility.isSuite(TestSuite.W3F_DAVXY) ||
-  Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
-  Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
+  Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
    ? codec.u32.asOpaque<ServiceId>()
    : codec.varU32.convert(
      (s) => tryAsU32(s),
@@ -10255,11 +10146,6 @@ declare const index$e_InMemoryService: typeof InMemoryService;
 type index$e_InMemoryState = InMemoryState;
 declare const index$e_InMemoryState: typeof InMemoryState;
 type index$e_InMemoryStateFields = InMemoryStateFields;
-type index$e_LegacyBlockState = LegacyBlockState;
-declare const index$e_LegacyBlockState: typeof LegacyBlockState;
-type index$e_LegacyBlocksState = LegacyBlocksState;
-type index$e_LegacyRecentBlocks = LegacyRecentBlocks;
-declare const index$e_LegacyRecentBlocks: typeof LegacyRecentBlocks;
 type index$e_LookupHistoryItem = LookupHistoryItem;
 declare const index$e_LookupHistoryItem: typeof LookupHistoryItem;
 type index$e_LookupHistorySlots = LookupHistorySlots;
@@ -10330,8 +10216,8 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
 declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
 declare const index$e_zeroSizeHint: typeof zeroSizeHint;
 declare namespace index$e {
-  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$
-  export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$
+  export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
+  export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
 }
 
 type StateKey$1 = Opaque<OpaqueHash, "stateKey">;
@@ -16237,12 +16123,10 @@ declare enum Status {
 
 type InterpreterOptions = {
   useSbrkGas?: boolean;
-  ignoreInstructionGas?: boolean;
 };
 
 declare class Interpreter {
   private readonly useSbrkGas: boolean;
-  private readonly ignoreInstructionGas: boolean;
   private registers = new Registers();
   private code: Uint8Array = new Uint8Array();
   private mask = Mask.empty();
@@ -16270,9 +16154,8 @@ declare class Interpreter {
   private basicBlocks: BasicBlocks;
   private jumpTable = JumpTable.empty();
 
-  constructor({ useSbrkGas = false
+  constructor({ useSbrkGas = false }: InterpreterOptions = {}) {
    this.useSbrkGas = useSbrkGas;
-    this.ignoreInstructionGas = ignoreInstructionGas;
    this.argsDecoder = new ArgsDecoder();
    this.basicBlocks = new BasicBlocks();
    const mathOps = new MathOps(this.registers);
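
Illustrative sketch (not part of the package diff): `InterpreterOptions` is down to a single knob, and the removed `ignoreInstructionGas` flag would now be rejected by the type checker. The import path is an assumption.

```ts
import { Interpreter } from "@typeberry/lib"; // assumed re-export path

// Only `useSbrkGas` remains configurable; instruction gas is always charged.
const interpreter = new Interpreter({ useSbrkGas: false });

// new Interpreter({ ignoreInstructionGas: true });
// ^ no longer compiles: the option was removed from InterpreterOptions.
```
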
@@ -16368,7 +16251,7 @@ declare class Interpreter {
    const currentInstruction = this.code[this.pc] ?? Instruction.TRAP;
    const isValidInstruction = Instruction[currentInstruction] !== undefined;
    const gasCost = instructionGasMap[currentInstruction] ?? instructionGasMap[Instruction.TRAP];
-    const underflow = this.
+    const underflow = this.gas.sub(gasCost);
    if (underflow) {
      this.status = Status.OOG;
      return this.status;
@@ -16618,7 +16501,7 @@ interface HostCallHandler {
   readonly gasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
 
   /** Currently executing service id. */
-  readonly currentServiceId:
+  readonly currentServiceId: U32;
 
   /** Input&Output registers that we should add to tracing log. */
   readonly tracedRegisters: RegisterIndex[];
@@ -16631,14 +16514,21 @@ interface HostCallHandler {
   execute(gas: GasCounter, regs: IHostCallRegisters, memory: IHostCallMemory): Promise<undefined | PvmExecution>;
 }
 
-// TODO [ToDr] Rename to just `HostCalls`
 /** Container for all available host calls. */
 declare class HostCallsManager {
   private readonly hostCalls = new Map<HostCallIndex, HostCallHandler>();
-  private readonly missing
+  private readonly missing;
 
-  constructor(
-
+  constructor({
+    missing,
+    handlers = [],
+  }: {
+    missing: HostCallHandler;
+    handlers?: HostCallHandler[];
+  }) {
+    this.missing = missing;
+
+    for (const handler of handlers) {
      check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
      this.hostCalls.set(handler.index, handler);
    }
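
Illustrative sketch (not part of the package diff): `HostCallsManager` now takes a single options object. `Missing` is the fallback handler used elsewhere in this diff (`new HostCallsManager({ missing: new Missing() })`); the custom handlers and the import path below are assumptions.

```ts
import { HostCallsManager, Missing } from "@typeberry/lib"; // assumed re-exports
import type { HostCallHandler } from "@typeberry/lib";

// Hypothetical custom handlers; each is registered under its `index`,
// and registering two handlers with the same index is rejected.
declare const readHandler: HostCallHandler;
declare const writeHandler: HostCallHandler;

const hostCalls = new HostCallsManager({
  missing: new Missing(),
  handlers: [readHandler, writeHandler],
});
```
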
@@ -16677,16 +16567,10 @@ declare class InterpreterInstanceManager {
   private waitingQueue: ResolveFn[] = [];
 
   constructor(noOfPvmInstances: number) {
-    const shouldCountGas =
-      Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ||
-      Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
-      Compatibility.isSuite(TestSuite.W3F_DAVXY, GpVersion.V0_6_6);
-
    for (let i = 0; i < noOfPvmInstances; i++) {
      this.instances.push(
        new Interpreter({
          useSbrkGas: false,
-          ignoreInstructionGas: !shouldCountGas,
        }),
      );
    }
@@ -18470,7 +18354,7 @@ type JsonRecentBlockState = {
   reported: WorkPackageInfo[];
 };
 
-declare const
+declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
   {
    history: json.array(recentBlockStateFromJson),
    mmr: {
@@ -18494,49 +18378,6 @@ type JsonRecentBlocks = {
   };
 };
 
-declare const legacyRecentBlockStateFromJson = json.object<JsonRecentBlockStateLegacy, LegacyBlockState>(
-  {
-    header_hash: fromJson.bytes32(),
-    mmr: {
-      peaks: json.array(json.nullable(fromJson.bytes32())),
-    },
-    state_root: fromJson.bytes32(),
-    reported: json.array(reportedWorkPackageFromJson),
-  },
-  ({ header_hash, mmr, state_root, reported }) => {
-    return {
-      headerHash: header_hash,
-      mmr,
-      postStateRoot: state_root,
-      reported: HashDictionary.fromEntries(reported.map((x) => [x.workPackageHash, x])),
-    };
-  },
-);
-
-type JsonRecentBlockStateLegacy = {
-  header_hash: HeaderHash;
-  mmr: {
-    peaks: Array<KeccakHash | null>;
-  };
-  state_root: StateRootHash;
-  reported: WorkPackageInfo[];
-};
-
-declare const legacyRecentBlocksFromJson = json.object<LegacyBlocksState, RecentBlocksHistory>(
-  json.array(legacyRecentBlockStateFromJson),
-  (blocks) => {
-    return RecentBlocksHistory.legacyCreate(
-      LegacyRecentBlocks.create({
-        blocks,
-      }),
-    );
-  },
-);
-
-declare const recentBlocksHistoryFromJson = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
-  ? recentBlocksFromJson
-  : legacyRecentBlocksFromJson;
-
 declare const ticketFromJson: FromJson<Ticket> = json.object<Ticket>(
   {
    id: fromJson.bytes32(),
@@ -19019,7 +18860,6 @@ type index$1_JsonLookupMeta = JsonLookupMeta;
 type index$1_JsonPreimageItem = JsonPreimageItem;
 declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
 type index$1_JsonRecentBlockState = JsonRecentBlockState;
-type index$1_JsonRecentBlockStateLegacy = JsonRecentBlockStateLegacy;
 type index$1_JsonRecentBlocks = JsonRecentBlocks;
 type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
 type index$1_JsonService = JsonService;
@@ -19045,20 +18885,17 @@ declare const index$1_availabilityAssignmentFromJson: typeof availabilityAssignm
 declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
 declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
 declare const index$1_fullStateDumpFromJsonPre067: typeof fullStateDumpFromJsonPre067;
-declare const index$1_legacyRecentBlockStateFromJson: typeof legacyRecentBlockStateFromJson;
-declare const index$1_legacyRecentBlocksFromJson: typeof legacyRecentBlocksFromJson;
 declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
 declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
 declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
-declare const index$1_recentBlocksFromJson: typeof recentBlocksFromJson;
 declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
 declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
 declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEntryFromJson;
 declare const index$1_ticketFromJson: typeof ticketFromJson;
 declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
 declare namespace index$1 {
-  export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceInfoPre067 as JsonServiceInfoPre067, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_fullStateDumpFromJsonPre067 as fullStateDumpFromJsonPre067, index$
-  export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$
+  export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceInfoPre067 as JsonServiceInfoPre067, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_fullStateDumpFromJsonPre067 as fullStateDumpFromJsonPre067, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
+  export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_JsonStateDumpPre067 as JsonStateDumpPre067, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
 }
 
 /** Helper function to create most used hashes in the block */
@@ -19257,7 +19094,7 @@ declare class WorkPackageExecutor {
 
 declare class PvmExecutor {
   private readonly pvm: HostCalls;
-  private hostCalls = new
+  private hostCalls = new HostCallsManager({ missing: new Missing() });
   private pvmInstanceManager = new PvmInstanceManager(4);
 
   constructor(private serviceCode: BytesBlob) {