@typeberry/lib 0.0.1-1c07527 → 0.0.1-1ece488

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/configs/index.d.ts +74 -0
  2. package/index.d.ts +786 -1068
  3. package/index.js +1886 -1340
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -59,10 +59,8 @@ declare namespace index$s {
59
59
  }
60
60
 
61
61
  declare enum GpVersion {
62
- V0_6_5 = "0.6.5",
63
- V0_6_6 = "0.6.6",
64
62
  V0_6_7 = "0.6.7",
65
- V0_7_0 = "0.7.0-preview",
63
+ V0_7_0 = "0.7.0",
66
64
  V0_7_1 = "0.7.1-preview",
67
65
  }
68
66
 
@@ -73,16 +71,10 @@ declare enum TestSuite {
73
71
 
74
72
  declare const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
75
73
 
76
- declare const ALL_VERSIONS_IN_ORDER = [
77
- GpVersion.V0_6_5,
78
- GpVersion.V0_6_6,
79
- GpVersion.V0_6_7,
80
- GpVersion.V0_7_0,
81
- GpVersion.V0_7_1,
82
- ];
74
+ declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1];
83
75
 
84
76
  declare const env = typeof process === "undefined" ? {} : process.env;
85
- declare const DEFAULT_VERSION = GpVersion.V0_6_7;
77
+ declare const DEFAULT_VERSION = GpVersion.V0_7_0;
86
78
  declare let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
87
79
  declare let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
88
80
 
@@ -168,6 +160,10 @@ declare class Compatibility {
168
160
  }
169
161
  }
170
162
 
163
+ declare function isBrowser() {
164
+ return typeof process === "undefined" || typeof process.abort === "undefined";
165
+ }
166
+
171
167
  /**
172
168
  * A function to perform runtime assertions.
173
169
  *
@@ -286,20 +282,19 @@ declare function inspect<T>(val: T): string {
286
282
  }
287
283
 
288
284
  /** Utility function to measure time taken for some operation [ms]. */
289
- declare const measure =
290
- typeof process === "undefined"
291
- ? (id: string) => {
292
- const start = performance.now();
293
- return () => `${id} took ${performance.now() - start}ms`;
294
- }
295
- : (id: string) => {
296
- const start = process.hrtime.bigint();
297
- return () => {
298
- const tookNano = process.hrtime.bigint() - start;
299
- const tookMilli = Number(tookNano / 1_000_000n).toFixed(2);
300
- return `${id} took ${tookMilli}ms`;
301
- };
285
+ declare const measure = isBrowser()
286
+ ? (id: string) => {
287
+ const start = performance.now();
288
+ return () => `${id} took ${performance.now() - start}ms`;
289
+ }
290
+ : (id: string) => {
291
+ const start = process.hrtime.bigint();
292
+ return () => {
293
+ const tookNano = process.hrtime.bigint() - start;
294
+ const tookMilli = Number(tookNano / 1_000_000n).toFixed(2);
295
+ return `${id} took ${tookMilli}ms`;
302
296
  };
297
+ };
303
298
 
304
299
  /** A class that adds `toString` method that prints all properties of an object. */
305
300
  declare abstract class WithDebug {
@@ -498,6 +493,8 @@ type DeepEqualOptions = {
498
493
  errorsCollector?: ErrorsCollector;
499
494
  };
500
495
 
496
+ declare let oomWarningPrinted = false;
497
+
501
498
  /** Deeply compare `actual` and `expected` values. */
502
499
  declare function deepEqual<T>(
503
500
  actual: T | undefined,
@@ -530,7 +527,7 @@ declare function deepEqual<T>(
530
527
  try {
531
528
  assert.strictEqual(actualDisp, expectedDisp, message);
532
529
  } catch (e) {
533
- if (isOoMWorkaroundNeeded) {
530
+ if (isOoMWorkaroundNeeded && !oomWarningPrinted) {
534
531
  console.warn(
535
532
  [
536
533
  "Stacktrace may be crappy because of a problem in nodejs.",
@@ -538,6 +535,7 @@ declare function deepEqual<T>(
538
535
  "Maybe we do not need it anymore",
539
536
  ].join("\n"),
540
537
  );
538
+ oomWarningPrinted = true;
541
539
  }
542
540
  throw e;
543
541
  }
@@ -786,17 +784,19 @@ declare const index$r_ensure: typeof ensure;
786
784
  declare const index$r_env: typeof env;
787
785
  declare const index$r_getAllKeysSorted: typeof getAllKeysSorted;
788
786
  declare const index$r_inspect: typeof inspect;
787
+ declare const index$r_isBrowser: typeof isBrowser;
789
788
  declare const index$r_isResult: typeof isResult;
790
789
  declare const index$r_isTaggedError: typeof isTaggedError;
791
790
  declare const index$r_maybeTaggedErrorToString: typeof maybeTaggedErrorToString;
792
791
  declare const index$r_measure: typeof measure;
792
+ declare const index$r_oomWarningPrinted: typeof oomWarningPrinted;
793
793
  declare const index$r_parseCurrentSuite: typeof parseCurrentSuite;
794
794
  declare const index$r_parseCurrentVersion: typeof parseCurrentVersion;
795
795
  declare const index$r_resultToString: typeof resultToString;
796
796
  declare const index$r_seeThrough: typeof seeThrough;
797
797
  declare const index$r_trimStack: typeof trimStack;
798
798
  declare namespace index$r {
799
- export { index$r_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$r_CURRENT_SUITE as CURRENT_SUITE, index$r_CURRENT_VERSION as CURRENT_VERSION, index$r_Compatibility as Compatibility, index$r_DEFAULT_SUITE as DEFAULT_SUITE, index$r_DEFAULT_VERSION as DEFAULT_VERSION, index$r_ErrorsCollector as ErrorsCollector, index$r_GpVersion as GpVersion, Result$2 as Result, index$r_RichTaggedError as RichTaggedError, index$r_TEST_COMPARE_USING as TEST_COMPARE_USING, index$r_TestSuite as TestSuite, index$r_WithDebug as WithDebug, index$r___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$r_asOpaqueType as asOpaqueType, index$r_assertEmpty as assertEmpty, index$r_assertNever as assertNever, index$r_callCompareFunction as callCompareFunction, index$r_cast as cast, index$r_check as check, index$r_deepEqual as deepEqual, index$r_ensure as ensure, index$r_env as env, index$r_getAllKeysSorted as getAllKeysSorted, index$r_inspect as inspect, index$r_isResult as isResult, index$r_isTaggedError as isTaggedError, index$r_maybeTaggedErrorToString as maybeTaggedErrorToString, index$r_measure as measure, index$r_parseCurrentSuite as parseCurrentSuite, index$r_parseCurrentVersion as parseCurrentVersion, index$r_resultToString as resultToString, index$r_seeThrough as seeThrough, index$r_trimStack as trimStack };
799
+ export { index$r_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$r_CURRENT_SUITE as CURRENT_SUITE, index$r_CURRENT_VERSION as CURRENT_VERSION, index$r_Compatibility as Compatibility, index$r_DEFAULT_SUITE as DEFAULT_SUITE, index$r_DEFAULT_VERSION as DEFAULT_VERSION, index$r_ErrorsCollector as ErrorsCollector, index$r_GpVersion as GpVersion, Result$2 as Result, index$r_RichTaggedError as RichTaggedError, index$r_TEST_COMPARE_USING as TEST_COMPARE_USING, index$r_TestSuite as TestSuite, index$r_WithDebug as WithDebug, index$r___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$r_asOpaqueType as asOpaqueType, index$r_assertEmpty as assertEmpty, index$r_assertNever as assertNever, index$r_callCompareFunction as callCompareFunction, index$r_cast as cast, index$r_check as check, index$r_deepEqual as deepEqual, index$r_ensure as ensure, index$r_env as env, index$r_getAllKeysSorted as getAllKeysSorted, index$r_inspect as inspect, index$r_isBrowser as isBrowser, index$r_isResult as isResult, index$r_isTaggedError as isTaggedError, index$r_maybeTaggedErrorToString as maybeTaggedErrorToString, index$r_measure as measure, index$r_oomWarningPrinted as oomWarningPrinted, index$r_parseCurrentSuite as parseCurrentSuite, index$r_parseCurrentVersion as parseCurrentVersion, index$r_resultToString as resultToString, index$r_seeThrough as seeThrough, index$r_trimStack as trimStack };
800
800
  export type { index$r_DeepEqualOptions as DeepEqualOptions, index$r_EnumMapping as EnumMapping, index$r_ErrorResult as ErrorResult, index$r_OK as OK, index$r_OkResult as OkResult, index$r_Opaque as Opaque, index$r_StringLiteral as StringLiteral, index$r_TaggedError as TaggedError, index$r_TokenOf as TokenOf, index$r_Uninstantiable as Uninstantiable, index$r_WithOpaque as WithOpaque };
801
801
  }
802
802
 
@@ -2332,6 +2332,141 @@ declare class Skipper {
2332
2332
  }
2333
2333
  }
2334
2334
 
2335
+ /** Infer the type that is described by given descriptor `T` */
2336
+ type DescribedBy<T> = T extends Descriptor<infer V> ? V : never;
2337
+
2338
+ /**
2339
+ * Converts a class `T` into an object with the same fields as the class.
2340
+ */
2341
+ type CodecRecord<T> = {
2342
+ [K in PropertyKeys<T>]: T[K];
2343
+ };
2344
+
2345
+ /**
2346
+ * Same as `CodecRecord<T>`, but the fields are all optional.
2347
+ */
2348
+ type OptionalRecord<T> = {
2349
+ [K in PropertyKeys<T>]?: T[K];
2350
+ };
2351
+
2352
+ /**
2353
+ * `Descriptor` of a complex type of some class with a bunch of public fields.
2354
+ */
2355
+ type DescriptorRecord<T> = {
2356
+ [K in PropertyKeys<T>]: Descriptor<T[K], unknown>;
2357
+ };
2358
+
2359
+ /**
2360
+ * Simplified `DescriptorRecord`, where all keys must be used as descriptor keys.
2361
+ */
2362
+ type SimpleDescriptorRecord<T> = {
2363
+ [K in keyof T]: Descriptor<T[K], unknown>;
2364
+ };
2365
+
2366
+ /** Only keys that contain properties, not methods. */
2367
+ type PropertyKeys<T> = {
2368
+ // biome-ignore lint/complexity/noBannedTypes: We want to skip any function-like types here.
2369
+ [K in Extract<keyof T, string>]: T[K] extends Function ? never : K;
2370
+ }[Extract<keyof T, string>];
2371
+
2372
+ /** A constructor of basic data object that takes a `Record<T>`. */
2373
+ type ClassConstructor<T> = {
2374
+ name: string;
2375
+ create: (o: CodecRecord<T>) => T;
2376
+ };
2377
+
2378
+ /**
2379
+ * A full codec type, i.e. the `Encode` and `Decode`.
2380
+ */
2381
+ type Codec<T> = Encode<T> & Decode<T>;
2382
+
2383
+ /**
2384
+ * Type descriptor definition.
2385
+ *
2386
+ * The type descriptor can encode & decode given type `T`, but
2387
+ * also have a `name` and a byte-size hint.
2388
+ *
2389
+ * Descriptors can be composed to form more complex typings.
2390
+ */
2391
+ declare class Descriptor<T, V = T> implements Codec<T>, Skip {
2392
+ /** A "lightweight" version of the object. */
2393
+ public readonly View: Descriptor<V>;
2394
+
2395
+ /** New descriptor with specialized `View`. */
2396
+ public static withView<T, V>(
2397
+ name: string,
2398
+ sizeHint: SizeHint,
2399
+ encode: Descriptor<T, V>["encode"],
2400
+ decode: Descriptor<T, V>["decode"],
2401
+ skip: Descriptor<T, V>["skip"],
2402
+ view: Descriptor<V>,
2403
+ ) {
2404
+ return new Descriptor(name, sizeHint, encode, decode, skip, view);
2405
+ }
2406
+
2407
+ /** Create a new descriptor without a specialized `View`. */
2408
+ public static new<T>(
2409
+ name: string,
2410
+ sizeHint: SizeHint,
2411
+ encode: Descriptor<T>["encode"],
2412
+ decode: Descriptor<T>["decode"],
2413
+ skip: Descriptor<T>["skip"],
2414
+ ) {
2415
+ return new Descriptor(name, sizeHint, encode, decode, skip, null);
2416
+ }
2417
+
2418
+ private constructor(
2419
+ /** Descriptive name of the coded data. */
2420
+ public readonly name: string,
2421
+ /** A byte size hint for encoded data. */
2422
+ public readonly sizeHint: SizeHint,
2423
+ /** Encoding function. */
2424
+ public readonly encode: (e: Encoder, elem: T) => void,
2425
+ /** Decoding function. */
2426
+ public readonly decode: (d: Decoder) => T,
2427
+ /** Skipping function. */
2428
+ public readonly skip: (s: Skipper) => void,
2429
+ /** view object. It can be `null` iff T===V. */
2430
+ view: Descriptor<V> | null,
2431
+ ) {
2432
+ // We cast here to make sure that the field is always set.
2433
+ this.View = view ?? (this as unknown as Descriptor<V>);
2434
+ }
2435
+
2436
+ /**
2437
+ * Extract an encoded version of this type from the decoder.
2438
+ *
2439
+ * This function skips the object instead of decoding it,
2440
+ * allowing to retrieve the encoded portion of the object from `Decoder`.
2441
+ */
2442
+ public skipEncoded(decoder: Decoder) {
2443
+ const initBytes = decoder.bytesRead();
2444
+ this.skip(new Skipper(decoder));
2445
+ const endBytes = decoder.bytesRead();
2446
+ return BytesBlob.blobFrom(decoder.source.subarray(initBytes, endBytes));
2447
+ }
2448
+
2449
+ /** Return a new descriptor that converts data into some other type. */
2450
+ public convert<F>(input: (i: F) => T, output: (i: T) => F): Descriptor<F, V> {
2451
+ return new Descriptor(
2452
+ this.name,
2453
+ this.sizeHint,
2454
+ (e: Encoder, elem: F) => this.encode(e, input(elem)),
2455
+ (d: Decoder) => output(this.decode(d)),
2456
+ this.skip,
2457
+ this.View,
2458
+ );
2459
+ }
2460
+
2461
+ /** Safely cast the descriptor value to a opaque type. */
2462
+ public asOpaque<R>(): Descriptor<Opaque<T, TokenOf<R, T>>, V> {
2463
+ return this.convert(
2464
+ (i) => seeThrough(i),
2465
+ (o) => asOpaqueType<T, TokenOf<R, T>>(o),
2466
+ );
2467
+ }
2468
+ }
2469
+
2335
2470
  type LengthRange = {
2336
2471
  /** Inclusive value of minimal length of the sequence. */
2337
2472
  minLength: number;
@@ -2625,159 +2760,24 @@ declare const TYPICAL_SEQUENCE_LENGTH = 64;
2625
2760
  declare const TYPICAL_DICTIONARY_LENGTH = 32;
2626
2761
 
2627
2762
  /**
2628
- * A full codec type, i.e. the `Encode` and `Decode`.
2629
- */
2630
- type Codec<T> = Encode<T> & Decode<T>;
2631
-
2632
- /**
2633
- * Type descriptor definition.
2634
- *
2635
- * The type descriptor can encode & decode given type `T`, but
2636
- * also have a `name` and a byte-size hint.
2637
- *
2638
- * Descriptors can be composed to form more complex typings.
2639
- */
2640
- declare class Descriptor<T, V = T> implements Codec<T>, Skip {
2641
- /** A "lightweight" version of the object. */
2642
- public readonly View: Descriptor<V>;
2643
-
2644
- /** New descriptor with specialized `View`. */
2645
- public static withView<T, V>(
2646
- name: string,
2647
- sizeHint: SizeHint,
2648
- encode: Descriptor<T, V>["encode"],
2649
- decode: Descriptor<T, V>["decode"],
2650
- skip: Descriptor<T, V>["skip"],
2651
- view: Descriptor<V>,
2652
- ) {
2653
- return new Descriptor(name, sizeHint, encode, decode, skip, view);
2654
- }
2655
-
2656
- /** Create a new descriptor without a specialized `View`. */
2657
- public static new<T>(
2658
- name: string,
2659
- sizeHint: SizeHint,
2660
- encode: Descriptor<T>["encode"],
2661
- decode: Descriptor<T>["decode"],
2662
- skip: Descriptor<T>["skip"],
2663
- ) {
2664
- return new Descriptor(name, sizeHint, encode, decode, skip, null);
2665
- }
2666
-
2667
- private constructor(
2668
- /** Descriptive name of the coded data. */
2669
- public readonly name: string,
2670
- /** A byte size hint for encoded data. */
2671
- public readonly sizeHint: SizeHint,
2672
- /** Encoding function. */
2673
- public readonly encode: (e: Encoder, elem: T) => void,
2674
- /** Decoding function. */
2675
- public readonly decode: (d: Decoder) => T,
2676
- /** Skipping function. */
2677
- public readonly skip: (s: Skipper) => void,
2678
- /** view object. It can be `null` iff T===V. */
2679
- view: Descriptor<V> | null,
2680
- ) {
2681
- // We cast here to make sure that the field is always set.
2682
- this.View = view ?? (this as unknown as Descriptor<V>);
2683
- }
2684
-
2685
- /**
2686
- * Extract an encoded version of this type from the decoder.
2687
- *
2688
- * This function skips the object instead of decoding it,
2689
- * allowing to retrieve the encoded portion of the object from `Decoder`.
2690
- */
2691
- public skipEncoded(decoder: Decoder) {
2692
- const initBytes = decoder.bytesRead();
2693
- this.skip(new Skipper(decoder));
2694
- const endBytes = decoder.bytesRead();
2695
- return BytesBlob.blobFrom(decoder.source.subarray(initBytes, endBytes));
2696
- }
2697
-
2698
- /** Return a new descriptor that converts data into some other type. */
2699
- public convert<F>(input: (i: F) => T, output: (i: T) => F): Descriptor<F, V> {
2700
- return new Descriptor(
2701
- this.name,
2702
- this.sizeHint,
2703
- (e: Encoder, elem: F) => this.encode(e, input(elem)),
2704
- (d: Decoder) => output(this.decode(d)),
2705
- this.skip,
2706
- this.View,
2707
- );
2708
- }
2709
-
2710
- /** Safely cast the descriptor value to a opaque type. */
2711
- public asOpaque<R>(): Descriptor<Opaque<T, TokenOf<R, T>>, V> {
2712
- return this.convert(
2713
- (i) => seeThrough(i),
2714
- (o) => asOpaqueType<T, TokenOf<R, T>>(o),
2715
- );
2716
- }
2717
- }
2718
-
2719
- /**
2720
- * Convert a descriptor for regular array into readonly one.
2721
- *
2722
- * NOTE: for performance reasons we assume that every `readonly T[]` is `T[]`,
2723
- * and the `readonly` annotation is there just to prevent altering it.
2724
- * It's not true in a general case, but should be good enough for us.
2725
- *
2726
- */
2727
- declare function readonlyArray<T, V>(desc: Descriptor<T[], V>): Descriptor<readonly T[], V> {
2728
- return desc.convert(
2729
- (x) => {
2730
- check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
2731
- // NOTE [ToDr] This assumption is incorrect in general, but it's documented
2732
- // in the general note. We avoid `.slice()` the array for performance reasons.
2733
- return x as T[];
2734
- },
2735
- (x) => x,
2736
- );
2737
- }
2738
-
2739
- /** Infer the type that is described by given descriptor `T` */
2740
- type DescribedBy<T> = T extends Descriptor<infer V> ? V : never;
2741
-
2742
- /**
2743
- * Converts a class `T` into an object with the same fields as the class.
2744
- */
2745
- type CodecRecord<T> = {
2746
- [K in PropertyKeys<T>]: T[K];
2747
- };
2748
-
2749
- /**
2750
- * Same as `CodecRecord<T>`, but the fields are all optional.
2751
- */
2752
- type OptionalRecord<T> = {
2753
- [K in PropertyKeys<T>]?: T[K];
2754
- };
2755
-
2756
- /**
2757
- * `Descriptor` of a complex type of some class with a bunch of public fields.
2758
- */
2759
- type DescriptorRecord<T> = {
2760
- [K in PropertyKeys<T>]: Descriptor<T[K], unknown>;
2761
- };
2762
-
2763
- /**
2764
- * Simplified `DescriptorRecord`, where all keys must be used as descriptor keys.
2765
- */
2766
- type SimpleDescriptorRecord<T> = {
2767
- [K in keyof T]: Descriptor<T[K], unknown>;
2768
- };
2769
-
2770
- /** Only keys that contain properties, not methods. */
2771
- type PropertyKeys<T> = {
2772
- // biome-ignore lint/complexity/noBannedTypes: We want to skip any function-like types here.
2773
- [K in Extract<keyof T, string>]: T[K] extends Function ? never : K;
2774
- }[Extract<keyof T, string>];
2775
-
2776
- /** A constructor of basic data object that takes a `Record<T>`. */
2777
- type ClassConstructor<T> = {
2778
- name: string;
2779
- create: (o: CodecRecord<T>) => T;
2780
- };
2763
+ * Convert a descriptor for regular array into readonly one.
2764
+ *
2765
+ * NOTE: for performance reasons we assume that every `readonly T[]` is `T[]`,
2766
+ * and the `readonly` annotation is there just to prevent altering it.
2767
+ * It's not true in a general case, but should be good enough for us.
2768
+ *
2769
+ */
2770
+ declare function readonlyArray<T, V>(desc: Descriptor<T[], V>): Descriptor<readonly T[], V> {
2771
+ return desc.convert(
2772
+ (x) => {
2773
+ check(Array.isArray(x), `Non-arrays are not supported as 'readonly': got ${typeof x}, ${x}`);
2774
+ // NOTE [ToDr] This assumption is incorrect in general, but it's documented
2775
+ // in the general note. We avoid `.slice()` the array for performance reasons.
2776
+ return x as T[];
2777
+ },
2778
+ (x) => x,
2779
+ );
2780
+ }
2781
2781
 
2782
2782
  declare function exactHint(bytes: number): SizeHint {
2783
2783
  return {
@@ -3399,6 +3399,9 @@ type index$o_SimpleDescriptorRecord<T> = SimpleDescriptorRecord<T>;
3399
3399
  type index$o_SizeHint = SizeHint;
3400
3400
  declare const index$o_TYPICAL_DICTIONARY_LENGTH: typeof TYPICAL_DICTIONARY_LENGTH;
3401
3401
  declare const index$o_TYPICAL_SEQUENCE_LENGTH: typeof TYPICAL_SEQUENCE_LENGTH;
3402
+ type index$o_ViewField<T, V> = ViewField<T, V>;
3403
+ declare const index$o_ViewField: typeof ViewField;
3404
+ type index$o_ViewOf<T, D extends DescriptorRecord<T>> = ViewOf<T, D>;
3402
3405
  declare const index$o_addSizeHints: typeof addSizeHints;
3403
3406
  declare const index$o_decodeVariableLengthExtraBytes: typeof decodeVariableLengthExtraBytes;
3404
3407
  declare const index$o_exactHint: typeof exactHint;
@@ -3411,8 +3414,8 @@ declare const index$o_sequenceViewVarLen: typeof sequenceViewVarLen;
3411
3414
  declare const index$o_tryAsExactBytes: typeof tryAsExactBytes;
3412
3415
  declare const index$o_validateLength: typeof validateLength;
3413
3416
  declare namespace index$o {
3414
- export { index$o_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$o_Decoder as Decoder, index$o_Descriptor as Descriptor, index$o_Encoder as Encoder, index$o_MASKS as MASKS, index$o_MAX_LENGTH as MAX_LENGTH, index$o_ObjectView as ObjectView, index$o_SequenceView as SequenceView, index$o_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$o_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$o_addSizeHints as addSizeHints, codec$1 as codec, index$o_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$o_exactHint as exactHint, index$o_forEachDescriptor as forEachDescriptor, index$o_hasUniqueView as hasUniqueView, index$o_objectView as objectView, index$o_readonlyArray as readonlyArray, index$o_sequenceViewFixLen as sequenceViewFixLen, index$o_sequenceViewVarLen as sequenceViewVarLen, index$o_tryAsExactBytes as tryAsExactBytes, index$o_validateLength as validateLength };
3415
- export type { index$o_ClassConstructor as ClassConstructor, index$o_Codec as Codec, index$o_CodecRecord as CodecRecord, index$o_Decode as Decode, index$o_DescribedBy as DescribedBy, index$o_DescriptorRecord as DescriptorRecord, index$o_Encode as Encode, index$o_LengthRange as LengthRange, index$o_OptionalRecord as OptionalRecord, Options$1 as Options, index$o_PropertyKeys as PropertyKeys, index$o_SimpleDescriptorRecord as SimpleDescriptorRecord, index$o_SizeHint as SizeHint };
3417
+ export { index$o_DEFAULT_START_LENGTH as DEFAULT_START_LENGTH, index$o_Decoder as Decoder, index$o_Descriptor as Descriptor, index$o_Encoder as Encoder, index$o_MASKS as MASKS, index$o_MAX_LENGTH as MAX_LENGTH, index$o_ObjectView as ObjectView, index$o_SequenceView as SequenceView, index$o_TYPICAL_DICTIONARY_LENGTH as TYPICAL_DICTIONARY_LENGTH, index$o_TYPICAL_SEQUENCE_LENGTH as TYPICAL_SEQUENCE_LENGTH, index$o_ViewField as ViewField, index$o_addSizeHints as addSizeHints, codec$1 as codec, index$o_decodeVariableLengthExtraBytes as decodeVariableLengthExtraBytes, index$o_exactHint as exactHint, index$o_forEachDescriptor as forEachDescriptor, index$o_hasUniqueView as hasUniqueView, index$o_objectView as objectView, index$o_readonlyArray as readonlyArray, index$o_sequenceViewFixLen as sequenceViewFixLen, index$o_sequenceViewVarLen as sequenceViewVarLen, index$o_tryAsExactBytes as tryAsExactBytes, index$o_validateLength as validateLength };
3418
+ export type { index$o_ClassConstructor as ClassConstructor, index$o_Codec as Codec, index$o_CodecRecord as CodecRecord, index$o_Decode as Decode, index$o_DescribedBy as DescribedBy, index$o_DescriptorRecord as DescriptorRecord, index$o_Encode as Encode, index$o_LengthRange as LengthRange, index$o_OptionalRecord as OptionalRecord, Options$1 as Options, index$o_PropertyKeys as PropertyKeys, index$o_SimpleDescriptorRecord as SimpleDescriptorRecord, index$o_SizeHint as SizeHint, index$o_ViewOf as ViewOf };
3416
3419
  }
3417
3420
 
3418
3421
  /**
@@ -4461,6 +4464,84 @@ declare namespace index$m {
4461
4464
  export type { index$m_HashWithZeroedBit as HashWithZeroedBit, index$m_ImmutableHashDictionary as ImmutableHashDictionary, index$m_ImmutableHashSet as ImmutableHashSet, index$m_ImmutableSortedArray as ImmutableSortedArray, index$m_ImmutableSortedSet as ImmutableSortedSet, index$m_KeyMapper as KeyMapper, index$m_KeyMappers as KeyMappers, index$m_KnownSize as KnownSize, index$m_KnownSizeArray as KnownSizeArray, index$m_KnownSizeId as KnownSizeId, index$m_NestedMaps as NestedMaps };
4462
4465
  }
4463
4466
 
4467
+ declare namespace bandersnatch_d_exports {
4468
+ export { batch_verify_tickets, __wbg_init$2 as default, derive_public_key, initSync$2 as initSync, ring_commitment, verify_seal };
4469
+ export type { InitInput$2 as InitInput, InitOutput$2 as InitOutput, SyncInitInput$2 as SyncInitInput };
4470
+ }
4471
+ /* tslint:disable */
4472
+ /* eslint-disable */
4473
+ /**
4474
+ * @param {Uint8Array} keys
4475
+ * @returns {Uint8Array}
4476
+ */
4477
+ declare function ring_commitment(keys: Uint8Array): Uint8Array;
4478
+ /**
4479
+ * Derive Private and Public Key from Seed
4480
+ *
4481
+ * returns: `Vec<u8>` containing the exit (1 byte) status followed by the (32 bytes) public key
4482
+ * @param {Uint8Array} seed
4483
+ * @returns {Uint8Array}
4484
+ */
4485
+ declare function derive_public_key(seed: Uint8Array): Uint8Array;
4486
+ /**
4487
+ * Seal verification as defined in:
4488
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/0eff000eff00?v=0.6.4
4489
+ * or
4490
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/0e54010e5401?v=0.6.4
4491
+ * @param {Uint8Array} keys
4492
+ * @param {number} signer_key_index
4493
+ * @param {Uint8Array} seal_data
4494
+ * @param {Uint8Array} payload
4495
+ * @param {Uint8Array} aux_data
4496
+ * @returns {Uint8Array}
4497
+ */
4498
+ declare function verify_seal(keys: Uint8Array, signer_key_index: number, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
4499
+ /**
4500
+ * Verify multiple tickets at once as defined in:
4501
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/0f3e000f3e00?v=0.6.4
4502
+ *
4503
+ * NOTE: the aux_data of VRF function is empty!
4504
+ * @param {Uint8Array} keys
4505
+ * @param {Uint8Array} tickets_data
4506
+ * @param {number} vrf_input_data_len
4507
+ * @returns {Uint8Array}
4508
+ */
4509
+ declare function batch_verify_tickets(keys: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
4510
+ type InitInput$2 = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
4511
+ interface InitOutput$2 {
4512
+ readonly memory: WebAssembly.Memory;
4513
+ readonly ring_commitment: (a: number, b: number, c: number) => void;
4514
+ readonly derive_public_key: (a: number, b: number, c: number) => void;
4515
+ readonly verify_seal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number) => void;
4516
+ readonly batch_verify_tickets: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
4517
+ readonly __wbindgen_add_to_stack_pointer: (a: number) => number;
4518
+ readonly __wbindgen_malloc: (a: number, b: number) => number;
4519
+ readonly __wbindgen_free: (a: number, b: number, c: number) => void;
4520
+ }
4521
+ type SyncInitInput$2 = BufferSource | WebAssembly.Module;
4522
+ /**
4523
+ * Instantiates the given `module`, which can either be bytes or
4524
+ * a precompiled `WebAssembly.Module`.
4525
+ *
4526
+ * @param {SyncInitInput} module
4527
+ *
4528
+ * @returns {InitOutput}
4529
+ */
4530
+ declare function initSync$2(module: SyncInitInput$2): InitOutput$2;
4531
+
4532
+ /**
4533
+ * If `module_or_path` is {RequestInfo} or {URL}, makes a request and
4534
+ * for everything else, calls `WebAssembly.instantiate` directly.
4535
+ *
4536
+ * @param {InitInput | Promise<InitInput>} module_or_path
4537
+ *
4538
+ * @returns {Promise<InitOutput>}
4539
+ */
4540
+ declare function __wbg_init$2(module_or_path?: InitInput$2 | Promise<InitInput$2>): Promise<InitOutput$2>;
4541
+ //#endregion
4542
+ //#region native/index.d.ts
4543
+ declare function initAll(): Promise<void>;
4544
+
4464
4545
  /** ED25519 private key size. */
4465
4546
  declare const ED25519_PRIV_KEY_BYTES = 32;
4466
4547
  type ED25519_PRIV_KEY_BYTES = typeof ED25519_PRIV_KEY_BYTES;
@@ -4553,7 +4634,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
4553
4634
  offset += messageLength;
4554
4635
  }
4555
4636
 
4556
- const result = Array.from(verify_ed25519(data)).map((x) => x === 1);
4637
+ const result = Array.from(ed25519.verify_ed25519(data)).map((x) => x === 1);
4557
4638
  return Promise.resolve(result);
4558
4639
  }
4559
4640
 
@@ -4575,7 +4656,7 @@ declare async function verifyBatch<T extends BytesBlob>(input: Input<T>[]): Prom
4575
4656
 
4576
4657
  const data = BytesBlob.blobFromParts(first, ...rest).raw;
4577
4658
 
4578
- return Promise.resolve(verify_ed25519_batch(data));
4659
+ return Promise.resolve(ed25519.verify_ed25519_batch(data));
4579
4660
  }
4580
4661
 
4581
4662
  type ed25519_ED25519_KEY_BYTES = ED25519_KEY_BYTES;
@@ -4595,59 +4676,6 @@ declare namespace ed25519 {
4595
4676
  export type { ed25519_ED25519_KEY_BYTES as ED25519_KEY_BYTES, ed25519_ED25519_PRIV_KEY_BYTES as ED25519_PRIV_KEY_BYTES, ed25519_ED25519_SIGNATURE_BYTES as ED25519_SIGNATURE_BYTES, ed25519_Ed25519Key as Ed25519Key, ed25519_Ed25519Signature as Ed25519Signature, ed25519_Input as Input };
4596
4677
  }
4597
4678
 
4598
- /* tslint:disable */
4599
- /* eslint-disable */
4600
- /**
4601
- * @param {Uint8Array} keys
4602
- * @returns {Uint8Array}
4603
- */
4604
- declare function ring_commitment(keys: Uint8Array): Uint8Array;
4605
- /**
4606
- * Derive Private and Public Key from Seed
4607
- *
4608
- * returns: `Vec<u8>` containing the exit (1 byte) status followed by the (32 bytes) public key
4609
- * @param {Uint8Array} seed
4610
- * @returns {Uint8Array}
4611
- */
4612
- declare function derive_public_key(seed: Uint8Array): Uint8Array;
4613
- /**
4614
- * Seal verification as defined in:
4615
- * https://graypaper.fluffylabs.dev/#/68eaa1f/0eff000eff00?v=0.6.4
4616
- * or
4617
- * https://graypaper.fluffylabs.dev/#/68eaa1f/0e54010e5401?v=0.6.4
4618
- * @param {Uint8Array} keys
4619
- * @param {number} signer_key_index
4620
- * @param {Uint8Array} seal_data
4621
- * @param {Uint8Array} payload
4622
- * @param {Uint8Array} aux_data
4623
- * @returns {Uint8Array}
4624
- */
4625
- declare function verify_seal(keys: Uint8Array, signer_key_index: number, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
4626
- /**
4627
- * Verify multiple tickets at once as defined in:
4628
- * https://graypaper.fluffylabs.dev/#/68eaa1f/0f3e000f3e00?v=0.6.4
4629
- *
4630
- * NOTE: the aux_data of VRF function is empty!
4631
- * @param {Uint8Array} keys
4632
- * @param {Uint8Array} tickets_data
4633
- * @param {number} vrf_input_data_len
4634
- * @returns {Uint8Array}
4635
- */
4636
- declare function batch_verify_tickets(keys: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
4637
-
4638
- declare const bandersnatch_d_batch_verify_tickets: typeof batch_verify_tickets;
4639
- declare const bandersnatch_d_derive_public_key: typeof derive_public_key;
4640
- declare const bandersnatch_d_ring_commitment: typeof ring_commitment;
4641
- declare const bandersnatch_d_verify_seal: typeof verify_seal;
4642
- declare namespace bandersnatch_d {
4643
- export {
4644
- bandersnatch_d_batch_verify_tickets as batch_verify_tickets,
4645
- bandersnatch_d_derive_public_key as derive_public_key,
4646
- bandersnatch_d_ring_commitment as ring_commitment,
4647
- bandersnatch_d_verify_seal as verify_seal,
4648
- };
4649
- }
4650
-
4651
4679
  /** Bandersnatch public key size. */
4652
4680
  declare const BANDERSNATCH_KEY_BYTES = 32;
4653
4681
  type BANDERSNATCH_KEY_BYTES = typeof BANDERSNATCH_KEY_BYTES;
@@ -4705,7 +4733,7 @@ type BlsKey = Opaque<Bytes<BLS_KEY_BYTES>, "BlsKey">;
4705
4733
 
4706
4734
  /** Derive a Bandersnatch public key from a seed. */
4707
4735
  declare function publicKey(seed: Uint8Array): BandersnatchKey {
4708
- const key = derive_public_key(seed);
4736
+ const key = bandersnatch.derive_public_key(seed);
4709
4737
 
4710
4738
  check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
4711
4739
 
@@ -4831,7 +4859,7 @@ declare const index$l_bandersnatch: typeof bandersnatch;
4831
4859
  declare const index$l_ed25519: typeof ed25519;
4832
4860
  declare const index$l_keyDerivation: typeof keyDerivation;
4833
4861
  declare namespace index$l {
4834
- export { index$l_Ed25519Pair as Ed25519Pair, index$l_bandersnatch as bandersnatch, bandersnatch_d as bandersnatchWasm, index$l_ed25519 as ed25519, index$l_keyDerivation as keyDerivation };
4862
+ export { index$l_Ed25519Pair as Ed25519Pair, index$l_bandersnatch as bandersnatch, bandersnatch_d_exports as bandersnatchWasm, index$l_ed25519 as ed25519, initAll as initWasm, index$l_keyDerivation as keyDerivation };
4835
4863
  export type { index$l_BANDERSNATCH_KEY_BYTES as BANDERSNATCH_KEY_BYTES, index$l_BANDERSNATCH_PROOF_BYTES as BANDERSNATCH_PROOF_BYTES, index$l_BANDERSNATCH_RING_ROOT_BYTES as BANDERSNATCH_RING_ROOT_BYTES, index$l_BANDERSNATCH_VRF_SIGNATURE_BYTES as BANDERSNATCH_VRF_SIGNATURE_BYTES, index$l_BLS_KEY_BYTES as BLS_KEY_BYTES, index$l_BandersnatchKey as BandersnatchKey, index$l_BandersnatchProof as BandersnatchProof, index$l_BandersnatchRingRoot as BandersnatchRingRoot, index$l_BandersnatchSecretSeed as BandersnatchSecretSeed, index$l_BandersnatchVrfSignature as BandersnatchVrfSignature, index$l_BlsKey as BlsKey, index$l_ED25519_KEY_BYTES as ED25519_KEY_BYTES, index$l_ED25519_PRIV_KEY_BYTES as ED25519_PRIV_KEY_BYTES, index$l_ED25519_SIGNATURE_BYTES as ED25519_SIGNATURE_BYTES, index$l_Ed25519Key as Ed25519Key, index$l_Ed25519SecretSeed as Ed25519SecretSeed, index$l_Ed25519Signature as Ed25519Signature, KeySeed as PublicKeySeed, index$l_SEED_SIZE as SEED_SIZE };
4836
4864
  }
4837
4865
 
@@ -4911,6 +4939,8 @@ declare class ChainSpec extends WithDebug {
4911
4939
  readonly maxBlockGas: U64;
4912
4940
  /** `G_R`: The gas allocated to invoke a work-package’s Refine logic. */
4913
4941
  readonly maxRefineGas: U64;
4942
+ /** `L`: The maximum age in timeslots of the lookup anchor. */
4943
+ readonly maxLookupAnchorAge: U32;
4914
4944
 
4915
4945
  constructor(data: Omit<ChainSpec, "validatorsSuperMajority" | "thirdOfValidators" | "erasureCodedPieceSize">) {
4916
4946
  super();
@@ -4930,6 +4960,7 @@ declare class ChainSpec extends WithDebug {
4930
4960
  this.erasureCodedPieceSize = tryAsU32(EC_SEGMENT_SIZE / data.numberECPiecesPerSegment);
4931
4961
  this.maxBlockGas = data.maxBlockGas;
4932
4962
  this.maxRefineGas = data.maxRefineGas;
4963
+ this.maxLookupAnchorAge = data.maxLookupAnchorAge;
4933
4964
  }
4934
4965
  }
4935
4966
 
@@ -4948,6 +4979,8 @@ declare const tinyChainSpec = new ChainSpec({
4948
4979
  preimageExpungePeriod: tryAsU32(32),
4949
4980
  maxBlockGas: tryAsU64(20_000_000),
4950
4981
  maxRefineGas: tryAsU64(1_000_000_000),
4982
+ // https://github.com/davxy/jam-conformance/pull/47/files#diff-27e26142b3a96e407dab40d388b63d553f5d9cdb66dec58cd93e63dd434f9e45R260
4983
+ maxLookupAnchorAge: tryAsU32(24),
4951
4984
  });
4952
4985
 
4953
4986
  /**
@@ -4967,6 +5000,7 @@ declare const fullChainSpec = new ChainSpec({
4967
5000
  preimageExpungePeriod: tryAsU32(19_200),
4968
5001
  maxBlockGas: tryAsU64(3_500_000_000),
4969
5002
  maxRefineGas: tryAsU64(5_000_000_000),
5003
+ maxLookupAnchorAge: tryAsU32(14_400),
4970
5004
  });
4971
5005
 
4972
5006
  /**
@@ -5550,6 +5584,39 @@ declare namespace disputes {
5550
5584
  */
5551
5585
  type BeefyHash = Opaque<OpaqueHash, "BeefyHash">;
5552
5586
 
5587
+ /** Authorizer hash. */
5588
+ type AuthorizerHash = Opaque<OpaqueHash, "AuthorizerHash">;
5589
+
5590
+ /** Blake2B hash of a work package. */
5591
+ type WorkPackageHash = Opaque<OpaqueHash, "WorkPackageHash">;
5592
+
5593
+ /** Work package exported segments merkle root hash. */
5594
+ type ExportsRootHash = Opaque<OpaqueHash, "ExportsRootHash">;
5595
+
5596
+ /**
5597
+ * Mapping between work package hash and root hash of it's exports.
5598
+ *
5599
+ * Used to construct a dictionary.
5600
+ */
5601
+ declare class WorkPackageInfo extends WithDebug {
5602
+ static Codec = codec.Class(WorkPackageInfo, {
5603
+ workPackageHash: codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(),
5604
+ segmentTreeRoot: codec.bytes(HASH_SIZE).asOpaque<ExportsRootHash>(),
5605
+ });
5606
+
5607
+ private constructor(
5608
+ /** Hash of the described work package. */
5609
+ readonly workPackageHash: WorkPackageHash,
5610
+ /** Exports root hash. */
5611
+ readonly segmentTreeRoot: ExportsRootHash,
5612
+ ) {
5613
+ super();
5614
+ }
5615
+
5616
+ static create({ workPackageHash, segmentTreeRoot }: CodecRecord<WorkPackageInfo>) {
5617
+ return new WorkPackageInfo(workPackageHash, segmentTreeRoot);
5618
+ }
5619
+ }
5553
5620
  /**
5554
5621
  * `X`: Refinement Context - state of the chain at the point
5555
5622
  * that the report's corresponding work-package was evaluated.
@@ -5595,12 +5662,17 @@ declare class RefineContext extends WithDebug {
5595
5662
  }
5596
5663
  }
5597
5664
 
5665
+ type refineContext_AuthorizerHash = AuthorizerHash;
5598
5666
  type refineContext_BeefyHash = BeefyHash;
5667
+ type refineContext_ExportsRootHash = ExportsRootHash;
5599
5668
  type refineContext_RefineContext = RefineContext;
5600
5669
  declare const refineContext_RefineContext: typeof RefineContext;
5670
+ type refineContext_WorkPackageHash = WorkPackageHash;
5671
+ type refineContext_WorkPackageInfo = WorkPackageInfo;
5672
+ declare const refineContext_WorkPackageInfo: typeof WorkPackageInfo;
5601
5673
  declare namespace refineContext {
5602
- export { refineContext_RefineContext as RefineContext };
5603
- export type { refineContext_BeefyHash as BeefyHash };
5674
+ export { refineContext_RefineContext as RefineContext, refineContext_WorkPackageInfo as WorkPackageInfo };
5675
+ export type { refineContext_AuthorizerHash as AuthorizerHash, refineContext_BeefyHash as BeefyHash, refineContext_ExportsRootHash as ExportsRootHash, refineContext_WorkPackageHash as WorkPackageHash };
5604
5676
  }
5605
5677
 
5606
5678
  /** `W_E`: The basic size of erasure-coded pieces in octets. See equation H.6. */
@@ -5925,17 +5997,13 @@ declare enum WorkExecResultKind {
5925
5997
  /** `☇`: unexpected program termination. */
5926
5998
  panic = 2,
5927
5999
  /** `⊚`: the number of exports made was invalidly reported. */
5928
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
5929
- incorrectNumberOfExports = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 3 : -1,
6000
+ incorrectNumberOfExports = 3,
5930
6001
  /** `⊖`: the size of the digest (refinement output) would cross the acceptable limit. */
5931
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
5932
- digestTooBig = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 4 : -1,
6002
+ digestTooBig = 4,
5933
6003
  /** `BAD`: service code was not available for lookup in state. */
5934
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
5935
- badCode = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 5 : 3,
6004
+ badCode = 5,
5936
6005
  /** `BIG`: the code was too big (beyond the maximum allowed size `W_C`) */
5937
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
5938
- codeOversize = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 6 : 4,
6006
+ codeOversize = 6,
5939
6007
  }
5940
6008
 
5941
6009
  /** The execution result of some work-package. */
@@ -6093,14 +6161,6 @@ declare namespace workResult {
6093
6161
  };
6094
6162
  }
6095
6163
 
6096
- /** Authorizer hash. */
6097
- type AuthorizerHash = Opaque<OpaqueHash, "AuthorizerHash">;
6098
-
6099
- /** Blake2B hash of a work package. */
6100
- type WorkPackageHash = Opaque<OpaqueHash, "WorkPackageHash">;
6101
- /** Work package exported segments merkle root hash. */
6102
- type ExportsRootHash = Opaque<OpaqueHash, "ExportsRootHash">;
6103
-
6104
6164
  /**
6105
6165
  * Details about the work package being reported on.
6106
6166
  *
@@ -6135,31 +6195,6 @@ declare class WorkPackageSpec extends WithDebug {
6135
6195
  }
6136
6196
  }
6137
6197
 
6138
- /**
6139
- * Mapping between work package hash and root hash of it's exports.
6140
- *
6141
- * Used to construct a dictionary.
6142
- */
6143
- declare class WorkPackageInfo extends WithDebug {
6144
- static Codec = codec.Class(WorkPackageInfo, {
6145
- workPackageHash: codec.bytes(HASH_SIZE).asOpaque<WorkPackageHash>(),
6146
- segmentTreeRoot: codec.bytes(HASH_SIZE).asOpaque<ExportsRootHash>(),
6147
- });
6148
-
6149
- private constructor(
6150
- /** Hash of the described work package. */
6151
- readonly workPackageHash: WorkPackageHash,
6152
- /** Exports root hash. */
6153
- readonly segmentTreeRoot: ExportsRootHash,
6154
- ) {
6155
- super();
6156
- }
6157
-
6158
- static create({ workPackageHash, segmentTreeRoot }: CodecRecord<WorkPackageInfo>) {
6159
- return new WorkPackageInfo(workPackageHash, segmentTreeRoot);
6160
- }
6161
- }
6162
-
6163
6198
  /**
6164
6199
  * A report of execution of some work package.
6165
6200
  *
@@ -6238,18 +6273,15 @@ declare const WorkReportCodec = codec.Class(WorkReportNoCodec, {
6238
6273
  declare const WorkReportCodecPre070 = codec.Class(WorkReportNoCodec, {
6239
6274
  workPackageSpec: WorkPackageSpec.Codec,
6240
6275
  context: RefineContext.Codec,
6241
- coreIndex:
6242
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_5) && !Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5)
6243
- ? codec.varU32.convert(
6244
- (o) => tryAsU32(o),
6245
- (i) => {
6246
- if (!isU16(i)) {
6247
- throw new Error(`Core index exceeds U16: ${i}`);
6248
- }
6249
- return tryAsCoreIndex(i);
6250
- },
6251
- )
6252
- : codec.u16.asOpaque<CoreIndex>(),
6276
+ coreIndex: codec.varU32.convert(
6277
+ (o) => tryAsU32(o),
6278
+ (i) => {
6279
+ if (!isU16(i)) {
6280
+ throw new Error(`Core index exceeds U16: ${i}`);
6281
+ }
6282
+ return tryAsCoreIndex(i);
6283
+ },
6284
+ ),
6253
6285
  authorizerHash: codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(),
6254
6286
  authorizationOutput: codec.blob,
6255
6287
  segmentRootLookup: readonlyArray(codec.sequenceVarLen(WorkPackageInfo.Codec)),
@@ -6266,11 +6298,6 @@ declare class WorkReport extends WorkReportNoCodec {
6266
6298
  : WorkReportCodecPre070;
6267
6299
  }
6268
6300
 
6269
- type workReport_AuthorizerHash = AuthorizerHash;
6270
- type workReport_ExportsRootHash = ExportsRootHash;
6271
- type workReport_WorkPackageHash = WorkPackageHash;
6272
- type workReport_WorkPackageInfo = WorkPackageInfo;
6273
- declare const workReport_WorkPackageInfo: typeof WorkPackageInfo;
6274
6301
  type workReport_WorkPackageSpec = WorkPackageSpec;
6275
6302
  declare const workReport_WorkPackageSpec: typeof WorkPackageSpec;
6276
6303
  type workReport_WorkReport = WorkReport;
@@ -6280,8 +6307,13 @@ declare const workReport_WorkReportCodecPre070: typeof WorkReportCodecPre070;
6280
6307
  type workReport_WorkReportNoCodec = WorkReportNoCodec;
6281
6308
  declare const workReport_WorkReportNoCodec: typeof WorkReportNoCodec;
6282
6309
  declare namespace workReport {
6283
- export { workReport_WorkPackageInfo as WorkPackageInfo, workReport_WorkPackageSpec as WorkPackageSpec, workReport_WorkReport as WorkReport, workReport_WorkReportCodec as WorkReportCodec, workReport_WorkReportCodecPre070 as WorkReportCodecPre070, workReport_WorkReportNoCodec as WorkReportNoCodec };
6284
- export type { workReport_AuthorizerHash as AuthorizerHash, workReport_ExportsRootHash as ExportsRootHash, workReport_WorkPackageHash as WorkPackageHash };
6310
+ export {
6311
+ workReport_WorkPackageSpec as WorkPackageSpec,
6312
+ workReport_WorkReport as WorkReport,
6313
+ workReport_WorkReportCodec as WorkReportCodec,
6314
+ workReport_WorkReportCodecPre070 as WorkReportCodecPre070,
6315
+ workReport_WorkReportNoCodec as WorkReportNoCodec,
6316
+ };
6285
6317
  }
6286
6318
 
6287
6319
  /**
@@ -6516,6 +6548,22 @@ declare class ValidatorKeys extends WithDebug {
6516
6548
  }
6517
6549
  }
6518
6550
 
6551
+ declare class TicketsMarker extends WithDebug {
6552
+ static Codec = codec.Class(TicketsMarker, {
6553
+ tickets: codecPerEpochBlock(Ticket.Codec),
6554
+ });
6555
+
6556
+ static create({ tickets }: CodecRecord<TicketsMarker>) {
6557
+ return new TicketsMarker(tickets);
6558
+ }
6559
+
6560
+ private constructor(public readonly tickets: PerEpochBlock<Ticket>) {
6561
+ super();
6562
+ }
6563
+ }
6564
+
6565
+ type TicketsMarkerView = DescribedBy<typeof TicketsMarker.Codec.View>;
6566
+
6519
6567
  /**
6520
6568
  * For the first block in a new epoch, the epoch marker is set
6521
6569
  * and contains the epoch randomness and validator keys
@@ -6546,6 +6594,8 @@ declare class EpochMarker extends WithDebug {
6546
6594
  }
6547
6595
  }
6548
6596
 
6597
+ type EpochMarkerView = DescribedBy<typeof EpochMarker.Codec.View>;
6598
+
6549
6599
  /**
6550
6600
  * Return an encoded header without the seal components.
6551
6601
  *
@@ -6568,7 +6618,7 @@ declare const legacyDescriptor = {
6568
6618
  extrinsicHash: codec.bytes(HASH_SIZE).asOpaque<ExtrinsicHash>(),
6569
6619
  timeSlotIndex: codec.u32.asOpaque<TimeSlot>(),
6570
6620
  epochMarker: codec.optional(EpochMarker.Codec),
6571
- ticketsMarker: codec.optional(codecPerEpochBlock(Ticket.Codec)),
6621
+ ticketsMarker: codec.optional(TicketsMarker.Codec),
6572
6622
  offendersMarker: codec.sequenceVarLen(codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>()),
6573
6623
  bandersnatchBlockAuthorIndex: codec.u16.asOpaque<ValidatorIndex>(),
6574
6624
  entropySource: codec.bytes(BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque<BandersnatchVrfSignature>(),
@@ -6591,7 +6641,7 @@ declare class Header extends WithDebug {
6591
6641
  extrinsicHash: codec.bytes(HASH_SIZE).asOpaque<ExtrinsicHash>(),
6592
6642
  timeSlotIndex: codec.u32.asOpaque<TimeSlot>(),
6593
6643
  epochMarker: codec.optional(EpochMarker.Codec),
6594
- ticketsMarker: codec.optional(codecPerEpochBlock(Ticket.Codec)),
6644
+ ticketsMarker: codec.optional(TicketsMarker.Codec),
6595
6645
  bandersnatchBlockAuthorIndex: codec.u16.asOpaque<ValidatorIndex>(),
6596
6646
  entropySource: codec.bytes(BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque<BandersnatchVrfSignature>(),
6597
6647
  offendersMarker: codec.sequenceVarLen(codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>()),
@@ -6624,7 +6674,7 @@ declare class Header extends WithDebug {
6624
6674
  * `H_w`: Winning tickets provides the series of 600 slot sealing "tickets"
6625
6675
  * for the next epoch.
6626
6676
  */
6627
- public ticketsMarker: PerEpochBlock<Ticket> | null = null;
6677
+ public ticketsMarker: TicketsMarker | null = null;
6628
6678
  /** `H_i`: Block author's index in the current validator set. */
6629
6679
  public bandersnatchBlockAuthorIndex: ValidatorIndex = tryAsValidatorIndex(0);
6630
6680
  /** `H_v`: Entropy-yielding VRF signature. */
@@ -6809,6 +6859,7 @@ type index$j_EntropyHash = EntropyHash;
6809
6859
  type index$j_Epoch = Epoch;
6810
6860
  type index$j_EpochMarker = EpochMarker;
6811
6861
  declare const index$j_EpochMarker: typeof EpochMarker;
6862
+ type index$j_EpochMarkerView = EpochMarkerView;
6812
6863
  type index$j_Extrinsic = Extrinsic;
6813
6864
  declare const index$j_Extrinsic: typeof Extrinsic;
6814
6865
  type index$j_ExtrinsicHash = ExtrinsicHash;
@@ -6828,6 +6879,9 @@ type index$j_SegmentIndex = SegmentIndex;
6828
6879
  type index$j_ServiceGas = ServiceGas;
6829
6880
  type index$j_ServiceId = ServiceId;
6830
6881
  type index$j_StateRootHash = StateRootHash;
6882
+ type index$j_TicketsMarker = TicketsMarker;
6883
+ declare const index$j_TicketsMarker: typeof TicketsMarker;
6884
+ type index$j_TicketsMarkerView = TicketsMarkerView;
6831
6885
  type index$j_TimeSlot = TimeSlot;
6832
6886
  type index$j_ValidatorIndex = ValidatorIndex;
6833
6887
  type index$j_ValidatorKeys = ValidatorKeys;
@@ -6860,8 +6914,8 @@ declare const index$j_workPackage: typeof workPackage;
6860
6914
  declare const index$j_workReport: typeof workReport;
6861
6915
  declare const index$j_workResult: typeof workResult;
6862
6916
  declare namespace index$j {
6863
- export { index$j_Block as Block, index$j_EpochMarker as EpochMarker, index$j_Extrinsic as Extrinsic, index$j_Header as Header, index$j_HeaderViewWithHash as HeaderViewWithHash, index$j_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$j_ValidatorKeys as ValidatorKeys, index$j_W_E as W_E, index$j_W_S as W_S, index$j_assurances as assurances, index$j_codecPerEpochBlock as codecPerEpochBlock, index$j_codecPerValidator as codecPerValidator, codec as codecUtils, index$j_disputes as disputes, index$j_encodeUnsealedHeader as encodeUnsealedHeader, index$j_guarantees as guarantees, index$j_headerViewWithHashCodec as headerViewWithHashCodec, index$j_legacyDescriptor as legacyDescriptor, index$j_preimage as preimage, index$j_refineContext as refineContext, index$j_tickets as tickets, index$j_tryAsCoreIndex as tryAsCoreIndex, index$j_tryAsEpoch as tryAsEpoch, index$j_tryAsPerEpochBlock as tryAsPerEpochBlock, index$j_tryAsPerValidator as tryAsPerValidator, index$j_tryAsSegmentIndex as tryAsSegmentIndex, index$j_tryAsServiceGas as tryAsServiceGas, index$j_tryAsServiceId as tryAsServiceId, index$j_tryAsTimeSlot as tryAsTimeSlot, index$j_tryAsValidatorIndex as tryAsValidatorIndex, index$j_workItem as workItem, index$j_workPackage as workPackage, index$j_workReport as workReport, index$j_workResult as workResult };
6864
- export type { index$j_BlockView as BlockView, index$j_CodeHash as CodeHash, index$j_CoreIndex as CoreIndex, index$j_EntropyHash as EntropyHash, index$j_Epoch as Epoch, index$j_ExtrinsicHash as ExtrinsicHash, index$j_ExtrinsicView as ExtrinsicView, index$j_HeaderHash as HeaderHash, index$j_HeaderView as HeaderView, index$j_PerEpochBlock as PerEpochBlock, index$j_PerValidator as PerValidator, index$j_SEGMENT_BYTES as SEGMENT_BYTES, index$j_Segment as Segment, index$j_SegmentIndex as SegmentIndex, index$j_ServiceGas as ServiceGas, index$j_ServiceId as ServiceId, index$j_StateRootHash as StateRootHash, index$j_TimeSlot as TimeSlot, index$j_ValidatorIndex as ValidatorIndex, index$j_WorkReportHash as WorkReportHash };
6917
+ export { index$j_Block as Block, index$j_EpochMarker as EpochMarker, index$j_Extrinsic as Extrinsic, index$j_Header as Header, index$j_HeaderViewWithHash as HeaderViewWithHash, index$j_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$j_TicketsMarker as TicketsMarker, index$j_ValidatorKeys as ValidatorKeys, index$j_W_E as W_E, index$j_W_S as W_S, index$j_assurances as assurances, index$j_codecPerEpochBlock as codecPerEpochBlock, index$j_codecPerValidator as codecPerValidator, codec as codecUtils, index$j_disputes as disputes, index$j_encodeUnsealedHeader as encodeUnsealedHeader, index$j_guarantees as guarantees, index$j_headerViewWithHashCodec as headerViewWithHashCodec, index$j_legacyDescriptor as legacyDescriptor, index$j_preimage as preimage, index$j_refineContext as refineContext, index$j_tickets as tickets, index$j_tryAsCoreIndex as tryAsCoreIndex, index$j_tryAsEpoch as tryAsEpoch, index$j_tryAsPerEpochBlock as tryAsPerEpochBlock, index$j_tryAsPerValidator as tryAsPerValidator, index$j_tryAsSegmentIndex as tryAsSegmentIndex, index$j_tryAsServiceGas as tryAsServiceGas, index$j_tryAsServiceId as tryAsServiceId, index$j_tryAsTimeSlot as tryAsTimeSlot, index$j_tryAsValidatorIndex as tryAsValidatorIndex, index$j_workItem as workItem, index$j_workPackage as workPackage, index$j_workReport as workReport, index$j_workResult as workResult };
6918
+ export type { index$j_BlockView as BlockView, index$j_CodeHash as CodeHash, index$j_CoreIndex as CoreIndex, index$j_EntropyHash as EntropyHash, index$j_Epoch as Epoch, index$j_EpochMarkerView as EpochMarkerView, index$j_ExtrinsicHash as ExtrinsicHash, index$j_ExtrinsicView as ExtrinsicView, index$j_HeaderHash as HeaderHash, index$j_HeaderView as HeaderView, index$j_PerEpochBlock as PerEpochBlock, index$j_PerValidator as PerValidator, index$j_SEGMENT_BYTES as SEGMENT_BYTES, index$j_Segment as Segment, index$j_SegmentIndex as SegmentIndex, index$j_ServiceGas as ServiceGas, index$j_ServiceId as ServiceId, index$j_StateRootHash as StateRootHash, index$j_TicketsMarkerView as TicketsMarkerView, index$j_TimeSlot as TimeSlot, index$j_ValidatorIndex as ValidatorIndex, index$j_WorkReportHash as WorkReportHash };
6865
6919
  }
6866
6920
 
6867
6921
  /** A type that can be read from a JSON-parsed object. */
@@ -7603,7 +7657,7 @@ declare const epochMark = json.object<JsonEpochMarker, EpochMarker>(
7603
7657
  (x) => EpochMarker.create({ entropy: x.entropy, ticketsEntropy: x.tickets_entropy, validators: x.validators }),
7604
7658
  );
7605
7659
 
7606
- declare const ticketsMark = json.object<Ticket>(
7660
+ declare const ticket = json.object<Ticket>(
7607
7661
  {
7608
7662
  id: fromJson.bytes32(),
7609
7663
  attempt: fromJson.ticketAttempt,
@@ -7617,7 +7671,7 @@ type JsonHeader = {
7617
7671
  extrinsic_hash: ExtrinsicHash;
7618
7672
  slot: TimeSlot;
7619
7673
  epoch_mark?: EpochMarker;
7620
- tickets_mark?: KnownSizeArray<Ticket, "EpochLength">;
7674
+ tickets_mark?: Ticket[];
7621
7675
  offenders_mark: Ed25519Key[];
7622
7676
  author_index: ValidatorIndex;
7623
7677
  entropy_source: BandersnatchVrfSignature;
@@ -7631,7 +7685,7 @@ declare const headerFromJson = json.object<JsonHeader, Header>(
7631
7685
  extrinsic_hash: fromJson.bytes32(),
7632
7686
  slot: "number",
7633
7687
  epoch_mark: json.optional(epochMark),
7634
- tickets_mark: json.optional<Ticket[]>(json.array(ticketsMark)),
7688
+ tickets_mark: json.optional(json.array(ticket)),
7635
7689
  offenders_mark: json.array(fromJson.bytes32<Ed25519Key>()),
7636
7690
  author_index: "number",
7637
7691
  entropy_source: bandersnatchVrfSignature,
@@ -7655,7 +7709,10 @@ declare const headerFromJson = json.object<JsonHeader, Header>(
7655
7709
  header.extrinsicHash = extrinsic_hash;
7656
7710
  header.timeSlotIndex = slot;
7657
7711
  header.epochMarker = epoch_mark ?? null;
7658
- header.ticketsMarker = tickets_mark ?? null;
7712
+ header.ticketsMarker =
7713
+ tickets_mark === undefined || tickets_mark === null
7714
+ ? null
7715
+ : TicketsMarker.create({ tickets: asOpaqueType(tickets_mark) });
7659
7716
  header.offendersMarker = offenders_mark;
7660
7717
  header.bandersnatchBlockAuthorIndex = author_index;
7661
7718
  header.entropySource = entropy_source;
@@ -7705,9 +7762,9 @@ declare const index$h_preimagesExtrinsicFromJson: typeof preimagesExtrinsicFromJ
7705
7762
  declare const index$h_refineContextFromJson: typeof refineContextFromJson;
7706
7763
  declare const index$h_reportGuaranteeFromJson: typeof reportGuaranteeFromJson;
7707
7764
  declare const index$h_segmentRootLookupItemFromJson: typeof segmentRootLookupItemFromJson;
7765
+ declare const index$h_ticket: typeof ticket;
7708
7766
  declare const index$h_ticketEnvelopeFromJson: typeof ticketEnvelopeFromJson;
7709
7767
  declare const index$h_ticketsExtrinsicFromJson: typeof ticketsExtrinsicFromJson;
7710
- declare const index$h_ticketsMark: typeof ticketsMark;
7711
7768
  declare const index$h_validatorKeysFromJson: typeof validatorKeysFromJson;
7712
7769
  declare const index$h_validatorSignatureFromJson: typeof validatorSignatureFromJson;
7713
7770
  declare const index$h_verdictFromJson: typeof verdictFromJson;
@@ -7717,7 +7774,7 @@ declare const index$h_workRefineLoadFromJson: typeof workRefineLoadFromJson;
7717
7774
  declare const index$h_workReportFromJson: typeof workReportFromJson;
7718
7775
  declare const index$h_workResultFromJson: typeof workResultFromJson;
7719
7776
  declare namespace index$h {
7720
- export { index$h_bandersnatchVrfSignature as bandersnatchVrfSignature, index$h_blockFromJson as blockFromJson, index$h_culpritFromJson as culpritFromJson, index$h_disputesExtrinsicFromJson as disputesExtrinsicFromJson, index$h_epochMark as epochMark, index$h_faultFromJson as faultFromJson, index$h_fromJson as fromJson, index$h_getAssurancesExtrinsicFromJson as getAssurancesExtrinsicFromJson, index$h_getAvailabilityAssuranceFromJson as getAvailabilityAssuranceFromJson, index$h_getExtrinsicFromJson as getExtrinsicFromJson, index$h_guaranteesExtrinsicFromJson as guaranteesExtrinsicFromJson, index$h_headerFromJson as headerFromJson, index$h_judgementFromJson as judgementFromJson, index$h_preimageFromJson as preimageFromJson, index$h_preimagesExtrinsicFromJson as preimagesExtrinsicFromJson, index$h_refineContextFromJson as refineContextFromJson, index$h_reportGuaranteeFromJson as reportGuaranteeFromJson, index$h_segmentRootLookupItemFromJson as segmentRootLookupItemFromJson, index$h_ticketEnvelopeFromJson as ticketEnvelopeFromJson, index$h_ticketsExtrinsicFromJson as ticketsExtrinsicFromJson, index$h_ticketsMark as ticketsMark, index$h_validatorKeysFromJson as validatorKeysFromJson, index$h_validatorSignatureFromJson as validatorSignatureFromJson, index$h_verdictFromJson as verdictFromJson, index$h_workExecResultFromJson as workExecResultFromJson, index$h_workPackageSpecFromJson as workPackageSpecFromJson, index$h_workRefineLoadFromJson as workRefineLoadFromJson, index$h_workReportFromJson as workReportFromJson, index$h_workResultFromJson as workResultFromJson };
7777
+ export { index$h_bandersnatchVrfSignature as bandersnatchVrfSignature, index$h_blockFromJson as blockFromJson, index$h_culpritFromJson as culpritFromJson, index$h_disputesExtrinsicFromJson as disputesExtrinsicFromJson, index$h_epochMark as epochMark, index$h_faultFromJson as faultFromJson, index$h_fromJson as fromJson, index$h_getAssurancesExtrinsicFromJson as getAssurancesExtrinsicFromJson, index$h_getAvailabilityAssuranceFromJson as getAvailabilityAssuranceFromJson, index$h_getExtrinsicFromJson as getExtrinsicFromJson, index$h_guaranteesExtrinsicFromJson as guaranteesExtrinsicFromJson, index$h_headerFromJson as headerFromJson, index$h_judgementFromJson as judgementFromJson, index$h_preimageFromJson as preimageFromJson, index$h_preimagesExtrinsicFromJson as preimagesExtrinsicFromJson, index$h_refineContextFromJson as refineContextFromJson, index$h_reportGuaranteeFromJson as reportGuaranteeFromJson, index$h_segmentRootLookupItemFromJson as segmentRootLookupItemFromJson, index$h_ticket as ticket, index$h_ticketEnvelopeFromJson as ticketEnvelopeFromJson, index$h_ticketsExtrinsicFromJson as ticketsExtrinsicFromJson, index$h_validatorKeysFromJson as validatorKeysFromJson, index$h_validatorSignatureFromJson as validatorSignatureFromJson, index$h_verdictFromJson as verdictFromJson, index$h_workExecResultFromJson as workExecResultFromJson, index$h_workPackageSpecFromJson as workPackageSpecFromJson, index$h_workRefineLoadFromJson as workRefineLoadFromJson, index$h_workReportFromJson as workReportFromJson, index$h_workResultFromJson as workResultFromJson };
7721
7778
  export type { index$h_CamelToSnake as CamelToSnake, index$h_JsonCulprit as JsonCulprit, index$h_JsonEpochMarker as JsonEpochMarker, index$h_JsonFault as JsonFault, index$h_JsonHeader as JsonHeader, index$h_JsonJudgement as JsonJudgement, index$h_JsonObject as JsonObject, index$h_JsonRefineContext as JsonRefineContext, index$h_JsonReportGuarantee as JsonReportGuarantee, index$h_JsonVerdict as JsonVerdict, index$h_JsonWorkExecResult as JsonWorkExecResult, index$h_JsonWorkRefineLoad as JsonWorkRefineLoad, index$h_JsonWorkReport as JsonWorkReport, index$h_JsonWorkResult as JsonWorkResult };
7722
7779
  }
7723
7780
 
@@ -7827,7 +7884,7 @@ declare const DEV_CONFIG = "dev";
7827
7884
  declare const DEFAULT_CONFIG = "default";
7828
7885
 
7829
7886
  declare const NODE_DEFAULTS = {
7830
- name: os.hostname(),
7887
+ name: isBrowser() ? "browser" : os.hostname(),
7831
7888
  config: DEFAULT_CONFIG,
7832
7889
  };
7833
7890
 
@@ -7882,11 +7939,11 @@ declare class NodeConfiguration {
7882
7939
 
7883
7940
  declare function loadConfig(configPath: string): NodeConfiguration {
7884
7941
  if (configPath === DEFAULT_CONFIG) {
7885
- return parseFromJson(defaultConfigJson, NodeConfiguration.fromJson);
7942
+ return parseFromJson(configs.default, NodeConfiguration.fromJson);
7886
7943
  }
7887
7944
 
7888
7945
  if (configPath === DEV_CONFIG) {
7889
- return parseFromJson(devConfigJson, NodeConfiguration.fromJson);
7946
+ return parseFromJson(configs.dev, NodeConfiguration.fromJson);
7890
7947
  }
7891
7948
 
7892
7949
  try {
@@ -8227,20 +8284,7 @@ declare class AutoAccumulate {
8227
8284
  declare class PrivilegedServices {
8228
8285
  static Codec = codec.Class(PrivilegedServices, {
8229
8286
  manager: codec.u32.asOpaque<ServiceId>(),
8230
- authManager: Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
8231
- ? codecPerCore(codec.u32.asOpaque<ServiceId>())
8232
- : codecWithContext((ctx) =>
8233
- codec.u32.asOpaque<ServiceId>().convert(
8234
- // NOTE: [MaSo] In a compatibility mode we are always updating all entries
8235
- // (all the entries are the same)
8236
- // so it doesn't matter which one we take here.
8237
- (perCore: PerCore<ServiceId>) => perCore[0],
8238
- (serviceId: ServiceId) => {
8239
- const array = new Array(ctx.coresCount).fill(serviceId);
8240
- return tryAsPerCore(array, ctx);
8241
- },
8242
- ),
8243
- ),
8287
+ authManager: codecPerCore(codec.u32.asOpaque<ServiceId>()),
8244
8288
  validatorsManager: codec.u32.asOpaque<ServiceId>(),
8245
8289
  autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
8246
8290
  });
@@ -8407,85 +8451,31 @@ declare class Mountain<H extends OpaqueHash> {
8407
8451
  return Mountain.fromChildren(hasher, [this, other]);
8408
8452
  }
8409
8453
 
8410
- toString() {
8411
- return `${this.size} @ ${this.peak}`;
8412
- }
8413
- }
8414
-
8415
- type index$f_MerkleMountainRange<H extends OpaqueHash> = MerkleMountainRange<H>;
8416
- declare const index$f_MerkleMountainRange: typeof MerkleMountainRange;
8417
- type index$f_MmrHasher<H extends OpaqueHash> = MmrHasher<H>;
8418
- type index$f_MmrPeaks<H extends OpaqueHash> = MmrPeaks<H>;
8419
- type index$f_Mountain<H extends OpaqueHash> = Mountain<H>;
8420
- declare const index$f_Mountain: typeof Mountain;
8421
- declare const index$f_SUPER_PEAK_STRING: typeof SUPER_PEAK_STRING;
8422
- declare namespace index$f {
8423
- export { index$f_MerkleMountainRange as MerkleMountainRange, index$f_Mountain as Mountain, index$f_SUPER_PEAK_STRING as SUPER_PEAK_STRING };
8424
- export type { index$f_MmrHasher as MmrHasher, index$f_MmrPeaks as MmrPeaks };
8425
- }
8426
-
8427
- /**
8428
- * `H = 8`: The size of recent history, in blocks.
8429
- *
8430
- * https://graypaper.fluffylabs.dev/#/579bd12/416300416500
8431
- */
8432
- declare const MAX_RECENT_HISTORY = 8;
8433
- type MAX_RECENT_HISTORY = typeof MAX_RECENT_HISTORY;
8434
-
8435
- type LegacyBlocksState = KnownSizeArray<LegacyBlockState, `0..${typeof MAX_RECENT_HISTORY}`>;
8436
-
8437
- declare class LegacyBlockState extends WithDebug {
8438
- static Codec = codec.Class(LegacyBlockState, {
8439
- headerHash: codec.bytes(HASH_SIZE).asOpaque<HeaderHash>(),
8440
- mmr: codec.object({
8441
- peaks: readonlyArray(codec.sequenceVarLen(codec.optional(codec.bytes(HASH_SIZE)))),
8442
- }),
8443
- postStateRoot: codec.bytes(HASH_SIZE).asOpaque<StateRootHash>(),
8444
- reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
8445
- });
8446
-
8447
- static create({ headerHash, mmr, postStateRoot, reported }: CodecRecord<LegacyBlockState>) {
8448
- return new LegacyBlockState(headerHash, mmr, postStateRoot, reported);
8449
- }
8450
-
8451
- private constructor(
8452
- /** Header hash. */
8453
- public readonly headerHash: HeaderHash,
8454
- /** Merkle mountain range peaks. */
8455
- public readonly mmr: MmrPeaks<KeccakHash>,
8456
- /** Posterior state root filled in with a 1-block delay. */
8457
- public postStateRoot: StateRootHash,
8458
- /** Reported work packages (no more than number of cores). */
8459
- public readonly reported: HashDictionary<WorkPackageHash, WorkPackageInfo>,
8460
- ) {
8461
- super();
8462
- }
8463
- }
8464
-
8465
- declare class LegacyRecentBlocks extends WithDebug {
8466
- static Codec = codec.Class(LegacyRecentBlocks, {
8467
- blocks: codecKnownSizeArray(LegacyBlockState.Codec, {
8468
- minLength: 0,
8469
- maxLength: MAX_RECENT_HISTORY,
8470
- typicalLength: MAX_RECENT_HISTORY,
8471
- }),
8472
- });
8473
-
8474
- static create(a: CodecRecord<LegacyRecentBlocks>) {
8475
- return new LegacyRecentBlocks(a.blocks);
8476
- }
8477
-
8478
- private constructor(
8479
- /**
8480
- * Most recent blocks.
8481
- * https://graypaper.fluffylabs.dev/#/85129da/0fb6010fb601?v=0.6.3
8482
- */
8483
- public readonly blocks: LegacyBlocksState,
8484
- ) {
8485
- super();
8454
+ toString() {
8455
+ return `${this.size} @ ${this.peak}`;
8486
8456
  }
8487
8457
  }
8488
8458
 
8459
+ type index$f_MerkleMountainRange<H extends OpaqueHash> = MerkleMountainRange<H>;
8460
+ declare const index$f_MerkleMountainRange: typeof MerkleMountainRange;
8461
+ type index$f_MmrHasher<H extends OpaqueHash> = MmrHasher<H>;
8462
+ type index$f_MmrPeaks<H extends OpaqueHash> = MmrPeaks<H>;
8463
+ type index$f_Mountain<H extends OpaqueHash> = Mountain<H>;
8464
+ declare const index$f_Mountain: typeof Mountain;
8465
+ declare const index$f_SUPER_PEAK_STRING: typeof SUPER_PEAK_STRING;
8466
+ declare namespace index$f {
8467
+ export { index$f_MerkleMountainRange as MerkleMountainRange, index$f_Mountain as Mountain, index$f_SUPER_PEAK_STRING as SUPER_PEAK_STRING };
8468
+ export type { index$f_MmrHasher as MmrHasher, index$f_MmrPeaks as MmrPeaks };
8469
+ }
8470
+
8471
+ /**
8472
+ * `H = 8`: The size of recent history, in blocks.
8473
+ *
8474
+ * https://graypaper.fluffylabs.dev/#/579bd12/416300416500
8475
+ */
8476
+ declare const MAX_RECENT_HISTORY = 8;
8477
+ type MAX_RECENT_HISTORY = typeof MAX_RECENT_HISTORY;
8478
+
8489
8479
  /** Array of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
8490
8480
  type BlocksState = KnownSizeArray<BlockState, `0..${typeof MAX_RECENT_HISTORY}`>;
8491
8481
 
@@ -8549,87 +8539,54 @@ declare class RecentBlocks extends WithDebug {
8549
8539
  }
8550
8540
 
8551
8541
  /**
8552
- * Unified recent history of blocks that handles both legacy and current formats.
8542
+ * Recent history of blocks.
8553
8543
  *
8554
- * https://graypaper.fluffylabs.dev/#/85129da/38cb0138cb01?v=0.6.3
8555
8544
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
8556
8545
  */
8557
8546
  declare class RecentBlocksHistory extends WithDebug {
8558
8547
  static Codec = Descriptor.new<RecentBlocksHistory>(
8559
8548
  "RecentBlocksHistory",
8560
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? RecentBlocks.Codec.sizeHint : LegacyRecentBlocks.Codec.sizeHint,
8561
- (encoder, value) =>
8562
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
8563
- ? RecentBlocks.Codec.encode(encoder, value.asCurrent())
8564
- : LegacyRecentBlocks.Codec.encode(encoder, value.asLegacy()),
8549
+ RecentBlocks.Codec.sizeHint,
8550
+ (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()),
8565
8551
  (decoder) => {
8566
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
8567
- const recentBlocks = RecentBlocks.Codec.decode(decoder);
8568
- return RecentBlocksHistory.create(recentBlocks);
8569
- }
8570
- const legacyBlocks = LegacyRecentBlocks.Codec.decode(decoder);
8571
- return RecentBlocksHistory.legacyCreate(legacyBlocks);
8552
+ const recentBlocks = RecentBlocks.Codec.decode(decoder);
8553
+ return RecentBlocksHistory.create(recentBlocks);
8572
8554
  },
8573
- (_sizer) => {
8574
- return Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
8575
- ? RecentBlocks.Codec.sizeHint
8576
- : LegacyRecentBlocks.Codec.sizeHint;
8555
+ (skip) => {
8556
+ return RecentBlocks.Codec.skip(skip);
8577
8557
  },
8578
8558
  );
8579
8559
 
8580
8560
  static create(recentBlocks: RecentBlocks) {
8581
- return new RecentBlocksHistory(recentBlocks, null);
8582
- }
8583
-
8584
- static legacyCreate(legacyRecentBlocks: LegacyRecentBlocks) {
8585
- return new RecentBlocksHistory(null, legacyRecentBlocks);
8561
+ return new RecentBlocksHistory(recentBlocks);
8586
8562
  }
8587
8563
 
8588
8564
  static empty() {
8589
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
8590
- return RecentBlocksHistory.create(
8591
- RecentBlocks.create({
8592
- blocks: asKnownSize([]),
8593
- accumulationLog: { peaks: [] },
8594
- }),
8595
- );
8596
- }
8597
- return RecentBlocksHistory.legacyCreate(LegacyRecentBlocks.create({ blocks: asKnownSize([]) }));
8565
+ return RecentBlocksHistory.create(
8566
+ RecentBlocks.create({
8567
+ blocks: asKnownSize([]),
8568
+ accumulationLog: { peaks: [] },
8569
+ }),
8570
+ );
8598
8571
  }
8599
8572
 
8600
8573
  /**
8601
8574
  * Returns the block's BEEFY super peak.
8602
- *
8603
- * NOTE: The `hasher` parameter exists solely for backward compatibility with legacy block format.
8604
8575
  */
8605
- static accumulationResult(
8606
- block: BlockState | LegacyBlockState,
8607
- {
8608
- hasher,
8609
- }: {
8610
- hasher: MmrHasher<KeccakHash>;
8611
- },
8612
- ): KeccakHash {
8613
- return Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
8614
- ? (block as BlockState).accumulationResult
8615
- : MerkleMountainRange.fromPeaks(hasher, (block as LegacyBlockState).mmr).getSuperPeakHash();
8576
+ static accumulationResult(block: BlockState): KeccakHash {
8577
+ return (block as BlockState).accumulationResult;
8616
8578
  }
8617
8579
 
8618
- private constructor(
8619
- private readonly current: RecentBlocks | null,
8620
- private readonly legacy: LegacyRecentBlocks | null,
8621
- ) {
8580
+ private constructor(private readonly current: RecentBlocks | null) {
8622
8581
  super();
8623
8582
  }
8624
8583
 
8625
8584
  /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
8626
- get blocks(): readonly (BlockState | LegacyBlockState)[] {
8627
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) && this.current !== null) {
8585
+ get blocks(): readonly BlockState[] {
8586
+ if (this.current !== null) {
8628
8587
  return this.current.blocks;
8629
8588
  }
8630
- if (this.legacy !== null) {
8631
- return this.legacy.blocks;
8632
- }
8589
+
8633
8590
  throw new Error("RecentBlocksHistory is in invalid state");
8634
8591
  }
8635
8592
 
@@ -8640,15 +8597,8 @@ declare class RecentBlocksHistory extends WithDebug {
8640
8597
  return this.current;
8641
8598
  }
8642
8599
 
8643
- asLegacy() {
8644
- if (this.legacy === null) {
8645
- throw new Error("Cannot access legacy RecentBlocks format");
8646
- }
8647
- return this.legacy;
8648
- }
8649
-
8650
- updateBlocks(blocks: (BlockState | LegacyBlockState)[]): RecentBlocksHistory {
8651
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) && this.current !== null) {
8600
+ updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
8601
+ if (this.current !== null) {
8652
8602
  return RecentBlocksHistory.create(
8653
8603
  RecentBlocks.create({
8654
8604
  ...this.current,
@@ -8656,13 +8606,7 @@ declare class RecentBlocksHistory extends WithDebug {
8656
8606
  }),
8657
8607
  );
8658
8608
  }
8659
- if (this.legacy !== null) {
8660
- return RecentBlocksHistory.legacyCreate(
8661
- LegacyRecentBlocks.create({
8662
- blocks: asOpaqueType(blocks as LegacyBlockState[]),
8663
- }),
8664
- );
8665
- }
8609
+
8666
8610
  throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
8667
8611
  }
8668
8612
  }
@@ -8850,31 +8794,18 @@ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
8850
8794
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
8851
8795
  */
8852
8796
  declare class ServiceAccountInfo extends WithDebug {
8853
- static Codec = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
8854
- ? codec.Class(ServiceAccountInfo, {
8855
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
8856
- balance: codec.u64,
8857
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
8858
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
8859
- storageUtilisationBytes: codec.u64,
8860
- gratisStorage: codec.u64,
8861
- storageUtilisationCount: codec.u32,
8862
- created: codec.u32.convert((x) => x, tryAsTimeSlot),
8863
- lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
8864
- parentService: codec.u32.convert((x) => x, tryAsServiceId),
8865
- })
8866
- : codec.Class(ServiceAccountInfo, {
8867
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
8868
- balance: codec.u64,
8869
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
8870
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
8871
- storageUtilisationBytes: codec.u64,
8872
- storageUtilisationCount: codec.u32,
8873
- gratisStorage: ignoreValueWithDefault(tryAsU64(0)),
8874
- created: ignoreValueWithDefault(tryAsTimeSlot(0)),
8875
- lastAccumulation: ignoreValueWithDefault(tryAsTimeSlot(0)),
8876
- parentService: ignoreValueWithDefault(tryAsServiceId(0)),
8877
- });
8797
+ static Codec = codec.Class(ServiceAccountInfo, {
8798
+ codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
8799
+ balance: codec.u64,
8800
+ accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
8801
+ onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
8802
+ storageUtilisationBytes: codec.u64,
8803
+ gratisStorage: codec.u64,
8804
+ storageUtilisationCount: codec.u32,
8805
+ created: codec.u32.convert((x) => x, tryAsTimeSlot),
8806
+ lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
8807
+ parentService: codec.u32.convert((x) => x, tryAsServiceId),
8808
+ });
8878
8809
 
8879
8810
  static create(a: CodecRecord<ServiceAccountInfo>) {
8880
8811
  return new ServiceAccountInfo(
@@ -8896,11 +8827,6 @@ declare class ServiceAccountInfo extends WithDebug {
8896
8827
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
8897
8828
  */
8898
8829
  static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
8899
- check(
8900
- gratisStorage === tryAsU64(0) || Compatibility.isGreaterOrEqual(GpVersion.V0_6_7),
8901
- "Gratis storage cannot be non-zero before 0.6.7",
8902
- );
8903
-
8904
8830
  const storageCost =
8905
8831
  BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
8906
8832
 
@@ -9211,9 +9137,7 @@ type ServicesUpdate = {
9211
9137
  };
9212
9138
 
9213
9139
  declare const codecServiceId: Descriptor<ServiceId> =
9214
- Compatibility.isSuite(TestSuite.W3F_DAVXY) ||
9215
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
9216
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
9140
+ Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
9217
9141
  ? codec.u32.asOpaque<ServiceId>()
9218
9142
  : codec.varU32.convert(
9219
9143
  (s) => tryAsU32(s),
@@ -10255,11 +10179,6 @@ declare const index$e_InMemoryService: typeof InMemoryService;
10255
10179
  type index$e_InMemoryState = InMemoryState;
10256
10180
  declare const index$e_InMemoryState: typeof InMemoryState;
10257
10181
  type index$e_InMemoryStateFields = InMemoryStateFields;
10258
- type index$e_LegacyBlockState = LegacyBlockState;
10259
- declare const index$e_LegacyBlockState: typeof LegacyBlockState;
10260
- type index$e_LegacyBlocksState = LegacyBlocksState;
10261
- type index$e_LegacyRecentBlocks = LegacyRecentBlocks;
10262
- declare const index$e_LegacyRecentBlocks: typeof LegacyRecentBlocks;
10263
10182
  type index$e_LookupHistoryItem = LookupHistoryItem;
10264
10183
  declare const index$e_LookupHistoryItem: typeof LookupHistoryItem;
10265
10184
  type index$e_LookupHistorySlots = LookupHistorySlots;
@@ -10330,8 +10249,8 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
10330
10249
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
10331
10250
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
10332
10251
  declare namespace index$e {
10333
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LegacyBlockState as LegacyBlockState, index$e_LegacyRecentBlocks as LegacyRecentBlocks, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, 
index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
10334
- export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LegacyBlocksState as LegacyBlocksState, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
10252
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, 
index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
10253
+ export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
10335
10254
  }
10336
10255
 
10337
10256
  type StateKey$1 = Opaque<OpaqueHash, "stateKey">;
@@ -10475,7 +10394,7 @@ type StateCodec<T> = {
10475
10394
 
10476
10395
  /** Serialization for particular state entries. */
10477
10396
  declare namespace serialize {
10478
- /** C(1): https://graypaper.fluffylabs.dev/#/85129da/38a20138a201?v=0.6.3 */
10397
+ /** C(1): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b15013b1501?v=0.6.7 */
10479
10398
  export const authPools: StateCodec<State["authPools"]> = {
10480
10399
  key: stateKeys.index(StateKeyIdx.Alpha),
10481
10400
  Codec: codecPerCore(
@@ -10488,7 +10407,7 @@ declare namespace serialize {
10488
10407
  extract: (s) => s.authPools,
10489
10408
  };
10490
10409
 
10491
- /** C(2): https://graypaper.fluffylabs.dev/#/85129da/38be0138be01?v=0.6.3 */
10410
+ /** C(2): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b31013b3101?v=0.6.7 */
10492
10411
  export const authQueues: StateCodec<State["authQueues"]> = {
10493
10412
  key: stateKeys.index(StateKeyIdx.Phi),
10494
10413
  Codec: codecPerCore(
@@ -10499,7 +10418,6 @@ declare namespace serialize {
10499
10418
 
10500
10419
  /**
10501
10420
  * C(3): Recent blocks with compatibility
10502
- * https://graypaper.fluffylabs.dev/#/85129da/38cb0138cb01?v=0.6.3
10503
10421
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e013b3e01?v=0.6.7
10504
10422
  */
10505
10423
  export const recentBlocks: StateCodec<State["recentBlocks"]> = {
@@ -10508,7 +10426,7 @@ declare namespace serialize {
10508
10426
  extract: (s) => s.recentBlocks,
10509
10427
  };
10510
10428
 
10511
- /** C(4): https://graypaper.fluffylabs.dev/#/85129da/38e60138e601?v=0.6.3 */
10429
+ /** C(4): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b63013b6301?v=0.6.7 */
10512
10430
  export const safrole: StateCodec<SafroleData> = {
10513
10431
  key: stateKeys.index(StateKeyIdx.Gamma),
10514
10432
  Codec: SafroleData.Codec,
@@ -10521,63 +10439,63 @@ declare namespace serialize {
10521
10439
  }),
10522
10440
  };
10523
10441
 
10524
- /** C(5): https://graypaper.fluffylabs.dev/#/85129da/383d02383d02?v=0.6.3 */
10442
+ /** C(5): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bba013bba01?v=0.6.7 */
10525
10443
  export const disputesRecords: StateCodec<State["disputesRecords"]> = {
10526
10444
  key: stateKeys.index(StateKeyIdx.Psi),
10527
10445
  Codec: DisputesRecords.Codec,
10528
10446
  extract: (s) => s.disputesRecords,
10529
10447
  };
10530
10448
 
10531
- /** C(6): https://graypaper.fluffylabs.dev/#/85129da/387602387602?v=0.6.3 */
10449
+ /** C(6): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bf3013bf301?v=0.6.7 */
10532
10450
  export const entropy: StateCodec<State["entropy"]> = {
10533
10451
  key: stateKeys.index(StateKeyIdx.Eta),
10534
10452
  Codec: codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<EntropyHash>(), ENTROPY_ENTRIES),
10535
10453
  extract: (s) => s.entropy,
10536
10454
  };
10537
10455
 
10538
- /** C(7): https://graypaper.fluffylabs.dev/#/85129da/388302388302?v=0.6.3 */
10456
+ /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
10539
10457
  export const designatedValidators: StateCodec<State["designatedValidatorData"]> = {
10540
10458
  key: stateKeys.index(StateKeyIdx.Iota),
10541
10459
  Codec: codecPerValidator(ValidatorData.Codec),
10542
10460
  extract: (s) => s.designatedValidatorData,
10543
10461
  };
10544
10462
 
10545
- /** C(8): https://graypaper.fluffylabs.dev/#/85129da/389002389002?v=0.6.3 */
10463
+ /** C(8): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b0d023b0d02?v=0.6.7 */
10546
10464
  export const currentValidators: StateCodec<State["currentValidatorData"]> = {
10547
10465
  key: stateKeys.index(StateKeyIdx.Kappa),
10548
10466
  Codec: codecPerValidator(ValidatorData.Codec),
10549
10467
  extract: (s) => s.currentValidatorData,
10550
10468
  };
10551
10469
 
10552
- /** C(9): https://graypaper.fluffylabs.dev/#/85129da/389d02389d02?v=0.6.3 */
10470
+ /** C(9): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b1a023b1a02?v=0.6.7 */
10553
10471
  export const previousValidators: StateCodec<State["previousValidatorData"]> = {
10554
10472
  key: stateKeys.index(StateKeyIdx.Lambda),
10555
10473
  Codec: codecPerValidator(ValidatorData.Codec),
10556
10474
  extract: (s) => s.previousValidatorData,
10557
10475
  };
10558
10476
 
10559
- /** C(10): https://graypaper.fluffylabs.dev/#/85129da/38aa0238aa02?v=0.6.3 */
10477
+ /** C(10): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b27023b2702?v=0.6.7 */
10560
10478
  export const availabilityAssignment: StateCodec<State["availabilityAssignment"]> = {
10561
10479
  key: stateKeys.index(StateKeyIdx.Rho),
10562
10480
  Codec: codecPerCore(codec.optional(AvailabilityAssignment.Codec)),
10563
10481
  extract: (s) => s.availabilityAssignment,
10564
10482
  };
10565
10483
 
10566
- /** C(11): https://graypaper.fluffylabs.dev/#/85129da/38c10238c102?v=0.6.3 */
10484
+ /** C(11): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e023b3e02?v=0.6.7 */
10567
10485
  export const timeslot: StateCodec<State["timeslot"]> = {
10568
10486
  key: stateKeys.index(StateKeyIdx.Tau),
10569
10487
  Codec: codec.u32.asOpaque<TimeSlot>(),
10570
10488
  extract: (s) => s.timeslot,
10571
10489
  };
10572
10490
 
10573
- /** C(12): https://graypaper.fluffylabs.dev/#/85129da/38cf0238cf02?v=0.6.3 */
10491
+ /** C(12): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b4c023b4c02?v=0.6.7 */
10574
10492
  export const privilegedServices: StateCodec<State["privilegedServices"]> = {
10575
10493
  key: stateKeys.index(StateKeyIdx.Chi),
10576
10494
  Codec: PrivilegedServices.Codec,
10577
10495
  extract: (s) => s.privilegedServices,
10578
10496
  };
10579
10497
 
10580
- /** C(13): https://graypaper.fluffylabs.dev/#/85129da/38e10238e102?v=0.6.3 */
10498
+ /** C(13): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b5e023b5e02?v=0.6.7 */
10581
10499
  export const statistics: StateCodec<State["statistics"]> = {
10582
10500
  key: stateKeys.index(StateKeyIdx.Pi),
10583
10501
  Codec: StatisticsData.Codec,
@@ -10591,7 +10509,7 @@ declare namespace serialize {
10591
10509
  extract: (s) => s.accumulationQueue,
10592
10510
  };
10593
10511
 
10594
- /** C(15): https://graypaper.fluffylabs.dev/#/85129da/381903381903?v=0.6.3 */
10512
+ /** C(15): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b96023b9602?v=0.6.7 */
10595
10513
  export const recentlyAccumulated: StateCodec<State["recentlyAccumulated"]> = {
10596
10514
  key: stateKeys.index(StateKeyIdx.Xi),
10597
10515
  Codec: codecPerEpochBlock(
@@ -10687,27 +10605,17 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
10687
10605
  }
10688
10606
  }
10689
10607
 
10690
- declare function getLegacyKey(serviceId: ServiceId, rawKey: StorageKey): StorageKey {
10691
- const SERVICE_ID_BYTES = 4;
10692
- const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
10693
- serviceIdAndKey.set(u32AsLeBytes(serviceId));
10694
- serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
10695
- return asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
10696
- }
10697
-
10698
10608
  declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
10699
10609
  for (const { action, serviceId } of storage ?? []) {
10700
10610
  switch (action.kind) {
10701
10611
  case UpdateStorageKind.Set: {
10702
- const key = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
10703
- ? action.storage.key
10704
- : getLegacyKey(serviceId, action.storage.key);
10612
+ const key = action.storage.key;
10705
10613
  const codec = serialize.serviceStorage(serviceId, key);
10706
10614
  yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
10707
10615
  break;
10708
10616
  }
10709
10617
  case UpdateStorageKind.Remove: {
10710
- const key = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? action.key : getLegacyKey(serviceId, action.key);
10618
+ const key = action.key;
10711
10619
  const codec = serialize.serviceStorage(serviceId, key);
10712
10620
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
10713
10621
  break;
@@ -10847,7 +10755,7 @@ declare function* serializeBasicKeys(spec: ChainSpec, update: Partial<State>) {
10847
10755
  yield doSerialize(update.recentlyAccumulated, serialize.recentlyAccumulated); // C(15)
10848
10756
  }
10849
10757
 
10850
- if (update.accumulationOutputLog !== undefined && Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
10758
+ if (update.accumulationOutputLog !== undefined) {
10851
10759
  yield doSerialize(update.accumulationOutputLog, serialize.accumulationOutputLog); // C(16)
10852
10760
  }
10853
10761
  }
@@ -11642,9 +11550,7 @@ declare function convertInMemoryStateToDictionary(
11642
11550
  doSerialize(serialize.statistics); // C(13)
11643
11551
  doSerialize(serialize.accumulationQueue); // C(14)
11644
11552
  doSerialize(serialize.recentlyAccumulated); // C(15)
11645
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
11646
- doSerialize(serialize.accumulationOutputLog); // C(16)
11647
- }
11553
+ doSerialize(serialize.accumulationOutputLog); // C(16)
11648
11554
 
11649
11555
  // services
11650
11556
  for (const [serviceId, service] of state.services.entries()) {
@@ -11835,10 +11741,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
11835
11741
  }
11836
11742
 
11837
11743
  get accumulationOutputLog(): State["accumulationOutputLog"] {
11838
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
11839
- return this.retrieve(serialize.accumulationOutputLog, "accumulationOutputLog");
11840
- }
11841
- return [];
11744
+ return this.retrieve(serialize.accumulationOutputLog, "accumulationOutputLog");
11842
11745
  }
11843
11746
  }
11844
11747
 
@@ -11985,7 +11888,6 @@ declare const index$c_U32_BYTES: typeof U32_BYTES;
11985
11888
  declare const index$c_binaryMerkleization: typeof binaryMerkleization;
11986
11889
  declare const index$c_convertInMemoryStateToDictionary: typeof convertInMemoryStateToDictionary;
11987
11890
  declare const index$c_dumpCodec: typeof dumpCodec;
11988
- declare const index$c_getLegacyKey: typeof getLegacyKey;
11989
11891
  declare const index$c_getSafroleData: typeof getSafroleData;
11990
11892
  declare const index$c_legacyServiceNested: typeof legacyServiceNested;
11991
11893
  declare const index$c_loadState: typeof loadState;
@@ -11999,7 +11901,7 @@ declare const index$c_serializeStorage: typeof serializeStorage;
11999
11901
  declare const index$c_stateEntriesSequenceCodec: typeof stateEntriesSequenceCodec;
12000
11902
  import index$c_stateKeys = stateKeys;
12001
11903
  declare namespace index$c {
12002
- export { index$c_EMPTY_BLOB as EMPTY_BLOB, index$c_SerializedService as SerializedService, index$c_SerializedState as SerializedState, index$c_StateEntries as StateEntries, index$c_StateEntryUpdateAction as StateEntryUpdateAction, index$c_StateKeyIdx as StateKeyIdx, index$c_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$c_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$c_U32_BYTES as U32_BYTES, index$c_binaryMerkleization as binaryMerkleization, index$c_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$c_dumpCodec as dumpCodec, index$c_getLegacyKey as getLegacyKey, index$c_getSafroleData as getSafroleData, index$c_legacyServiceNested as legacyServiceNested, index$c_loadState as loadState, index$c_serialize as serialize, index$c_serializeBasicKeys as serializeBasicKeys, index$c_serializePreimages as serializePreimages, index$c_serializeRemovedServices as serializeRemovedServices, index$c_serializeServiceUpdates as serializeServiceUpdates, index$c_serializeStateUpdate as serializeStateUpdate, index$c_serializeStorage as serializeStorage, index$c_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$c_stateKeys as stateKeys };
11904
+ export { index$c_EMPTY_BLOB as EMPTY_BLOB, index$c_SerializedService as SerializedService, index$c_SerializedState as SerializedState, index$c_StateEntries as StateEntries, index$c_StateEntryUpdateAction as StateEntryUpdateAction, index$c_StateKeyIdx as StateKeyIdx, index$c_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$c_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$c_U32_BYTES as U32_BYTES, index$c_binaryMerkleization as binaryMerkleization, index$c_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$c_dumpCodec as dumpCodec, index$c_getSafroleData as getSafroleData, index$c_legacyServiceNested as legacyServiceNested, index$c_loadState as loadState, index$c_serialize as serialize, index$c_serializeBasicKeys as serializeBasicKeys, index$c_serializePreimages as serializePreimages, index$c_serializeRemovedServices as serializeRemovedServices, index$c_serializeServiceUpdates as serializeServiceUpdates, index$c_serializeStateUpdate as serializeStateUpdate, index$c_serializeStorage as serializeStorage, index$c_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$c_stateKeys as stateKeys };
12003
11905
  export type { index$c_EncodeFun as EncodeFun, index$c_KeyAndCodec as KeyAndCodec, index$c_SerializedStateBackend as SerializedStateBackend, index$c_StateCodec as StateCodec, index$c_StateEntryUpdate as StateEntryUpdate, StateKey$1 as StateKey };
12004
11906
  }
12005
11907
 
@@ -12359,8 +12261,8 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
12359
12261
  }
12360
12262
 
12361
12263
  // encode and add redundancy shards
12362
- const points = new ShardsCollection(POINT_ALIGNMENT, data);
12363
- const encodedResult = encode(N_CHUNKS_REDUNDANCY, POINT_ALIGNMENT, points);
12264
+ const points = new reedSolomon.ShardsCollection(POINT_ALIGNMENT, data);
12265
+ const encodedResult = reedSolomon.encode(N_CHUNKS_REDUNDANCY, points);
12364
12266
  const encodedData = encodedResult.take_data();
12365
12267
 
12366
12268
  for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
@@ -12402,9 +12304,9 @@ declare function decodePiece(
12402
12304
  result.raw.set(points.raw, pointStartInResult);
12403
12305
  }
12404
12306
  }
12405
- const points = new ShardsCollection(POINT_ALIGNMENT, data, indices);
12307
+ const points = new reedSolomon.ShardsCollection(POINT_ALIGNMENT, data, indices);
12406
12308
 
12407
- const decodingResult = decode(N_CHUNKS_REQUIRED, N_CHUNKS_REDUNDANCY, POINT_ALIGNMENT, points);
12309
+ const decodingResult = reedSolomon.decode(N_CHUNKS_REQUIRED, N_CHUNKS_REDUNDANCY, points);
12408
12310
  const resultIndices = decodingResult.take_indices(); // it has to be called before take_data
12409
12311
  const resultData = decodingResult.take_data(); // it destroys the result object in rust
12410
12312
 
@@ -12622,77 +12524,249 @@ declare function chunksToShards(
12622
12524
  ): PerValidator<BytesBlob> {
12623
12525
  const result = new Array<BytesBlob>();
12624
12526
 
12625
- const allChunks = BytesBlob.blobFromParts(chunks.map((c) => c.raw));
12626
- const shardSize = allChunks.length / N_CHUNKS_TOTAL;
12527
+ const allChunks = BytesBlob.blobFromParts(chunks.map((c) => c.raw));
12528
+ const shardSize = allChunks.length / N_CHUNKS_TOTAL;
12529
+
12530
+ // wrap around the data to have enough
12531
+ const bytesToDrawFrom = BytesBlob.blobFromParts(allChunks.raw, allChunks.raw);
12532
+ const bytesPerValidator = Math.ceil(allChunks.length / spec.validatorsCount);
12533
+ // align number of bytes to the shard length.
12534
+ const alignedBytesPerValidator = Math.ceil(bytesPerValidator / shardSize) * shardSize;
12535
+
12536
+ for (let i = 0; i < spec.validatorsCount; i++) {
12537
+ const start = i * alignedBytesPerValidator;
12538
+ const end = start + alignedBytesPerValidator;
12539
+
12540
+ result.push(BytesBlob.blobFrom(bytesToDrawFrom.raw.subarray(start, end)));
12541
+ }
12542
+
12543
+ return tryAsPerValidator(result, spec);
12544
+ }
12545
+
12546
+ declare const initEc = async () => {
12547
+ await init.reedSolomon();
12548
+ };
12549
+
12550
+ declare const index$a_HALF_POINT_SIZE: typeof HALF_POINT_SIZE;
12551
+ declare const index$a_N_CHUNKS_REDUNDANCY: typeof N_CHUNKS_REDUNDANCY;
12552
+ type index$a_N_CHUNKS_REQUIRED = N_CHUNKS_REQUIRED;
12553
+ type index$a_N_CHUNKS_TOTAL = N_CHUNKS_TOTAL;
12554
+ type index$a_PIECE_SIZE = PIECE_SIZE;
12555
+ declare const index$a_POINT_ALIGNMENT: typeof POINT_ALIGNMENT;
12556
+ type index$a_POINT_LENGTH = POINT_LENGTH;
12557
+ declare const index$a_chunkingFunction: typeof chunkingFunction;
12558
+ declare const index$a_chunksToShards: typeof chunksToShards;
12559
+ declare const index$a_decodeData: typeof decodeData;
12560
+ declare const index$a_decodeDataAndTrim: typeof decodeDataAndTrim;
12561
+ declare const index$a_decodePiece: typeof decodePiece;
12562
+ declare const index$a_encodePoints: typeof encodePoints;
12563
+ declare const index$a_initEc: typeof initEc;
12564
+ declare const index$a_join: typeof join;
12565
+ declare const index$a_lace: typeof lace;
12566
+ declare const index$a_padAndEncodeData: typeof padAndEncodeData;
12567
+ declare const index$a_shardsToChunks: typeof shardsToChunks;
12568
+ declare const index$a_split: typeof split;
12569
+ declare const index$a_transpose: typeof transpose;
12570
+ declare const index$a_unzip: typeof unzip;
12571
+ declare namespace index$a {
12572
+ export { index$a_HALF_POINT_SIZE as HALF_POINT_SIZE, index$a_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$a_POINT_ALIGNMENT as POINT_ALIGNMENT, index$a_chunkingFunction as chunkingFunction, index$a_chunksToShards as chunksToShards, index$a_decodeData as decodeData, index$a_decodeDataAndTrim as decodeDataAndTrim, index$a_decodePiece as decodePiece, index$a_encodePoints as encodePoints, index$a_initEc as initEc, index$a_join as join, index$a_lace as lace, index$a_padAndEncodeData as padAndEncodeData, index$a_shardsToChunks as shardsToChunks, index$a_split as split, index$a_transpose as transpose, index$a_unzip as unzip };
12573
+ export type { index$a_N_CHUNKS_REQUIRED as N_CHUNKS_REQUIRED, index$a_N_CHUNKS_TOTAL as N_CHUNKS_TOTAL, index$a_PIECE_SIZE as PIECE_SIZE, index$a_POINT_LENGTH as POINT_LENGTH };
12574
+ }
12575
+
12576
+ /**
12577
+ * Host call result constants.
12578
+ *
12579
+ * https://graypaper.fluffylabs.dev/#/85129da/2c7c022c7c02?v=0.6.3
12580
+ */
12581
+ declare const HostCallResult = {
12582
+ /** The return value indicating an item does not exist. */
12583
+ NONE: tryAsU64(0xffff_ffff_ffff_ffffn), // 2**64 - 1
12584
+ /** Name unknown. */
12585
+ WHAT: tryAsU64(0xffff_ffff_ffff_fffen), // 2**64 - 2
12586
+ /** The inner PVM memory index provided for reading/writing is not accessible. */
12587
+ OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
12588
+ /** Index unknown. */
12589
+ WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
12590
+ /** Storage full. */
12591
+ FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
12592
+ /** Core index unknown. */
12593
+ CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
12594
+ /** Insufficient funds. */
12595
+ CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
12596
+ /** Gas limit too low. */
12597
+ LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
12598
+ /** The item is already solicited or cannot be forgotten. */
12599
+ HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
12600
+ /** The return value indicating general success. */
12601
+ OK: tryAsU64(0n),
12602
+ } as const;
12603
+
12604
+ declare enum Level {
12605
+ INSANE = 1,
12606
+ TRACE = 2,
12607
+ LOG = 3,
12608
+ INFO = 4,
12609
+ WARN = 5,
12610
+ ERROR = 6,
12611
+ }
12612
+
12613
+ type Options = {
12614
+ defaultLevel: Level;
12615
+ workingDir: string;
12616
+ modules: Map<string, Level>;
12617
+ };
12618
+
12619
+ /**
12620
+ * A function to parse logger definition (including modules) given as a string.
12621
+ *
12622
+ * Examples
12623
+ * - `info` - setup default logging level to `info`.
12624
+ * - `trace` - default logging level set to `trace`.
12625
+ * - `debug;consensus=trace` - default level is set to `debug/log`, but consensus is in trace mode.
12626
+ */
12627
+ declare function parseLoggerOptions(input: string, defaultLevel: Level, workingDir?: string): Options {
12628
+ const modules = new Map<string, Level>();
12629
+ const parts = input.toLowerCase().split(",");
12630
+ let defLevel = defaultLevel;
12631
+
12632
+ for (const p of parts) {
12633
+ const clean = p.trim();
12634
+ // skip empty objects (forgotten `,` removed)
12635
+ if (clean.length === 0) {
12636
+ continue;
12637
+ }
12638
+ // we just have the default level
12639
+ if (clean.includes("=")) {
12640
+ const [mod, lvl] = clean.split("=");
12641
+ modules.set(mod.trim(), parseLevel(lvl.trim()));
12642
+ } else {
12643
+ defLevel = parseLevel(clean);
12644
+ }
12645
+ }
12646
+
12647
+ // TODO [ToDr] Fix dirname for workers.
12648
+ const myDir = (import.meta.dirname ?? "").split("/");
12649
+ myDir.pop();
12650
+ myDir.pop();
12651
+ return {
12652
+ defaultLevel: defLevel,
12653
+ modules,
12654
+ workingDir: workingDir ?? myDir.join("/"),
12655
+ };
12656
+ }
12657
+
12658
+ declare const GLOBAL_CONFIG = {
12659
+ options: DEFAULT_OPTIONS,
12660
+ transport: ConsoleTransport.create(DEFAULT_OPTIONS.defaultLevel, DEFAULT_OPTIONS),
12661
+ };
12662
+
12663
+ /**
12664
+ * A logger instance.
12665
+ */
12666
+ declare class Logger {
12667
+ /**
12668
+ * Create a new logger instance given filename and an optional module name.
12669
+ *
12670
+ * If the module name is not given, `fileName` becomes the module name.
12671
+ * The module name can be composed from multiple parts separated with `/`.
12672
+ *
12673
+ * The logger will use a global configuration which can be changed using
12674
+ * [`configureLogger`] function.
12675
+ */
12676
+ static new(fileName?: string, moduleName?: string) {
12677
+ const fName = fileName ?? "unknown";
12678
+ return new Logger(moduleName ?? fName, fName, GLOBAL_CONFIG);
12679
+ }
12680
+
12681
+ /**
12682
+ * Return currently configured level for given module. */
12683
+ static getLevel(moduleName: string): Level {
12684
+ return findLevel(GLOBAL_CONFIG.options, moduleName);
12685
+ }
12686
+
12687
+ /**
12688
+ * Global configuration of all loggers.
12689
+ *
12690
+ * One can specify a default logging level (only logs with level >= default will be printed).
12691
+ * It's also possible to configure per-module logging level that takes precedence
12692
+ * over the default one.
12693
+ *
12694
+ * Changing the options affects all previously created loggers.
12695
+ */
12696
+ static configureAllFromOptions(options: Options) {
12697
+ // find minimal level to optimise logging in case
12698
+ // we don't care about low-level logs.
12699
+ const minimalLevel = Array.from(options.modules.values()).reduce((level, modLevel) => {
12700
+ return level < modLevel ? level : modLevel;
12701
+ }, options.defaultLevel);
12702
+
12703
+ const transport = ConsoleTransport.create(minimalLevel, options);
12704
+
12705
+ // set the global config
12706
+ GLOBAL_CONFIG.options = options;
12707
+ GLOBAL_CONFIG.transport = transport;
12708
+ }
12709
+
12710
+ /**
12711
+ * Global configuration of all loggers.
12712
+ *
12713
+ * Parse configuration options from an input string typically obtained
12714
+ * from environment variable `JAM_LOG`.
12715
+ */
12716
+ static configureAll(input: string, defaultLevel: Level, workingDir?: string) {
12717
+ const options = parseLoggerOptions(input, defaultLevel, workingDir);
12718
+ Logger.configureAllFromOptions(options);
12719
+ }
12720
+
12721
+ constructor(
12722
+ private readonly moduleName: string,
12723
+ private readonly fileName: string,
12724
+ private readonly config: typeof GLOBAL_CONFIG,
12725
+ ) {}
12726
+
12727
+ /** Log a message with `INSANE` level. */
12728
+ insane(val: string) {
12729
+ this.config.transport.insane(this.moduleName, val);
12730
+ }
12627
12731
 
12628
- // wrap around the data to have enough
12629
- const bytesToDrawFrom = BytesBlob.blobFromParts(allChunks.raw, allChunks.raw);
12630
- const bytesPerValidator = Math.ceil(allChunks.length / spec.validatorsCount);
12631
- // align number of bytes to the shard length.
12632
- const alignedBytesPerValidator = Math.ceil(bytesPerValidator / shardSize) * shardSize;
12732
+ /** Log a message with `TRACE` level. */
12733
+ trace(val: string) {
12734
+ this.config.transport.trace(this.moduleName, val);
12735
+ }
12633
12736
 
12634
- for (let i = 0; i < spec.validatorsCount; i++) {
12635
- const start = i * alignedBytesPerValidator;
12636
- const end = start + alignedBytesPerValidator;
12737
+ /** Log a message with `DEBUG`/`LOG` level. */
12738
+ log(val: string) {
12739
+ this.config.transport.log(this.moduleName, val);
12740
+ }
12637
12741
 
12638
- result.push(BytesBlob.blobFrom(bytesToDrawFrom.raw.subarray(start, end)));
12742
+ /** Log a message with `INFO` level. */
12743
+ info(val: string) {
12744
+ this.config.transport.info(this.moduleName, val);
12639
12745
  }
12640
12746
 
12641
- return tryAsPerValidator(result, spec);
12642
- }
12747
+ /** Log a message with `WARN` level. */
12748
+ warn(val: string) {
12749
+ this.config.transport.warn(this.moduleName, val);
12750
+ }
12643
12751
 
12644
- declare const index$a_HALF_POINT_SIZE: typeof HALF_POINT_SIZE;
12645
- declare const index$a_N_CHUNKS_REDUNDANCY: typeof N_CHUNKS_REDUNDANCY;
12646
- type index$a_N_CHUNKS_REQUIRED = N_CHUNKS_REQUIRED;
12647
- type index$a_N_CHUNKS_TOTAL = N_CHUNKS_TOTAL;
12648
- type index$a_PIECE_SIZE = PIECE_SIZE;
12649
- declare const index$a_POINT_ALIGNMENT: typeof POINT_ALIGNMENT;
12650
- type index$a_POINT_LENGTH = POINT_LENGTH;
12651
- declare const index$a_chunkingFunction: typeof chunkingFunction;
12652
- declare const index$a_chunksToShards: typeof chunksToShards;
12653
- declare const index$a_decodeData: typeof decodeData;
12654
- declare const index$a_decodeDataAndTrim: typeof decodeDataAndTrim;
12655
- declare const index$a_decodePiece: typeof decodePiece;
12656
- declare const index$a_encodePoints: typeof encodePoints;
12657
- declare const index$a_join: typeof join;
12658
- declare const index$a_lace: typeof lace;
12659
- declare const index$a_padAndEncodeData: typeof padAndEncodeData;
12660
- declare const index$a_shardsToChunks: typeof shardsToChunks;
12661
- declare const index$a_split: typeof split;
12662
- declare const index$a_transpose: typeof transpose;
12663
- declare const index$a_unzip: typeof unzip;
12664
- declare namespace index$a {
12665
- export { index$a_HALF_POINT_SIZE as HALF_POINT_SIZE, index$a_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$a_POINT_ALIGNMENT as POINT_ALIGNMENT, index$a_chunkingFunction as chunkingFunction, index$a_chunksToShards as chunksToShards, index$a_decodeData as decodeData, index$a_decodeDataAndTrim as decodeDataAndTrim, index$a_decodePiece as decodePiece, index$a_encodePoints as encodePoints, index$a_join as join, index$a_lace as lace, index$a_padAndEncodeData as padAndEncodeData, index$a_shardsToChunks as shardsToChunks, index$a_split as split, index$a_transpose as transpose, index$a_unzip as unzip };
12666
- export type { index$a_N_CHUNKS_REQUIRED as N_CHUNKS_REQUIRED, index$a_N_CHUNKS_TOTAL as N_CHUNKS_TOTAL, index$a_PIECE_SIZE as PIECE_SIZE, index$a_POINT_LENGTH as POINT_LENGTH };
12752
+ /** Log a message with `ERROR` level. */
12753
+ error(val: string) {
12754
+ this.config.transport.error(this.moduleName, val);
12755
+ }
12667
12756
  }
12668
12757
 
12669
- /**
12670
- * Host call result constants.
12671
- *
12672
- * https://graypaper.fluffylabs.dev/#/85129da/2c7c022c7c02?v=0.6.3
12673
- */
12674
- declare const HostCallResult = {
12675
- /** The return value indicating an item does not exist. */
12676
- NONE: tryAsU64(0xffff_ffff_ffff_ffffn), // 2**64 - 1
12677
- /** Name unknown. */
12678
- WHAT: tryAsU64(0xffff_ffff_ffff_fffen), // 2**64 - 2
12679
- /** The inner PVM memory index provided for reading/writing is not accessible. */
12680
- OOB: tryAsU64(0xffff_ffff_ffff_fffdn), // 2**64 - 3
12681
- /** Index unknown. */
12682
- WHO: tryAsU64(0xffff_ffff_ffff_fffcn), // 2**64 - 4
12683
- /** Storage full. */
12684
- FULL: tryAsU64(0xffff_ffff_ffff_fffbn), // 2**64 - 5
12685
- /** Core index unknown. */
12686
- CORE: tryAsU64(0xffff_ffff_ffff_fffan), // 2**64 - 6
12687
- /** Insufficient funds. */
12688
- CASH: tryAsU64(0xffff_ffff_ffff_fff9n), // 2**64 - 7
12689
- /** Gas limit too low. */
12690
- LOW: tryAsU64(0xffff_ffff_ffff_fff8n), // 2**64 - 8
12691
- /** The item is already solicited or cannot be forgotten. */
12692
- HUH: tryAsU64(0xffff_ffff_ffff_fff7n), // 2**64 - 9
12693
- /** The return value indicating general success. */
12694
- OK: tryAsU64(0n),
12695
- } as const;
12758
+ type index$9_Level = Level;
12759
+ declare const index$9_Level: typeof Level;
12760
+ type index$9_Logger = Logger;
12761
+ declare const index$9_Logger: typeof Logger;
12762
+ declare const index$9_parseLoggerOptions: typeof parseLoggerOptions;
12763
+ declare namespace index$9 {
12764
+ export {
12765
+ index$9_Level as Level,
12766
+ index$9_Logger as Logger,
12767
+ index$9_parseLoggerOptions as parseLoggerOptions,
12768
+ };
12769
+ }
12696
12770
 
12697
12771
  /**
12698
12772
  * Mask class is an implementation of skip function defined in GP.
@@ -13953,13 +14027,14 @@ declare abstract class MemoryPage {
13953
14027
  * And then a new version of TypeScript is released.
13954
14028
  */
13955
14029
  declare global {
13956
- interface ArrayBufferConstructor {
13957
- new (length: number, options?: { maxByteLength: number }): ArrayBuffer;
13958
- }
13959
-
13960
- interface ArrayBuffer {
13961
- resize(length: number): void;
13962
- }
14030
+ interface ArrayBufferConstructor {
14031
+ new (length: number, options?: {
14032
+ maxByteLength: number;
14033
+ }): ArrayBuffer;
14034
+ }
14035
+ interface ArrayBuffer {
14036
+ resize(length: number): void;
14037
+ }
13963
14038
  }
13964
14039
 
13965
14040
  type InitialMemoryState = {
@@ -13972,6 +14047,7 @@ declare enum AccessType {
13972
14047
  READ = 0,
13973
14048
  WRITE = 1,
13974
14049
  }
14050
+
13975
14051
  declare class Memory {
13976
14052
  static fromInitialMemory(initialMemoryState: InitialMemoryState) {
13977
14053
  return new Memory(
@@ -14008,6 +14084,7 @@ declare class Memory {
14008
14084
  return Result.ok(OK);
14009
14085
  }
14010
14086
 
14087
+ logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`);
14011
14088
  const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
14012
14089
 
14013
14090
  if (pagesResult.isError) {
@@ -14096,6 +14173,7 @@ declare class Memory {
14096
14173
  bytesLeft -= bytesToRead;
14097
14174
  }
14098
14175
 
14176
+ logger.insane(`MEM[${startAddress}] => ${BytesBlob.blobFrom(result)}`);
14099
14177
  return Result.ok(OK);
14100
14178
  }
14101
14179
 
@@ -15095,6 +15173,10 @@ declare class JumpTable {
15095
15173
  return new JumpTable(0, new Uint8Array());
15096
15174
  }
15097
15175
 
15176
+ getSize() {
15177
+ return this.indices.length;
15178
+ }
15179
+
15098
15180
  copyFrom(jt: JumpTable) {
15099
15181
  this.indices = jt.indices;
15100
15182
  }
@@ -15956,207 +16038,46 @@ declare class NoArgsDispatcher {
15956
16038
 
15957
16039
  declare class TwoRegsTwoImmsDispatcher {
15958
16040
  constructor(
15959
- private loadOps: LoadOps,
15960
- private dynamicJumpOps: DynamicJumpOps,
15961
- ) {}
15962
-
15963
- dispatch(instruction: Instruction, args: TwoRegistersTwoImmediatesArgs) {
15964
- switch (instruction) {
15965
- case Instruction.LOAD_IMM_JUMP_IND: {
15966
- const address = this.dynamicJumpOps.caluclateJumpAddress(args.secondImmediateDecoder, args.secondRegisterIndex);
15967
- this.loadOps.loadImmediate(args.firstRegisterIndex, args.firstImmediateDecoder);
15968
- this.dynamicJumpOps.jumpInd(address);
15969
- break;
15970
- }
15971
- }
15972
- }
15973
- }
15974
-
15975
- declare class OneImmDispatcher {
15976
- constructor(private hostCallOps: HostCallOps) {}
15977
-
15978
- dispatch(instruction: Instruction, args: OneImmediateArgs) {
15979
- switch (instruction) {
15980
- case Instruction.ECALLI:
15981
- this.hostCallOps.hostCall(args.immediateDecoder);
15982
- break;
15983
- }
15984
- }
15985
- }
15986
-
15987
- declare class OneRegOneExtImmDispatcher {
15988
- constructor(private loadOps: LoadOps) {}
15989
-
15990
- dispatch(instruction: Instruction, args: OneRegisterOneExtendedWidthImmediateArgs) {
15991
- switch (instruction) {
15992
- case Instruction.LOAD_IMM_64:
15993
- this.loadOps.loadImmediateU64(args.registerIndex, args.immediateDecoder);
15994
- break;
15995
- }
15996
- }
15997
- }
15998
-
15999
- declare enum Level {
16000
- TRACE = 1,
16001
- LOG = 2,
16002
- INFO = 3,
16003
- WARN = 4,
16004
- ERROR = 5,
16005
- }
16006
-
16007
- type Options = {
16008
- defaultLevel: Level;
16009
- workingDir: string;
16010
- modules: Map<string, Level>;
16011
- };
16012
-
16013
- /**
16014
- * A function to parse logger definition (including modules) given as a string.
16015
- *
16016
- * Examples
16017
- * - `info` - setup default logging level to `info`.
16018
- * - `trace` - default logging level set to `trace`.
16019
- * - `debug;consensus=trace` - default level is set to `debug/log`, but consensus is in trace mode.
16020
- */
16021
- declare function parseLoggerOptions(input: string, defaultLevel: Level, workingDir?: string): Options {
16022
- const modules = new Map<string, Level>();
16023
- const parts = input.toLowerCase().split(",");
16024
- let defLevel = defaultLevel;
16025
-
16026
- for (const p of parts) {
16027
- const clean = p.trim();
16028
- // skip empty objects (forgotten `,` removed)
16029
- if (clean.length === 0) {
16030
- continue;
16031
- }
16032
- // we just have the default level
16033
- if (clean.includes("=")) {
16034
- const [mod, lvl] = clean.split("=");
16035
- modules.set(mod.trim(), parseLevel(lvl.trim()));
16036
- } else {
16037
- defLevel = parseLevel(clean);
16038
- }
16039
- }
16040
-
16041
- // TODO [ToDr] Fix dirname for workers.
16042
- const myDir = (import.meta.dirname ?? "").split("/");
16043
- myDir.pop();
16044
- myDir.pop();
16045
- return {
16046
- defaultLevel: defLevel,
16047
- modules,
16048
- workingDir: workingDir ?? myDir.join("/"),
16049
- };
16050
- }
16051
-
16052
- declare const GLOBAL_CONFIG = {
16053
- options: DEFAULT_OPTIONS,
16054
- transport: ConsoleTransport.create(DEFAULT_OPTIONS.defaultLevel, DEFAULT_OPTIONS),
16055
- };
16056
-
16057
- /**
16058
- * A logger instance.
16059
- */
16060
- declare class Logger {
16061
- /**
16062
- * Create a new logger instance given filename and an optional module name.
16063
- *
16064
- * If the module name is not given, `fileName` becomes the module name.
16065
- * The module name can be composed from multiple parts separated with `/`.
16066
- *
16067
- * The logger will use a global configuration which can be changed using
16068
- * [`configureLogger`] function.
16069
- */
16070
- static new(fileName?: string, moduleName?: string) {
16071
- const fName = fileName ?? "unknown";
16072
- return new Logger(moduleName ?? fName, fName, GLOBAL_CONFIG);
16073
- }
16074
-
16075
- /**
16076
- * Return currently configured level for given module. */
16077
- static getLevel(moduleName: string): Level {
16078
- return findLevel(GLOBAL_CONFIG.options, moduleName);
16079
- }
16080
-
16081
- /**
16082
- * Global configuration of all loggers.
16083
- *
16084
- * One can specify a default logging level (only logs with level >= default will be printed).
16085
- * It's also possible to configure per-module logging level that takes precedence
16086
- * over the default one.
16087
- *
16088
- * Changing the options affects all previously created loggers.
16089
- */
16090
- static configureAllFromOptions(options: Options) {
16091
- // find minimal level to optimise logging in case
16092
- // we don't care about low-level logs.
16093
- const minimalLevel = Array.from(options.modules.values()).reduce((level, modLevel) => {
16094
- return level < modLevel ? level : modLevel;
16095
- }, options.defaultLevel);
16096
-
16097
- const transport = ConsoleTransport.create(minimalLevel, options);
16098
-
16099
- // set the global config
16100
- GLOBAL_CONFIG.options = options;
16101
- GLOBAL_CONFIG.transport = transport;
16102
- }
16103
-
16104
- /**
16105
- * Global configuration of all loggers.
16106
- *
16107
- * Parse configuration options from an input string typically obtained
16108
- * from environment variable `JAM_LOG`.
16109
- */
16110
- static configureAll(input: string, defaultLevel: Level, workingDir?: string) {
16111
- const options = parseLoggerOptions(input, defaultLevel, workingDir);
16112
- Logger.configureAllFromOptions(options);
16113
- }
16114
-
16115
- constructor(
16116
- private readonly moduleName: string,
16117
- private readonly fileName: string,
16118
- private readonly config: typeof GLOBAL_CONFIG,
16041
+ private loadOps: LoadOps,
16042
+ private dynamicJumpOps: DynamicJumpOps,
16119
16043
  ) {}
16120
16044
 
16121
- /** Log a message with `TRACE` level. */
16122
- trace(val: string) {
16123
- this.config.transport.trace(this.moduleName, this.fileName, val);
16045
+ dispatch(instruction: Instruction, args: TwoRegistersTwoImmediatesArgs) {
16046
+ switch (instruction) {
16047
+ case Instruction.LOAD_IMM_JUMP_IND: {
16048
+ const address = this.dynamicJumpOps.caluclateJumpAddress(args.secondImmediateDecoder, args.secondRegisterIndex);
16049
+ this.loadOps.loadImmediate(args.firstRegisterIndex, args.firstImmediateDecoder);
16050
+ this.dynamicJumpOps.jumpInd(address);
16051
+ break;
16052
+ }
16053
+ }
16124
16054
  }
16055
+ }
16125
16056
 
16126
- /** Log a message with `DEBUG`/`LOG` level. */
16127
- log(val: string) {
16128
- this.config.transport.log(this.moduleName, this.fileName, val);
16129
- }
16057
+ declare class OneImmDispatcher {
16058
+ constructor(private hostCallOps: HostCallOps) {}
16130
16059
 
16131
- /** Log a message with `INFO` level. */
16132
- info(val: string) {
16133
- this.config.transport.info(this.moduleName, this.fileName, val);
16060
+ dispatch(instruction: Instruction, args: OneImmediateArgs) {
16061
+ switch (instruction) {
16062
+ case Instruction.ECALLI:
16063
+ this.hostCallOps.hostCall(args.immediateDecoder);
16064
+ break;
16065
+ }
16134
16066
  }
16067
+ }
16135
16068
 
16136
- /** Log a message with `WARN` level. */
16137
- warn(val: string) {
16138
- this.config.transport.warn(this.moduleName, this.fileName, val);
16139
- }
16069
+ declare class OneRegOneExtImmDispatcher {
16070
+ constructor(private loadOps: LoadOps) {}
16140
16071
 
16141
- /** Log a message with `ERROR` level. */
16142
- error(val: string) {
16143
- this.config.transport.error(this.moduleName, this.fileName, val);
16072
+ dispatch(instruction: Instruction, args: OneRegisterOneExtendedWidthImmediateArgs) {
16073
+ switch (instruction) {
16074
+ case Instruction.LOAD_IMM_64:
16075
+ this.loadOps.loadImmediateU64(args.registerIndex, args.immediateDecoder);
16076
+ break;
16077
+ }
16144
16078
  }
16145
16079
  }
16146
16080
 
16147
- type index$9_Level = Level;
16148
- declare const index$9_Level: typeof Level;
16149
- type index$9_Logger = Logger;
16150
- declare const index$9_Logger: typeof Logger;
16151
- declare const index$9_parseLoggerOptions: typeof parseLoggerOptions;
16152
- declare namespace index$9 {
16153
- export {
16154
- index$9_Level as Level,
16155
- index$9_Logger as Logger,
16156
- index$9_parseLoggerOptions as parseLoggerOptions,
16157
- };
16158
- }
16159
-
16160
16081
  declare enum ProgramDecoderError {
16161
16082
  InvalidProgramError = 0,
16162
16083
  }
@@ -16237,12 +16158,12 @@ declare enum Status {
16237
16158
 
16238
16159
  type InterpreterOptions = {
16239
16160
  useSbrkGas?: boolean;
16240
- ignoreInstructionGas?: boolean;
16241
16161
  };
16242
16162
 
16163
+ declare const logger = Logger.new(import.meta.filename, "pvm");
16164
+
16243
16165
  declare class Interpreter {
16244
16166
  private readonly useSbrkGas: boolean;
16245
- private readonly ignoreInstructionGas: boolean;
16246
16167
  private registers = new Registers();
16247
16168
  private code: Uint8Array = new Uint8Array();
16248
16169
  private mask = Mask.empty();
@@ -16270,9 +16191,8 @@ declare class Interpreter {
16270
16191
  private basicBlocks: BasicBlocks;
16271
16192
  private jumpTable = JumpTable.empty();
16272
16193
 
16273
- constructor({ useSbrkGas = false, ignoreInstructionGas = false }: InterpreterOptions = {}) {
16194
+ constructor({ useSbrkGas = false }: InterpreterOptions = {}) {
16274
16195
  this.useSbrkGas = useSbrkGas;
16275
- this.ignoreInstructionGas = ignoreInstructionGas;
16276
16196
  this.argsDecoder = new ArgsDecoder();
16277
16197
  this.basicBlocks = new BasicBlocks();
16278
16198
  const mathOps = new MathOps(this.registers);
@@ -16368,7 +16288,7 @@ declare class Interpreter {
16368
16288
  const currentInstruction = this.code[this.pc] ?? Instruction.TRAP;
16369
16289
  const isValidInstruction = Instruction[currentInstruction] !== undefined;
16370
16290
  const gasCost = instructionGasMap[currentInstruction] ?? instructionGasMap[Instruction.TRAP];
16371
- const underflow = this.ignoreInstructionGas ? false : this.gas.sub(gasCost);
16291
+ const underflow = this.gas.sub(gasCost);
16372
16292
  if (underflow) {
16373
16293
  this.status = Status.OOG;
16374
16294
  return this.status;
@@ -16377,6 +16297,8 @@ declare class Interpreter {
16377
16297
  const argsResult = this.argsDecodingResults[argsType];
16378
16298
  this.argsDecoder.fillArgs(this.pc, argsResult);
16379
16299
 
16300
+ logger.insane(`[PC: ${this.pc}] ${Instruction[currentInstruction]}`);
16301
+
16380
16302
  if (!isValidInstruction) {
16381
16303
  this.instructionResult.status = Result.PANIC;
16382
16304
  } else {
@@ -16437,12 +16359,6 @@ declare class Interpreter {
16437
16359
  }
16438
16360
 
16439
16361
  if (this.instructionResult.status !== null) {
16440
- // All abnormal terminations should be interpreted as TRAP and we should subtract the gas. In case of FAULT we have to do it manually at the very end.
16441
- if (this.instructionResult.status === Result.FAULT || this.instructionResult.status === Result.FAULT_ACCESS) {
16442
- // TODO [ToDr] underflow?
16443
- this.gas.sub(instructionGasMap[Instruction.TRAP]);
16444
- }
16445
-
16446
16362
  switch (this.instructionResult.status) {
16447
16363
  case Result.FAULT:
16448
16364
  this.status = Status.FAULT;
@@ -16458,6 +16374,7 @@ declare class Interpreter {
16458
16374
  this.status = Status.HOST;
16459
16375
  break;
16460
16376
  }
16377
+ logger.insane(`[PC: ${this.pc}] Status: ${Result[this.instructionResult.status]}`);
16461
16378
  return this.status;
16462
16379
  }
16463
16380
 
@@ -16529,13 +16446,14 @@ declare const index$8_Registers: typeof Registers;
16529
16446
  type index$8_SbrkIndex = SbrkIndex;
16530
16447
  type index$8_SmallGas = SmallGas;
16531
16448
  declare const index$8_gasCounter: typeof gasCounter;
16449
+ declare const index$8_logger: typeof logger;
16532
16450
  declare const index$8_tryAsBigGas: typeof tryAsBigGas;
16533
16451
  declare const index$8_tryAsGas: typeof tryAsGas;
16534
16452
  declare const index$8_tryAsMemoryIndex: typeof tryAsMemoryIndex;
16535
16453
  declare const index$8_tryAsSbrkIndex: typeof tryAsSbrkIndex;
16536
16454
  declare const index$8_tryAsSmallGas: typeof tryAsSmallGas;
16537
16455
  declare namespace index$8 {
16538
- export { index$8_Interpreter as Interpreter, index$8_Memory as Memory, index$8_MemoryBuilder as MemoryBuilder, index$8_Registers as Registers, index$8_gasCounter as gasCounter, index$8_tryAsBigGas as tryAsBigGas, index$8_tryAsGas as tryAsGas, index$8_tryAsMemoryIndex as tryAsMemoryIndex, index$8_tryAsSbrkIndex as tryAsSbrkIndex, index$8_tryAsSmallGas as tryAsSmallGas };
16456
+ export { index$8_Interpreter as Interpreter, index$8_Memory as Memory, index$8_MemoryBuilder as MemoryBuilder, index$8_Registers as Registers, index$8_gasCounter as gasCounter, index$8_logger as logger, index$8_tryAsBigGas as tryAsBigGas, index$8_tryAsGas as tryAsGas, index$8_tryAsMemoryIndex as tryAsMemoryIndex, index$8_tryAsSbrkIndex as tryAsSbrkIndex, index$8_tryAsSmallGas as tryAsSmallGas };
16539
16457
  export type { index$8_BigGas as BigGas, index$8_Gas as Gas, index$8_GasCounter as GasCounter, index$8_InterpreterOptions as InterpreterOptions, index$8_MemoryIndex as MemoryIndex, index$8_SbrkIndex as SbrkIndex, index$8_SmallGas as SmallGas };
16540
16458
  }
16541
16459
 
@@ -16618,7 +16536,7 @@ interface HostCallHandler {
16618
16536
  readonly gasCost: SmallGas | ((reg: IHostCallRegisters) => Gas);
16619
16537
 
16620
16538
  /** Currently executing service id. */
16621
- readonly currentServiceId: ServiceId;
16539
+ readonly currentServiceId: U32;
16622
16540
 
16623
16541
  /** Input&Output registers that we should add to tracing log. */
16624
16542
  readonly tracedRegisters: RegisterIndex[];
@@ -16631,14 +16549,21 @@ interface HostCallHandler {
16631
16549
  execute(gas: GasCounter, regs: IHostCallRegisters, memory: IHostCallMemory): Promise<undefined | PvmExecution>;
16632
16550
  }
16633
16551
 
16634
- // TODO [ToDr] Rename to just `HostCalls`
16635
16552
  /** Container for all available host calls. */
16636
16553
  declare class HostCallsManager {
16637
16554
  private readonly hostCalls = new Map<HostCallIndex, HostCallHandler>();
16638
- private readonly missing = new Missing();
16555
+ private readonly missing;
16556
+
16557
+ constructor({
16558
+ missing,
16559
+ handlers = [],
16560
+ }: {
16561
+ missing: HostCallHandler;
16562
+ handlers?: HostCallHandler[];
16563
+ }) {
16564
+ this.missing = missing;
16639
16565
 
16640
- constructor(...hostCallHandlers: HostCallHandler[]) {
16641
- for (const handler of hostCallHandlers) {
16566
+ for (const handler of handlers) {
16642
16567
  check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
16643
16568
  this.hostCalls.set(handler.index, handler);
16644
16569
  }
@@ -16666,7 +16591,7 @@ declare class HostCallsManager {
16666
16591
  return `r${idx}=${value} (0x${value.toString(16)})`;
16667
16592
  })
16668
16593
  .join(", ");
16669
- logger.trace(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
16594
+ logger.insane(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
16670
16595
  }
16671
16596
  }
16672
16597
 
@@ -16677,16 +16602,10 @@ declare class InterpreterInstanceManager {
16677
16602
  private waitingQueue: ResolveFn[] = [];
16678
16603
 
16679
16604
  constructor(noOfPvmInstances: number) {
16680
- const shouldCountGas =
16681
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ||
16682
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
16683
- Compatibility.isSuite(TestSuite.W3F_DAVXY, GpVersion.V0_6_6);
16684
-
16685
16605
  for (let i = 0; i < noOfPvmInstances; i++) {
16686
16606
  this.instances.push(
16687
16607
  new Interpreter({
16688
16608
  useSbrkGas: false,
16689
- ignoreInstructionGas: !shouldCountGas,
16690
16609
  }),
16691
16610
  );
16692
16611
  }
@@ -16793,14 +16712,15 @@ declare class HostCalls {
16793
16712
  const gasCost = typeof hostCall.gasCost === "number" ? hostCall.gasCost : hostCall.gasCost(regs);
16794
16713
  const underflow = gas.sub(gasCost);
16795
16714
 
16715
+ const pcLog = `[PC: ${pvmInstance.getPC()}]`;
16796
16716
  if (underflow) {
16797
- this.hostCalls.traceHostCall("OOG", index, hostCall, regs, gas.get());
16717
+ this.hostCalls.traceHostCall(`${pcLog} OOG`, index, hostCall, regs, gas.get());
16798
16718
  return ReturnValue.fromStatus(pvmInstance.getGasConsumed(), Status.OOG);
16799
16719
  }
16800
- this.hostCalls.traceHostCall("Invoking", index, hostCall, regs, gasBefore);
16720
+ this.hostCalls.traceHostCall(`${pcLog} Invoking`, index, hostCall, regs, gasBefore);
16801
16721
  const result = await hostCall.execute(gas, regs, memory);
16802
16722
  this.hostCalls.traceHostCall(
16803
- result === undefined ? "Result" : `Status(${result})`,
16723
+ result === undefined ? `${pcLog} Result` : `${pcLog} Status(${PvmExecution[result]})`,
16804
16724
  index,
16805
16725
  hostCall,
16806
16726
  regs,
@@ -16812,8 +16732,18 @@ declare class HostCalls {
16812
16732
  return this.getReturnValue(status, pvmInstance);
16813
16733
  }
16814
16734
 
16815
- pvmInstance.runProgram();
16816
- status = pvmInstance.getStatus();
16735
+ if (result === PvmExecution.Panic) {
16736
+ status = Status.PANIC;
16737
+ return this.getReturnValue(status, pvmInstance);
16738
+ }
16739
+
16740
+ if (result === undefined) {
16741
+ pvmInstance.runProgram();
16742
+ status = pvmInstance.getStatus();
16743
+ continue;
16744
+ }
16745
+
16746
+ assertNever(result);
16817
16747
  }
16818
16748
  }
16819
16749
 
@@ -18152,6 +18082,7 @@ declare const index$3_getServiceId: typeof getServiceId;
18152
18082
  declare const index$3_getServiceIdOrCurrent: typeof getServiceIdOrCurrent;
18153
18083
  declare const index$3_inspect: typeof inspect;
18154
18084
  declare const index$3_instructionArgumentTypeMap: typeof instructionArgumentTypeMap;
18085
+ declare const index$3_isBrowser: typeof isBrowser;
18155
18086
  declare const index$3_isTaggedError: typeof isTaggedError;
18156
18087
  declare const index$3_maybeTaggedErrorToString: typeof maybeTaggedErrorToString;
18157
18088
  declare const index$3_measure: typeof measure;
@@ -18164,7 +18095,7 @@ declare const index$3_tryAsMachineId: typeof tryAsMachineId;
18164
18095
  declare const index$3_tryAsProgramCounter: typeof tryAsProgramCounter;
18165
18096
  declare const index$3_writeServiceIdAsLeBytes: typeof writeServiceIdAsLeBytes;
18166
18097
  declare namespace index$3 {
18167
- export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$3_cast as cast, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, 
index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_ensure as ensure, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$n as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$p as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
18098
+ export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$3_cast as cast, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, 
index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_ensure as ensure, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$n as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isBrowser as isBrowser, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$p as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
18168
18099
  export type { index$3_Args as Args, index$3_EnumMapping as EnumMapping, index$3_ErrorResult as ErrorResult, index$3_IHostCallMemory as IHostCallMemory, index$3_IHostCallRegisters as IHostCallRegisters, index$3_InsufficientFundsError as InsufficientFundsError, index$3_MachineId as MachineId, index$3_MachineResult as MachineResult, index$3_MachineStatus as MachineStatus, index$3_NoMachineError as NoMachineError, index$3_OK as OK, index$3_OkResult as OkResult, index$3_Opaque as Opaque, index$3_PartialState as PartialState, index$3_PreimageStatus as PreimageStatus, index$3_ProgramCounter as ProgramCounter, index$3_RefineExternalities as RefineExternalities, index$3_SegmentExportError as SegmentExportError, index$3_ServiceStateUpdate as ServiceStateUpdate, index$3_StateSlice as StateSlice, index$3_StringLiteral as StringLiteral, index$3_TRANSFER_MEMO_BYTES as TRANSFER_MEMO_BYTES, index$3_TaggedError as TaggedError, index$3_TokenOf as TokenOf, index$3_Uninstantiable as Uninstantiable, index$3_UnprivilegedError as UnprivilegedError, index$3_WithOpaque as WithOpaque };
18169
18100
  }
18170
18101
 
@@ -18202,41 +18133,7 @@ declare namespace index$2 {
18202
18133
  };
18203
18134
  }
18204
18135
 
18205
- declare class JsonServiceInfoPre067 {
18206
- static fromJson = json.object<JsonServiceInfoPre067, ServiceAccountInfo>(
18207
- {
18208
- code_hash: fromJson.bytes32(),
18209
- balance: json.fromNumber((x) => tryAsU64(x)),
18210
- min_item_gas: json.fromNumber((x) => tryAsServiceGas(x)),
18211
- min_memo_gas: json.fromNumber((x) => tryAsServiceGas(x)),
18212
- bytes: json.fromNumber((x) => tryAsU64(x)),
18213
- items: "number",
18214
- },
18215
- ({ code_hash, balance, min_item_gas, min_memo_gas, bytes, items }) => {
18216
- return ServiceAccountInfo.create({
18217
- codeHash: code_hash,
18218
- balance,
18219
- accumulateMinGas: min_item_gas,
18220
- onTransferMinGas: min_memo_gas,
18221
- storageUtilisationBytes: bytes,
18222
- storageUtilisationCount: items,
18223
- gratisStorage: tryAsU64(0),
18224
- created: tryAsTimeSlot(0),
18225
- lastAccumulation: tryAsTimeSlot(0),
18226
- parentService: tryAsServiceId(0),
18227
- });
18228
- },
18229
- );
18230
-
18231
- code_hash!: CodeHash;
18232
- balance!: U64;
18233
- min_item_gas!: ServiceGas;
18234
- min_memo_gas!: ServiceGas;
18235
- bytes!: U64;
18236
- items!: U32;
18237
- }
18238
-
18239
- declare class JsonServiceInfo extends JsonServiceInfoPre067 {
18136
+ declare class JsonServiceInfo {
18240
18137
  static fromJson = json.object<JsonServiceInfo, ServiceAccountInfo>(
18241
18138
  {
18242
18139
  code_hash: fromJson.bytes32(),
@@ -18277,6 +18174,12 @@ declare class JsonServiceInfo extends JsonServiceInfoPre067 {
18277
18174
  },
18278
18175
  );
18279
18176
 
18177
+ code_hash!: CodeHash;
18178
+ balance!: U64;
18179
+ min_item_gas!: ServiceGas;
18180
+ min_memo_gas!: ServiceGas;
18181
+ bytes!: U64;
18182
+ items!: U32;
18280
18183
  creation_slot!: TimeSlot;
18281
18184
  deposit_offset!: U64;
18282
18185
  last_accumulation_slot!: TimeSlot;
@@ -18330,9 +18233,7 @@ declare class JsonService {
18330
18233
  {
18331
18234
  id: "number",
18332
18235
  data: {
18333
- service: Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
18334
- ? JsonServiceInfo.fromJson
18335
- : JsonServiceInfoPre067.fromJson,
18236
+ service: JsonServiceInfo.fromJson,
18336
18237
  preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
18337
18238
  storage: json.optional(json.array(JsonStorageItem.fromJson)),
18338
18239
  lookup_meta: json.optional(json.array(lookupMetaFromJson)),
@@ -18470,7 +18371,7 @@ type JsonRecentBlockState = {
18470
18371
  reported: WorkPackageInfo[];
18471
18372
  };
18472
18373
 
18473
- declare const recentBlocksFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
18374
+ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
18474
18375
  {
18475
18376
  history: json.array(recentBlockStateFromJson),
18476
18377
  mmr: {
@@ -18494,49 +18395,6 @@ type JsonRecentBlocks = {
18494
18395
  };
18495
18396
  };
18496
18397
 
18497
- declare const legacyRecentBlockStateFromJson = json.object<JsonRecentBlockStateLegacy, LegacyBlockState>(
18498
- {
18499
- header_hash: fromJson.bytes32(),
18500
- mmr: {
18501
- peaks: json.array(json.nullable(fromJson.bytes32())),
18502
- },
18503
- state_root: fromJson.bytes32(),
18504
- reported: json.array(reportedWorkPackageFromJson),
18505
- },
18506
- ({ header_hash, mmr, state_root, reported }) => {
18507
- return {
18508
- headerHash: header_hash,
18509
- mmr,
18510
- postStateRoot: state_root,
18511
- reported: HashDictionary.fromEntries(reported.map((x) => [x.workPackageHash, x])),
18512
- };
18513
- },
18514
- );
18515
-
18516
- type JsonRecentBlockStateLegacy = {
18517
- header_hash: HeaderHash;
18518
- mmr: {
18519
- peaks: Array<KeccakHash | null>;
18520
- };
18521
- state_root: StateRootHash;
18522
- reported: WorkPackageInfo[];
18523
- };
18524
-
18525
- declare const legacyRecentBlocksFromJson = json.object<LegacyBlocksState, RecentBlocksHistory>(
18526
- json.array(legacyRecentBlockStateFromJson),
18527
- (blocks) => {
18528
- return RecentBlocksHistory.legacyCreate(
18529
- LegacyRecentBlocks.create({
18530
- blocks,
18531
- }),
18532
- );
18533
- },
18534
- );
18535
-
18536
- declare const recentBlocksHistoryFromJson = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
18537
- ? recentBlocksFromJson
18538
- : legacyRecentBlocksFromJson;
18539
-
18540
18398
  declare const ticketFromJson: FromJson<Ticket> = json.object<Ticket>(
18541
18399
  {
18542
18400
  id: fromJson.bytes32(),
@@ -18878,138 +18736,6 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
18878
18736
  },
18879
18737
  );
18880
18738
 
18881
- type JsonStateDumpPre067 = {
18882
- alpha: AuthorizerHash[][];
18883
- varphi: AuthorizerHash[][];
18884
- beta: State["recentBlocks"] | null;
18885
- gamma: {
18886
- gamma_k: State["nextValidatorData"];
18887
- gamma_z: State["epochRoot"];
18888
- gamma_s: TicketsOrKeys;
18889
- gamma_a: State["ticketsAccumulator"];
18890
- };
18891
- psi: State["disputesRecords"];
18892
- eta: State["entropy"];
18893
- iota: State["designatedValidatorData"];
18894
- kappa: State["currentValidatorData"];
18895
- lambda: State["previousValidatorData"];
18896
- rho: State["availabilityAssignment"];
18897
- tau: State["timeslot"];
18898
- chi: {
18899
- chi_m: PrivilegedServices["manager"];
18900
- chi_a: ServiceId; // NOTE: [MaSo] pre067
18901
- chi_v: PrivilegedServices["validatorsManager"];
18902
- chi_g: PrivilegedServices["autoAccumulateServices"] | null;
18903
- };
18904
- pi: JsonStatisticsData;
18905
- theta: State["accumulationQueue"];
18906
- xi: PerEpochBlock<WorkPackageHash[]>;
18907
- accounts: InMemoryService[];
18908
- };
18909
-
18910
- declare const fullStateDumpFromJsonPre067 = (spec: ChainSpec) =>
18911
- json.object<JsonStateDumpPre067, InMemoryState>(
18912
- {
18913
- alpha: json.array(json.array(fromJson.bytes32<AuthorizerHash>())),
18914
- varphi: json.array(json.array(fromJson.bytes32<AuthorizerHash>())),
18915
- beta: json.nullable(recentBlocksHistoryFromJson),
18916
- gamma: {
18917
- gamma_k: json.array(validatorDataFromJson),
18918
- gamma_a: json.array(ticketFromJson),
18919
- gamma_s: TicketsOrKeys.fromJson,
18920
- gamma_z: json.fromString((v) => Bytes.parseBytes(v, BANDERSNATCH_RING_ROOT_BYTES).asOpaque()),
18921
- },
18922
- psi: disputesRecordsFromJson,
18923
- eta: json.array(fromJson.bytes32<EntropyHash>()),
18924
- iota: json.array(validatorDataFromJson),
18925
- kappa: json.array(validatorDataFromJson),
18926
- lambda: json.array(validatorDataFromJson),
18927
- rho: json.array(json.nullable(availabilityAssignmentFromJson)),
18928
- tau: "number",
18929
- chi: {
18930
- chi_m: "number",
18931
- chi_a: "number",
18932
- chi_v: "number",
18933
- chi_g: json.nullable(
18934
- json.array({
18935
- service: "number",
18936
- gasLimit: json.fromNumber((v) => tryAsServiceGas(v)),
18937
- }),
18938
- ),
18939
- },
18940
- pi: JsonStatisticsData.fromJson,
18941
- theta: json.array(json.array(notYetAccumulatedFromJson)),
18942
- xi: json.array(json.array(fromJson.bytes32())),
18943
- accounts: json.array(JsonService.fromJson),
18944
- },
18945
- ({
18946
- alpha,
18947
- varphi,
18948
- beta,
18949
- gamma,
18950
- psi,
18951
- eta,
18952
- iota,
18953
- kappa,
18954
- lambda,
18955
- rho,
18956
- tau,
18957
- chi,
18958
- pi,
18959
- theta,
18960
- xi,
18961
- accounts,
18962
- }): InMemoryState => {
18963
- return InMemoryState.create({
18964
- authPools: tryAsPerCore(
18965
- alpha.map((perCore) => {
18966
- if (perCore.length > MAX_AUTH_POOL_SIZE) {
18967
- throw new Error(`AuthPools: expected less than ${MAX_AUTH_POOL_SIZE}, got ${perCore.length}`);
18968
- }
18969
- return asKnownSize(perCore);
18970
- }),
18971
- spec,
18972
- ),
18973
- authQueues: tryAsPerCore(
18974
- varphi.map((perCore) => {
18975
- if (perCore.length !== AUTHORIZATION_QUEUE_SIZE) {
18976
- throw new Error(`AuthQueues: expected ${AUTHORIZATION_QUEUE_SIZE}, got: ${perCore.length}`);
18977
- }
18978
- return asKnownSize(perCore);
18979
- }),
18980
- spec,
18981
- ),
18982
- recentBlocks: beta ?? RecentBlocksHistory.empty(),
18983
- nextValidatorData: gamma.gamma_k,
18984
- epochRoot: gamma.gamma_z,
18985
- sealingKeySeries: TicketsOrKeys.toSafroleSealingKeys(gamma.gamma_s, spec),
18986
- ticketsAccumulator: gamma.gamma_a,
18987
- disputesRecords: psi,
18988
- entropy: eta,
18989
- designatedValidatorData: iota,
18990
- currentValidatorData: kappa,
18991
- previousValidatorData: lambda,
18992
- availabilityAssignment: rho,
18993
- timeslot: tau,
18994
- privilegedServices: PrivilegedServices.create({
18995
- manager: chi.chi_m,
18996
- authManager: tryAsPerCore(new Array(spec.coresCount).fill(chi.chi_a), spec),
18997
- validatorsManager: chi.chi_v,
18998
- autoAccumulateServices: chi.chi_g ?? [],
18999
- }),
19000
- statistics: JsonStatisticsData.toStatisticsData(spec, pi),
19001
- accumulationQueue: theta,
19002
- recentlyAccumulated: tryAsPerEpochBlock(
19003
- xi.map((x) => HashSet.from(x)),
19004
- spec,
19005
- ),
19006
- services: new Map(accounts.map((x) => [x.serviceId, x])),
19007
- // NOTE Field not present in pre067, added here for compatibility reasons
19008
- accumulationOutputLog: [],
19009
- });
19010
- },
19011
- );
19012
-
19013
18739
  type index$1_JsonAvailabilityAssignment = JsonAvailabilityAssignment;
19014
18740
  type index$1_JsonCoreStatistics = JsonCoreStatistics;
19015
18741
  declare const index$1_JsonCoreStatistics: typeof JsonCoreStatistics;
@@ -19019,19 +18745,15 @@ type index$1_JsonLookupMeta = JsonLookupMeta;
19019
18745
  type index$1_JsonPreimageItem = JsonPreimageItem;
19020
18746
  declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
19021
18747
  type index$1_JsonRecentBlockState = JsonRecentBlockState;
19022
- type index$1_JsonRecentBlockStateLegacy = JsonRecentBlockStateLegacy;
19023
18748
  type index$1_JsonRecentBlocks = JsonRecentBlocks;
19024
18749
  type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
19025
18750
  type index$1_JsonService = JsonService;
19026
18751
  declare const index$1_JsonService: typeof JsonService;
19027
18752
  type index$1_JsonServiceInfo = JsonServiceInfo;
19028
18753
  declare const index$1_JsonServiceInfo: typeof JsonServiceInfo;
19029
- type index$1_JsonServiceInfoPre067 = JsonServiceInfoPre067;
19030
- declare const index$1_JsonServiceInfoPre067: typeof JsonServiceInfoPre067;
19031
18754
  type index$1_JsonServiceStatistics = JsonServiceStatistics;
19032
18755
  declare const index$1_JsonServiceStatistics: typeof JsonServiceStatistics;
19033
18756
  type index$1_JsonStateDump = JsonStateDump;
19034
- type index$1_JsonStateDumpPre067 = JsonStateDumpPre067;
19035
18757
  type index$1_JsonStatisticsData = JsonStatisticsData;
19036
18758
  declare const index$1_JsonStatisticsData: typeof JsonStatisticsData;
19037
18759
  type index$1_JsonStorageItem = JsonStorageItem;
@@ -19044,21 +18766,17 @@ declare const index$1_TicketsOrKeys: typeof TicketsOrKeys;
19044
18766
  declare const index$1_availabilityAssignmentFromJson: typeof availabilityAssignmentFromJson;
19045
18767
  declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
19046
18768
  declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
19047
- declare const index$1_fullStateDumpFromJsonPre067: typeof fullStateDumpFromJsonPre067;
19048
- declare const index$1_legacyRecentBlockStateFromJson: typeof legacyRecentBlockStateFromJson;
19049
- declare const index$1_legacyRecentBlocksFromJson: typeof legacyRecentBlocksFromJson;
19050
18769
  declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
19051
18770
  declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
19052
18771
  declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
19053
- declare const index$1_recentBlocksFromJson: typeof recentBlocksFromJson;
19054
18772
  declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
19055
18773
  declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
19056
18774
  declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEntryFromJson;
19057
18775
  declare const index$1_ticketFromJson: typeof ticketFromJson;
19058
18776
  declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
19059
18777
  declare namespace index$1 {
19060
- export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceInfoPre067 as JsonServiceInfoPre067, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_fullStateDumpFromJsonPre067 as fullStateDumpFromJsonPre067, index$1_legacyRecentBlockStateFromJson as legacyRecentBlockStateFromJson, index$1_legacyRecentBlocksFromJson as legacyRecentBlocksFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksFromJson as recentBlocksFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
19061
- export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlockStateLegacy as JsonRecentBlockStateLegacy, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_JsonStateDumpPre067 as JsonStateDumpPre067, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
18778
+ export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
18779
+ export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
19062
18780
  }
19063
18781
 
19064
18782
  /** Helper function to create most used hashes in the block */
@@ -19257,7 +18975,7 @@ declare class WorkPackageExecutor {
19257
18975
 
19258
18976
  declare class PvmExecutor {
19259
18977
  private readonly pvm: HostCalls;
19260
- private hostCalls = new HostCalls();
18978
+ private hostCalls = new HostCallsManager({ missing: new Missing() });
19261
18979
  private pvmInstanceManager = new PvmInstanceManager(4);
19262
18980
 
19263
18981
  constructor(private serviceCode: BytesBlob) {