@typeberry/lib 0.0.1-cf41358 → 0.0.2-5755578

This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (4)
  1. package/configs/index.d.ts +74 -0
  2. package/index.d.ts +477 -774
  3. package/index.js +1711 -1168
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -59,10 +59,8 @@ declare namespace index$s {
  }
 
  declare enum GpVersion {
- V0_6_5 = "0.6.5",
- V0_6_6 = "0.6.6",
  V0_6_7 = "0.6.7",
- V0_7_0 = "0.7.0-preview",
+ V0_7_0 = "0.7.0",
  V0_7_1 = "0.7.1-preview",
  }
 
@@ -73,16 +71,10 @@ declare enum TestSuite {
 
  declare const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
 
- declare const ALL_VERSIONS_IN_ORDER = [
- GpVersion.V0_6_5,
- GpVersion.V0_6_6,
- GpVersion.V0_6_7,
- GpVersion.V0_7_0,
- GpVersion.V0_7_1,
- ];
+ declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1];
 
  declare const env = typeof process === "undefined" ? {} : process.env;
- declare const DEFAULT_VERSION = GpVersion.V0_6_7;
+ declare const DEFAULT_VERSION = GpVersion.V0_7_0;
 
  declare let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
  declare let CURRENT_SUITE = parseCurrentSuite(env.TEST_SUITE) ?? DEFAULT_SUITE;
 
@@ -168,6 +160,10 @@ declare class Compatibility {
  }
  }
 
+ declare function isBrowser() {
+ return typeof process === "undefined" || typeof process.abort === "undefined";
+ }
+
  /**
  * A function to perform runtime assertions.
  *
@@ -286,20 +282,19 @@ declare function inspect<T>(val: T): string {
  }
 
  /** Utility function to measure time taken for some operation [ms]. */
- declare const measure =
- typeof process === "undefined"
- ? (id: string) => {
- const start = performance.now();
- return () => `${id} took ${performance.now() - start}ms`;
- }
- : (id: string) => {
- const start = process.hrtime.bigint();
- return () => {
- const tookNano = process.hrtime.bigint() - start;
- const tookMilli = Number(tookNano / 1_000_000n).toFixed(2);
- return `${id} took ${tookMilli}ms`;
- };
+ declare const measure = isBrowser()
+ ? (id: string) => {
+ const start = performance.now();
+ return () => `${id} took ${performance.now() - start}ms`;
+ }
+ : (id: string) => {
+ const start = process.hrtime.bigint();
+ return () => {
+ const tookNano = process.hrtime.bigint() - start;
+ const tookMilli = Number(tookNano / 1_000_000n).toFixed(2);
+ return `${id} took ${tookMilli}ms`;
  };
+ };
 
  /** A class that adds `toString` method that prints all properties of an object. */
  declare abstract class WithDebug {
@@ -498,6 +493,8 @@ type DeepEqualOptions = {
  errorsCollector?: ErrorsCollector;
  };
 
+ declare let oomWarningPrinted = false;
+
  /** Deeply compare `actual` and `expected` values. */
  declare function deepEqual<T>(
  actual: T | undefined,
@@ -530,7 +527,7 @@ declare function deepEqual<T>(
  try {
  assert.strictEqual(actualDisp, expectedDisp, message);
  } catch (e) {
- if (isOoMWorkaroundNeeded) {
+ if (isOoMWorkaroundNeeded && !oomWarningPrinted) {
  console.warn(
  [
  "Stacktrace may be crappy because of a problem in nodejs.",
@@ -538,6 +535,7 @@ declare function deepEqual<T>(
  "Maybe we do not need it anymore",
  ].join("\n"),
  );
+ oomWarningPrinted = true;
  }
  throw e;
  }
@@ -786,17 +784,19 @@ declare const index$r_ensure: typeof ensure;
  declare const index$r_env: typeof env;
  declare const index$r_getAllKeysSorted: typeof getAllKeysSorted;
  declare const index$r_inspect: typeof inspect;
+ declare const index$r_isBrowser: typeof isBrowser;
  declare const index$r_isResult: typeof isResult;
  declare const index$r_isTaggedError: typeof isTaggedError;
  declare const index$r_maybeTaggedErrorToString: typeof maybeTaggedErrorToString;
  declare const index$r_measure: typeof measure;
+ declare const index$r_oomWarningPrinted: typeof oomWarningPrinted;
  declare const index$r_parseCurrentSuite: typeof parseCurrentSuite;
  declare const index$r_parseCurrentVersion: typeof parseCurrentVersion;
  declare const index$r_resultToString: typeof resultToString;
  declare const index$r_seeThrough: typeof seeThrough;
  declare const index$r_trimStack: typeof trimStack;
  declare namespace index$r {
- export { index$r_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$r_CURRENT_SUITE as CURRENT_SUITE, index$r_CURRENT_VERSION as CURRENT_VERSION, index$r_Compatibility as Compatibility, index$r_DEFAULT_SUITE as DEFAULT_SUITE, index$r_DEFAULT_VERSION as DEFAULT_VERSION, index$r_ErrorsCollector as ErrorsCollector, index$r_GpVersion as GpVersion, Result$2 as Result, index$r_RichTaggedError as RichTaggedError, index$r_TEST_COMPARE_USING as TEST_COMPARE_USING, index$r_TestSuite as TestSuite, index$r_WithDebug as WithDebug, index$r___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$r_asOpaqueType as asOpaqueType, index$r_assertEmpty as assertEmpty, index$r_assertNever as assertNever, index$r_callCompareFunction as callCompareFunction, index$r_cast as cast, index$r_check as check, index$r_deepEqual as deepEqual, index$r_ensure as ensure, index$r_env as env, index$r_getAllKeysSorted as getAllKeysSorted, index$r_inspect as inspect, index$r_isResult as isResult, index$r_isTaggedError as isTaggedError, index$r_maybeTaggedErrorToString as maybeTaggedErrorToString, index$r_measure as measure, index$r_parseCurrentSuite as parseCurrentSuite, index$r_parseCurrentVersion as parseCurrentVersion, index$r_resultToString as resultToString, index$r_seeThrough as seeThrough, index$r_trimStack as trimStack };
+ export { index$r_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$r_CURRENT_SUITE as CURRENT_SUITE, index$r_CURRENT_VERSION as CURRENT_VERSION, index$r_Compatibility as Compatibility, index$r_DEFAULT_SUITE as DEFAULT_SUITE, index$r_DEFAULT_VERSION as DEFAULT_VERSION, index$r_ErrorsCollector as ErrorsCollector, index$r_GpVersion as GpVersion, Result$2 as Result, index$r_RichTaggedError as RichTaggedError, index$r_TEST_COMPARE_USING as TEST_COMPARE_USING, index$r_TestSuite as TestSuite, index$r_WithDebug as WithDebug, index$r___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$r_asOpaqueType as asOpaqueType, index$r_assertEmpty as assertEmpty, index$r_assertNever as assertNever, index$r_callCompareFunction as callCompareFunction, index$r_cast as cast, index$r_check as check, index$r_deepEqual as deepEqual, index$r_ensure as ensure, index$r_env as env, index$r_getAllKeysSorted as getAllKeysSorted, index$r_inspect as inspect, index$r_isBrowser as isBrowser, index$r_isResult as isResult, index$r_isTaggedError as isTaggedError, index$r_maybeTaggedErrorToString as maybeTaggedErrorToString, index$r_measure as measure, index$r_oomWarningPrinted as oomWarningPrinted, index$r_parseCurrentSuite as parseCurrentSuite, index$r_parseCurrentVersion as parseCurrentVersion, index$r_resultToString as resultToString, index$r_seeThrough as seeThrough, index$r_trimStack as trimStack };
  export type { index$r_DeepEqualOptions as DeepEqualOptions, index$r_EnumMapping as EnumMapping, index$r_ErrorResult as ErrorResult, index$r_OK as OK, index$r_OkResult as OkResult, index$r_Opaque as Opaque, index$r_StringLiteral as StringLiteral, index$r_TaggedError as TaggedError, index$r_TokenOf as TokenOf, index$r_Uninstantiable as Uninstantiable, index$r_WithOpaque as WithOpaque };
  }
 
@@ -4464,6 +4464,84 @@ declare namespace index$m {
  export type { index$m_HashWithZeroedBit as HashWithZeroedBit, index$m_ImmutableHashDictionary as ImmutableHashDictionary, index$m_ImmutableHashSet as ImmutableHashSet, index$m_ImmutableSortedArray as ImmutableSortedArray, index$m_ImmutableSortedSet as ImmutableSortedSet, index$m_KeyMapper as KeyMapper, index$m_KeyMappers as KeyMappers, index$m_KnownSize as KnownSize, index$m_KnownSizeArray as KnownSizeArray, index$m_KnownSizeId as KnownSizeId, index$m_NestedMaps as NestedMaps };
  }
 
+ declare namespace bandersnatch_d_exports {
+ export { batch_verify_tickets, __wbg_init$2 as default, derive_public_key, initSync$2 as initSync, ring_commitment, verify_seal };
+ export type { InitInput$2 as InitInput, InitOutput$2 as InitOutput, SyncInitInput$2 as SyncInitInput };
+ }
+ /* tslint:disable */
+ /* eslint-disable */
+ /**
+ * @param {Uint8Array} keys
+ * @returns {Uint8Array}
+ */
+ declare function ring_commitment(keys: Uint8Array): Uint8Array;
+ /**
+ * Derive Private and Public Key from Seed
+ *
+ * returns: `Vec<u8>` containing the exit (1 byte) status followed by the (32 bytes) public key
+ * @param {Uint8Array} seed
+ * @returns {Uint8Array}
+ */
+ declare function derive_public_key(seed: Uint8Array): Uint8Array;
+ /**
+ * Seal verification as defined in:
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/0eff000eff00?v=0.6.4
+ * or
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/0e54010e5401?v=0.6.4
+ * @param {Uint8Array} keys
+ * @param {number} signer_key_index
+ * @param {Uint8Array} seal_data
+ * @param {Uint8Array} payload
+ * @param {Uint8Array} aux_data
+ * @returns {Uint8Array}
+ */
+ declare function verify_seal(keys: Uint8Array, signer_key_index: number, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
+ /**
+ * Verify multiple tickets at once as defined in:
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/0f3e000f3e00?v=0.6.4
+ *
+ * NOTE: the aux_data of VRF function is empty!
+ * @param {Uint8Array} keys
+ * @param {Uint8Array} tickets_data
+ * @param {number} vrf_input_data_len
+ * @returns {Uint8Array}
+ */
+ declare function batch_verify_tickets(keys: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
+ type InitInput$2 = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
+ interface InitOutput$2 {
+ readonly memory: WebAssembly.Memory;
+ readonly ring_commitment: (a: number, b: number, c: number) => void;
+ readonly derive_public_key: (a: number, b: number, c: number) => void;
+ readonly verify_seal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number) => void;
+ readonly batch_verify_tickets: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
+ readonly __wbindgen_add_to_stack_pointer: (a: number) => number;
+ readonly __wbindgen_malloc: (a: number, b: number) => number;
+ readonly __wbindgen_free: (a: number, b: number, c: number) => void;
+ }
+ type SyncInitInput$2 = BufferSource | WebAssembly.Module;
+ /**
+ * Instantiates the given `module`, which can either be bytes or
+ * a precompiled `WebAssembly.Module`.
+ *
+ * @param {SyncInitInput} module
+ *
+ * @returns {InitOutput}
+ */
+ declare function initSync$2(module: SyncInitInput$2): InitOutput$2;
+
+ /**
+ * If `module_or_path` is {RequestInfo} or {URL}, makes a request and
+ * for everything else, calls `WebAssembly.instantiate` directly.
+ *
+ * @param {InitInput | Promise<InitInput>} module_or_path
+ *
+ * @returns {Promise<InitOutput>}
+ */
+ declare function __wbg_init$2(module_or_path?: InitInput$2 | Promise<InitInput$2>): Promise<InitOutput$2>;
+ //#endregion
+ //#region native/index.d.ts
+ declare function initAll(): Promise<void>;
+
  /** ED25519 private key size. */
  declare const ED25519_PRIV_KEY_BYTES = 32;
  type ED25519_PRIV_KEY_BYTES = typeof ED25519_PRIV_KEY_BYTES;
@@ -4556,7 +4634,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
  offset += messageLength;
  }
 
- const result = Array.from(verify_ed25519(data)).map((x) => x === 1);
+ const result = Array.from(ed25519.verify_ed25519(data)).map((x) => x === 1);
  return Promise.resolve(result);
  }
 
@@ -4578,7 +4656,7 @@ declare async function verifyBatch<T extends BytesBlob>(input: Input<T>[]): Prom
 
  const data = BytesBlob.blobFromParts(first, ...rest).raw;
 
- return Promise.resolve(verify_ed25519_batch(data));
+ return Promise.resolve(ed25519.verify_ed25519_batch(data));
  }
 
  type ed25519_ED25519_KEY_BYTES = ED25519_KEY_BYTES;
@@ -4598,59 +4676,6 @@ declare namespace ed25519 {
  export type { ed25519_ED25519_KEY_BYTES as ED25519_KEY_BYTES, ed25519_ED25519_PRIV_KEY_BYTES as ED25519_PRIV_KEY_BYTES, ed25519_ED25519_SIGNATURE_BYTES as ED25519_SIGNATURE_BYTES, ed25519_Ed25519Key as Ed25519Key, ed25519_Ed25519Signature as Ed25519Signature, ed25519_Input as Input };
  }
 
- /* tslint:disable */
- /* eslint-disable */
- /**
- * @param {Uint8Array} keys
- * @returns {Uint8Array}
- */
- declare function ring_commitment(keys: Uint8Array): Uint8Array;
- /**
- * Derive Private and Public Key from Seed
- *
- * returns: `Vec<u8>` containing the exit (1 byte) status followed by the (32 bytes) public key
- * @param {Uint8Array} seed
- * @returns {Uint8Array}
- */
- declare function derive_public_key(seed: Uint8Array): Uint8Array;
- /**
- * Seal verification as defined in:
- * https://graypaper.fluffylabs.dev/#/68eaa1f/0eff000eff00?v=0.6.4
- * or
- * https://graypaper.fluffylabs.dev/#/68eaa1f/0e54010e5401?v=0.6.4
- * @param {Uint8Array} keys
- * @param {number} signer_key_index
- * @param {Uint8Array} seal_data
- * @param {Uint8Array} payload
- * @param {Uint8Array} aux_data
- * @returns {Uint8Array}
- */
- declare function verify_seal(keys: Uint8Array, signer_key_index: number, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
- /**
- * Verify multiple tickets at once as defined in:
- * https://graypaper.fluffylabs.dev/#/68eaa1f/0f3e000f3e00?v=0.6.4
- *
- * NOTE: the aux_data of VRF function is empty!
- * @param {Uint8Array} keys
- * @param {Uint8Array} tickets_data
- * @param {number} vrf_input_data_len
- * @returns {Uint8Array}
- */
- declare function batch_verify_tickets(keys: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
-
- declare const bandersnatch_d_batch_verify_tickets: typeof batch_verify_tickets;
- declare const bandersnatch_d_derive_public_key: typeof derive_public_key;
- declare const bandersnatch_d_ring_commitment: typeof ring_commitment;
- declare const bandersnatch_d_verify_seal: typeof verify_seal;
- declare namespace bandersnatch_d {
- export {
- bandersnatch_d_batch_verify_tickets as batch_verify_tickets,
- bandersnatch_d_derive_public_key as derive_public_key,
- bandersnatch_d_ring_commitment as ring_commitment,
- bandersnatch_d_verify_seal as verify_seal,
- };
- }
-
  /** Bandersnatch public key size. */
  declare const BANDERSNATCH_KEY_BYTES = 32;
  type BANDERSNATCH_KEY_BYTES = typeof BANDERSNATCH_KEY_BYTES;
@@ -4708,7 +4733,7 @@ type BlsKey = Opaque<Bytes<BLS_KEY_BYTES>, "BlsKey">;
 
  /** Derive a Bandersnatch public key from a seed. */
  declare function publicKey(seed: Uint8Array): BandersnatchKey {
- const key = derive_public_key(seed);
+ const key = bandersnatch.derive_public_key(seed);
 
  check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");
 
@@ -4834,7 +4859,7 @@ declare const index$l_bandersnatch: typeof bandersnatch;
  declare const index$l_ed25519: typeof ed25519;
  declare const index$l_keyDerivation: typeof keyDerivation;
  declare namespace index$l {
- export { index$l_Ed25519Pair as Ed25519Pair, index$l_bandersnatch as bandersnatch, bandersnatch_d as bandersnatchWasm, index$l_ed25519 as ed25519, index$l_keyDerivation as keyDerivation };
+ export { index$l_Ed25519Pair as Ed25519Pair, index$l_bandersnatch as bandersnatch, bandersnatch_d_exports as bandersnatchWasm, index$l_ed25519 as ed25519, initAll as initWasm, index$l_keyDerivation as keyDerivation };
  export type { index$l_BANDERSNATCH_KEY_BYTES as BANDERSNATCH_KEY_BYTES, index$l_BANDERSNATCH_PROOF_BYTES as BANDERSNATCH_PROOF_BYTES, index$l_BANDERSNATCH_RING_ROOT_BYTES as BANDERSNATCH_RING_ROOT_BYTES, index$l_BANDERSNATCH_VRF_SIGNATURE_BYTES as BANDERSNATCH_VRF_SIGNATURE_BYTES, index$l_BLS_KEY_BYTES as BLS_KEY_BYTES, index$l_BandersnatchKey as BandersnatchKey, index$l_BandersnatchProof as BandersnatchProof, index$l_BandersnatchRingRoot as BandersnatchRingRoot, index$l_BandersnatchSecretSeed as BandersnatchSecretSeed, index$l_BandersnatchVrfSignature as BandersnatchVrfSignature, index$l_BlsKey as BlsKey, index$l_ED25519_KEY_BYTES as ED25519_KEY_BYTES, index$l_ED25519_PRIV_KEY_BYTES as ED25519_PRIV_KEY_BYTES, index$l_ED25519_SIGNATURE_BYTES as ED25519_SIGNATURE_BYTES, index$l_Ed25519Key as Ed25519Key, index$l_Ed25519SecretSeed as Ed25519SecretSeed, index$l_Ed25519Signature as Ed25519Signature, KeySeed as PublicKeySeed, index$l_SEED_SIZE as SEED_SIZE };
  }
 
@@ -4914,6 +4939,8 @@ declare class ChainSpec extends WithDebug {
  readonly maxBlockGas: U64;
  /** `G_R`: The gas allocated to invoke a work-package’s Refine logic. */
  readonly maxRefineGas: U64;
+ /** `L`: The maximum age in timeslots of the lookup anchor. */
+ readonly maxLookupAnchorAge: U32;
 
  constructor(data: Omit<ChainSpec, "validatorsSuperMajority" | "thirdOfValidators" | "erasureCodedPieceSize">) {
  super();
@@ -4933,6 +4960,7 @@ declare class ChainSpec extends WithDebug {
  this.erasureCodedPieceSize = tryAsU32(EC_SEGMENT_SIZE / data.numberECPiecesPerSegment);
  this.maxBlockGas = data.maxBlockGas;
  this.maxRefineGas = data.maxRefineGas;
+ this.maxLookupAnchorAge = data.maxLookupAnchorAge;
  }
  }
 
@@ -4951,6 +4979,8 @@ declare const tinyChainSpec = new ChainSpec({
  preimageExpungePeriod: tryAsU32(32),
  maxBlockGas: tryAsU64(20_000_000),
  maxRefineGas: tryAsU64(1_000_000_000),
+ // https://github.com/davxy/jam-conformance/pull/47/files#diff-27e26142b3a96e407dab40d388b63d553f5d9cdb66dec58cd93e63dd434f9e45R260
+ maxLookupAnchorAge: tryAsU32(24),
  });
 
  /**
@@ -4970,6 +5000,7 @@ declare const fullChainSpec = new ChainSpec({
  preimageExpungePeriod: tryAsU32(19_200),
  maxBlockGas: tryAsU64(3_500_000_000),
  maxRefineGas: tryAsU64(5_000_000_000),
+ maxLookupAnchorAge: tryAsU32(14_400),
  });
 
  /**
@@ -5966,17 +5997,13 @@ declare enum WorkExecResultKind {
  /** `☇`: unexpected program termination. */
  panic = 2,
  /** `⊚`: the number of exports made was invalidly reported. */
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
- incorrectNumberOfExports = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 3 : -1,
+ incorrectNumberOfExports = 3,
  /** `⊖`: the size of the digest (refinement output) would cross the acceptable limit. */
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
- digestTooBig = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 4 : -1,
+ digestTooBig = 4,
  /** `BAD`: service code was not available for lookup in state. */
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
- badCode = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 5 : 3,
+ badCode = 5,
  /** `BIG`: the code was too big (beyond the maximum allowed size `W_C`) */
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
- codeOversize = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 6 : 4,
+ codeOversize = 6,
  }
 
  /** The execution result of some work-package. */
@@ -6246,18 +6273,15 @@ declare const WorkReportCodec = codec.Class(WorkReportNoCodec, {
  declare const WorkReportCodecPre070 = codec.Class(WorkReportNoCodec, {
  workPackageSpec: WorkPackageSpec.Codec,
  context: RefineContext.Codec,
- coreIndex:
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_5) && !Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5)
- ? codec.varU32.convert(
- (o) => tryAsU32(o),
- (i) => {
- if (!isU16(i)) {
- throw new Error(`Core index exceeds U16: ${i}`);
- }
- return tryAsCoreIndex(i);
- },
- )
- : codec.u16.asOpaque<CoreIndex>(),
+ coreIndex: codec.varU32.convert(
+ (o) => tryAsU32(o),
+ (i) => {
+ if (!isU16(i)) {
+ throw new Error(`Core index exceeds U16: ${i}`);
+ }
+ return tryAsCoreIndex(i);
+ },
+ ),
  authorizerHash: codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(),
  authorizationOutput: codec.blob,
  segmentRootLookup: readonlyArray(codec.sequenceVarLen(WorkPackageInfo.Codec)),
@@ -6524,6 +6548,22 @@ declare class ValidatorKeys extends WithDebug {
  }
  }
 
+ declare class TicketsMarker extends WithDebug {
+ static Codec = codec.Class(TicketsMarker, {
+ tickets: codecPerEpochBlock(Ticket.Codec),
+ });
+
+ static create({ tickets }: CodecRecord<TicketsMarker>) {
+ return new TicketsMarker(tickets);
+ }
+
+ private constructor(public readonly tickets: PerEpochBlock<Ticket>) {
+ super();
+ }
+ }
+
+ type TicketsMarkerView = DescribedBy<typeof TicketsMarker.Codec.View>;
+
  /**
  * For the first block in a new epoch, the epoch marker is set
  * and contains the epoch randomness and validator keys
@@ -6554,6 +6594,8 @@ declare class EpochMarker extends WithDebug {
  }
  }
 
+ type EpochMarkerView = DescribedBy<typeof EpochMarker.Codec.View>;
+
  /**
  * Return an encoded header without the seal components.
  *
@@ -6576,7 +6618,7 @@ declare const legacyDescriptor = {
  extrinsicHash: codec.bytes(HASH_SIZE).asOpaque<ExtrinsicHash>(),
  timeSlotIndex: codec.u32.asOpaque<TimeSlot>(),
  epochMarker: codec.optional(EpochMarker.Codec),
- ticketsMarker: codec.optional(codecPerEpochBlock(Ticket.Codec)),
+ ticketsMarker: codec.optional(TicketsMarker.Codec),
  offendersMarker: codec.sequenceVarLen(codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>()),
  bandersnatchBlockAuthorIndex: codec.u16.asOpaque<ValidatorIndex>(),
  entropySource: codec.bytes(BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque<BandersnatchVrfSignature>(),
@@ -6599,7 +6641,7 @@ declare class Header extends WithDebug {
  extrinsicHash: codec.bytes(HASH_SIZE).asOpaque<ExtrinsicHash>(),
  timeSlotIndex: codec.u32.asOpaque<TimeSlot>(),
  epochMarker: codec.optional(EpochMarker.Codec),
- ticketsMarker: codec.optional(codecPerEpochBlock(Ticket.Codec)),
+ ticketsMarker: codec.optional(TicketsMarker.Codec),
  bandersnatchBlockAuthorIndex: codec.u16.asOpaque<ValidatorIndex>(),
  entropySource: codec.bytes(BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque<BandersnatchVrfSignature>(),
  offendersMarker: codec.sequenceVarLen(codec.bytes(ED25519_KEY_BYTES).asOpaque<Ed25519Key>()),
@@ -6632,7 +6674,7 @@ declare class Header extends WithDebug {
  * `H_w`: Winning tickets provides the series of 600 slot sealing "tickets"
  * for the next epoch.
  */
- public ticketsMarker: PerEpochBlock<Ticket> | null = null;
+ public ticketsMarker: TicketsMarker | null = null;
  /** `H_i`: Block author's index in the current validator set. */
  public bandersnatchBlockAuthorIndex: ValidatorIndex = tryAsValidatorIndex(0);
  /** `H_v`: Entropy-yielding VRF signature. */
@@ -6817,6 +6859,7 @@ type index$j_EntropyHash = EntropyHash;
  type index$j_Epoch = Epoch;
  type index$j_EpochMarker = EpochMarker;
  declare const index$j_EpochMarker: typeof EpochMarker;
+ type index$j_EpochMarkerView = EpochMarkerView;
  type index$j_Extrinsic = Extrinsic;
  declare const index$j_Extrinsic: typeof Extrinsic;
  type index$j_ExtrinsicHash = ExtrinsicHash;
@@ -6836,6 +6879,9 @@ type index$j_SegmentIndex = SegmentIndex;
  type index$j_ServiceGas = ServiceGas;
  type index$j_ServiceId = ServiceId;
  type index$j_StateRootHash = StateRootHash;
+ type index$j_TicketsMarker = TicketsMarker;
+ declare const index$j_TicketsMarker: typeof TicketsMarker;
+ type index$j_TicketsMarkerView = TicketsMarkerView;
  type index$j_TimeSlot = TimeSlot;
  type index$j_ValidatorIndex = ValidatorIndex;
  type index$j_ValidatorKeys = ValidatorKeys;
@@ -6868,8 +6914,8 @@ declare const index$j_workPackage: typeof workPackage;
  declare const index$j_workReport: typeof workReport;
  declare const index$j_workResult: typeof workResult;
  declare namespace index$j {
- export { index$j_Block as Block, index$j_EpochMarker as EpochMarker, index$j_Extrinsic as Extrinsic, index$j_Header as Header, index$j_HeaderViewWithHash as HeaderViewWithHash, index$j_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$j_ValidatorKeys as ValidatorKeys, index$j_W_E as W_E, index$j_W_S as W_S, index$j_assurances as assurances, index$j_codecPerEpochBlock as codecPerEpochBlock, index$j_codecPerValidator as codecPerValidator, codec as codecUtils, index$j_disputes as disputes, index$j_encodeUnsealedHeader as encodeUnsealedHeader, index$j_guarantees as guarantees, index$j_headerViewWithHashCodec as headerViewWithHashCodec, index$j_legacyDescriptor as legacyDescriptor, index$j_preimage as preimage, index$j_refineContext as refineContext, index$j_tickets as tickets, index$j_tryAsCoreIndex as tryAsCoreIndex, index$j_tryAsEpoch as tryAsEpoch, index$j_tryAsPerEpochBlock as tryAsPerEpochBlock, index$j_tryAsPerValidator as tryAsPerValidator, index$j_tryAsSegmentIndex as tryAsSegmentIndex, index$j_tryAsServiceGas as tryAsServiceGas, index$j_tryAsServiceId as tryAsServiceId, index$j_tryAsTimeSlot as tryAsTimeSlot, index$j_tryAsValidatorIndex as tryAsValidatorIndex, index$j_workItem as workItem, index$j_workPackage as workPackage, index$j_workReport as workReport, index$j_workResult as workResult };
- export type { index$j_BlockView as BlockView, index$j_CodeHash as CodeHash, index$j_CoreIndex as CoreIndex, index$j_EntropyHash as EntropyHash, index$j_Epoch as Epoch, index$j_ExtrinsicHash as ExtrinsicHash, index$j_ExtrinsicView as ExtrinsicView, index$j_HeaderHash as HeaderHash, index$j_HeaderView as HeaderView, index$j_PerEpochBlock as PerEpochBlock, index$j_PerValidator as PerValidator, index$j_SEGMENT_BYTES as SEGMENT_BYTES, index$j_Segment as Segment, index$j_SegmentIndex as SegmentIndex, index$j_ServiceGas as ServiceGas, index$j_ServiceId as ServiceId, index$j_StateRootHash as StateRootHash, index$j_TimeSlot as TimeSlot, index$j_ValidatorIndex as ValidatorIndex, index$j_WorkReportHash as WorkReportHash };
+ export { index$j_Block as Block, index$j_EpochMarker as EpochMarker, index$j_Extrinsic as Extrinsic, index$j_Header as Header, index$j_HeaderViewWithHash as HeaderViewWithHash, index$j_MAX_NUMBER_OF_SEGMENTS as MAX_NUMBER_OF_SEGMENTS, index$j_TicketsMarker as TicketsMarker, index$j_ValidatorKeys as ValidatorKeys, index$j_W_E as W_E, index$j_W_S as W_S, index$j_assurances as assurances, index$j_codecPerEpochBlock as codecPerEpochBlock, index$j_codecPerValidator as codecPerValidator, codec as codecUtils, index$j_disputes as disputes, index$j_encodeUnsealedHeader as encodeUnsealedHeader, index$j_guarantees as guarantees, index$j_headerViewWithHashCodec as headerViewWithHashCodec, index$j_legacyDescriptor as legacyDescriptor, index$j_preimage as preimage, index$j_refineContext as refineContext, index$j_tickets as tickets, index$j_tryAsCoreIndex as tryAsCoreIndex, index$j_tryAsEpoch as tryAsEpoch, index$j_tryAsPerEpochBlock as tryAsPerEpochBlock, index$j_tryAsPerValidator as tryAsPerValidator, index$j_tryAsSegmentIndex as tryAsSegmentIndex, index$j_tryAsServiceGas as tryAsServiceGas, index$j_tryAsServiceId as tryAsServiceId, index$j_tryAsTimeSlot as tryAsTimeSlot, index$j_tryAsValidatorIndex as tryAsValidatorIndex, index$j_workItem as workItem, index$j_workPackage as workPackage, index$j_workReport as workReport, index$j_workResult as workResult };
+ export type { index$j_BlockView as BlockView, index$j_CodeHash as CodeHash, index$j_CoreIndex as CoreIndex, index$j_EntropyHash as EntropyHash, index$j_Epoch as Epoch, index$j_EpochMarkerView as EpochMarkerView, index$j_ExtrinsicHash as ExtrinsicHash, index$j_ExtrinsicView as ExtrinsicView, index$j_HeaderHash as HeaderHash, index$j_HeaderView as HeaderView, index$j_PerEpochBlock as PerEpochBlock, index$j_PerValidator as PerValidator, index$j_SEGMENT_BYTES as SEGMENT_BYTES, index$j_Segment as Segment, index$j_SegmentIndex as SegmentIndex, index$j_ServiceGas as ServiceGas, index$j_ServiceId as ServiceId, index$j_StateRootHash as StateRootHash, index$j_TicketsMarkerView as TicketsMarkerView, index$j_TimeSlot as TimeSlot, index$j_ValidatorIndex as ValidatorIndex, index$j_WorkReportHash as WorkReportHash };
  }
 
  /** A type that can be read from a JSON-parsed object. */
@@ -7611,7 +7657,7 @@ declare const epochMark = json.object<JsonEpochMarker, EpochMarker>(
  (x) => EpochMarker.create({ entropy: x.entropy, ticketsEntropy: x.tickets_entropy, validators: x.validators }),
  );
 
- declare const ticketsMark = json.object<Ticket>(
+ declare const ticket = json.object<Ticket>(
  {
  id: fromJson.bytes32(),
  attempt: fromJson.ticketAttempt,
@@ -7625,7 +7671,7 @@ type JsonHeader = {
  extrinsic_hash: ExtrinsicHash;
  slot: TimeSlot;
  epoch_mark?: EpochMarker;
- tickets_mark?: KnownSizeArray<Ticket, "EpochLength">;
+ tickets_mark?: Ticket[];
  offenders_mark: Ed25519Key[];
  author_index: ValidatorIndex;
  entropy_source: BandersnatchVrfSignature;
@@ -7639,7 +7685,7 @@ declare const headerFromJson = json.object<JsonHeader, Header>(
  extrinsic_hash: fromJson.bytes32(),
  slot: "number",
  epoch_mark: json.optional(epochMark),
- tickets_mark: json.optional<Ticket[]>(json.array(ticketsMark)),
+ tickets_mark: json.optional(json.array(ticket)),
  offenders_mark: json.array(fromJson.bytes32<Ed25519Key>()),
  author_index: "number",
  entropy_source: bandersnatchVrfSignature,
@@ -7663,7 +7709,10 @@ declare const headerFromJson = json.object<JsonHeader, Header>(
  header.extrinsicHash = extrinsic_hash;
  header.timeSlotIndex = slot;
  header.epochMarker = epoch_mark ?? null;
- header.ticketsMarker = tickets_mark ?? null;
+ header.ticketsMarker =
+ tickets_mark === undefined || tickets_mark === null
+ ? null
+ : TicketsMarker.create({ tickets: asOpaqueType(tickets_mark) });
  header.offendersMarker = offenders_mark;
  header.bandersnatchBlockAuthorIndex = author_index;
  header.entropySource = entropy_source;
@@ -7713,9 +7762,9 @@ declare const index$h_preimagesExtrinsicFromJson: typeof preimagesExtrinsicFromJ
  declare const index$h_refineContextFromJson: typeof refineContextFromJson;
  declare const index$h_reportGuaranteeFromJson: typeof reportGuaranteeFromJson;
  declare const index$h_segmentRootLookupItemFromJson: typeof segmentRootLookupItemFromJson;
+ declare const index$h_ticket: typeof ticket;
  declare const index$h_ticketEnvelopeFromJson: typeof ticketEnvelopeFromJson;
  declare const index$h_ticketsExtrinsicFromJson: typeof ticketsExtrinsicFromJson;
- declare const index$h_ticketsMark: typeof ticketsMark;
  declare const index$h_validatorKeysFromJson: typeof validatorKeysFromJson;
  declare const index$h_validatorSignatureFromJson: typeof validatorSignatureFromJson;
  declare const index$h_verdictFromJson: typeof verdictFromJson;
@@ -7725,7 +7774,7 @@ declare const index$h_workRefineLoadFromJson: typeof workRefineLoadFromJson;
  declare const index$h_workReportFromJson: typeof workReportFromJson;
  declare const index$h_workResultFromJson: typeof workResultFromJson;
  declare namespace index$h {
- export { index$h_bandersnatchVrfSignature as bandersnatchVrfSignature, index$h_blockFromJson as blockFromJson, index$h_culpritFromJson as culpritFromJson, index$h_disputesExtrinsicFromJson as disputesExtrinsicFromJson, index$h_epochMark as epochMark, index$h_faultFromJson as faultFromJson, index$h_fromJson as fromJson, index$h_getAssurancesExtrinsicFromJson as getAssurancesExtrinsicFromJson, index$h_getAvailabilityAssuranceFromJson as getAvailabilityAssuranceFromJson, index$h_getExtrinsicFromJson as getExtrinsicFromJson, index$h_guaranteesExtrinsicFromJson as guaranteesExtrinsicFromJson, index$h_headerFromJson as headerFromJson, index$h_judgementFromJson as judgementFromJson, index$h_preimageFromJson as preimageFromJson, index$h_preimagesExtrinsicFromJson as preimagesExtrinsicFromJson, index$h_refineContextFromJson as refineContextFromJson, index$h_reportGuaranteeFromJson as reportGuaranteeFromJson, index$h_segmentRootLookupItemFromJson as segmentRootLookupItemFromJson, index$h_ticketEnvelopeFromJson as ticketEnvelopeFromJson, index$h_ticketsExtrinsicFromJson as ticketsExtrinsicFromJson, index$h_ticketsMark as ticketsMark, index$h_validatorKeysFromJson as validatorKeysFromJson, index$h_validatorSignatureFromJson as validatorSignatureFromJson, index$h_verdictFromJson as verdictFromJson, index$h_workExecResultFromJson as workExecResultFromJson, index$h_workPackageSpecFromJson as workPackageSpecFromJson, index$h_workRefineLoadFromJson as workRefineLoadFromJson, index$h_workReportFromJson as workReportFromJson, index$h_workResultFromJson as workResultFromJson };
+ export { index$h_bandersnatchVrfSignature as bandersnatchVrfSignature, index$h_blockFromJson as blockFromJson, index$h_culpritFromJson as culpritFromJson, index$h_disputesExtrinsicFromJson as disputesExtrinsicFromJson, index$h_epochMark as epochMark, index$h_faultFromJson as faultFromJson, index$h_fromJson as fromJson, index$h_getAssurancesExtrinsicFromJson as getAssurancesExtrinsicFromJson, index$h_getAvailabilityAssuranceFromJson as getAvailabilityAssuranceFromJson, index$h_getExtrinsicFromJson as getExtrinsicFromJson, index$h_guaranteesExtrinsicFromJson as guaranteesExtrinsicFromJson, index$h_headerFromJson as headerFromJson, index$h_judgementFromJson as judgementFromJson, index$h_preimageFromJson as preimageFromJson, index$h_preimagesExtrinsicFromJson as preimagesExtrinsicFromJson, index$h_refineContextFromJson as refineContextFromJson, index$h_reportGuaranteeFromJson as reportGuaranteeFromJson, index$h_segmentRootLookupItemFromJson as segmentRootLookupItemFromJson, index$h_ticket as ticket, index$h_ticketEnvelopeFromJson as ticketEnvelopeFromJson, index$h_ticketsExtrinsicFromJson as ticketsExtrinsicFromJson, index$h_validatorKeysFromJson as validatorKeysFromJson, index$h_validatorSignatureFromJson as validatorSignatureFromJson, index$h_verdictFromJson as verdictFromJson, index$h_workExecResultFromJson as workExecResultFromJson, index$h_workPackageSpecFromJson as workPackageSpecFromJson, index$h_workRefineLoadFromJson as workRefineLoadFromJson, index$h_workReportFromJson as workReportFromJson, index$h_workResultFromJson as workResultFromJson };
  export type { index$h_CamelToSnake as CamelToSnake, index$h_JsonCulprit as JsonCulprit, index$h_JsonEpochMarker as JsonEpochMarker, index$h_JsonFault as JsonFault, index$h_JsonHeader as JsonHeader, index$h_JsonJudgement as JsonJudgement, index$h_JsonObject as JsonObject, index$h_JsonRefineContext as JsonRefineContext, index$h_JsonReportGuarantee as JsonReportGuarantee, index$h_JsonVerdict as JsonVerdict, index$h_JsonWorkExecResult as JsonWorkExecResult, index$h_JsonWorkRefineLoad as JsonWorkRefineLoad, index$h_JsonWorkReport as JsonWorkReport, index$h_JsonWorkResult as JsonWorkResult };
  }
 
@@ -7835,7 +7884,7 @@ declare const DEV_CONFIG = "dev";
  declare const DEFAULT_CONFIG = "default";
 
  declare const NODE_DEFAULTS = {
- name: os.hostname(),
+ name: isBrowser() ? "browser" : os.hostname(),
  config: DEFAULT_CONFIG,
  };
 
@@ -7890,11 +7939,11 @@ declare class NodeConfiguration {
 
  declare function loadConfig(configPath: string): NodeConfiguration {
  if (configPath === DEFAULT_CONFIG) {
- return parseFromJson(defaultConfigJson, NodeConfiguration.fromJson);
+ return parseFromJson(configs.default, NodeConfiguration.fromJson);
  }
 
  if (configPath === DEV_CONFIG) {
- return parseFromJson(devConfigJson, NodeConfiguration.fromJson);
+ return parseFromJson(configs.dev, NodeConfiguration.fromJson);
  }
 
  try {
@@ -8235,20 +8284,7 @@ declare class AutoAccumulate {
  declare class PrivilegedServices {
  static Codec = codec.Class(PrivilegedServices, {
  manager: codec.u32.asOpaque<ServiceId>(),
- authManager: Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? codecPerCore(codec.u32.asOpaque<ServiceId>())
- : codecWithContext((ctx) =>
- codec.u32.asOpaque<ServiceId>().convert(
- // NOTE: [MaSo] In a compatibility mode we are always updating all entries
- // (all the entries are the same)
- // so it doesn't matter which one we take here.
- (perCore: PerCore<ServiceId>) => perCore[0],
- (serviceId: ServiceId) => {
- const array = new Array(ctx.coresCount).fill(serviceId);
- return tryAsPerCore(array, ctx);
- },
- ),
- ),
+ authManager: codecPerCore(codec.u32.asOpaque<ServiceId>()),
  validatorsManager: codec.u32.asOpaque<ServiceId>(),
  autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
  });
@@ -8440,60 +8476,6 @@ declare namespace index$f {
  declare const MAX_RECENT_HISTORY = 8;
  type MAX_RECENT_HISTORY = typeof MAX_RECENT_HISTORY;
 
- type LegacyBlocksState = KnownSizeArray<LegacyBlockState, `0..${typeof MAX_RECENT_HISTORY}`>;
-
- declare class LegacyBlockState extends WithDebug {
- static Codec = codec.Class(LegacyBlockState, {
- headerHash: codec.bytes(HASH_SIZE).asOpaque<HeaderHash>(),
- mmr: codec.object({
- peaks: readonlyArray(codec.sequenceVarLen(codec.optional(codec.bytes(HASH_SIZE)))),
- }),
- postStateRoot: codec.bytes(HASH_SIZE).asOpaque<StateRootHash>(),
- reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
- });
-
- static create({ headerHash, mmr, postStateRoot, reported }: CodecRecord<LegacyBlockState>) {
- return new LegacyBlockState(headerHash, mmr, postStateRoot, reported);
- }
-
- private constructor(
- /** Header hash. */
- public readonly headerHash: HeaderHash,
- /** Merkle mountain range peaks. */
- public readonly mmr: MmrPeaks<KeccakHash>,
- /** Posterior state root filled in with a 1-block delay. */
- public postStateRoot: StateRootHash,
- /** Reported work packages (no more than number of cores). */
- public readonly reported: HashDictionary<WorkPackageHash, WorkPackageInfo>,
- ) {
- super();
- }
- }
-
- declare class LegacyRecentBlocks extends WithDebug {
- static Codec = codec.Class(LegacyRecentBlocks, {
- blocks: codecKnownSizeArray(LegacyBlockState.Codec, {
- minLength: 0,
- maxLength: MAX_RECENT_HISTORY,
- typicalLength: MAX_RECENT_HISTORY,
- }),
- });
-
- static create(a: CodecRecord<LegacyRecentBlocks>) {
- return new LegacyRecentBlocks(a.blocks);
- }
-
- private constructor(
- /**
- * Most recent blocks.
- * https://graypaper.fluffylabs.dev/#/85129da/0fb6010fb601?v=0.6.3
- */
- public readonly blocks: LegacyBlocksState,
- ) {
- super();
- }
- }
-
  /** Array of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
  type BlocksState = KnownSizeArray<BlockState, `0..${typeof MAX_RECENT_HISTORY}`>;
 
@@ -8557,87 +8539,54 @@ declare class RecentBlocks extends WithDebug {
  }
 
  /**
- * Unified recent history of blocks that handles both legacy and current formats.
+ * Recent history of blocks.
  *
- * https://graypaper.fluffylabs.dev/#/85129da/38cb0138cb01?v=0.6.3
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
  */
  declare class RecentBlocksHistory extends WithDebug {
  static Codec = Descriptor.new<RecentBlocksHistory>(
  "RecentBlocksHistory",
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? RecentBlocks.Codec.sizeHint : LegacyRecentBlocks.Codec.sizeHint,
- (encoder, value) =>
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? RecentBlocks.Codec.encode(encoder, value.asCurrent())
- : LegacyRecentBlocks.Codec.encode(encoder, value.asLegacy()),
+ RecentBlocks.Codec.sizeHint,
+ (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()),
  (decoder) => {
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
- const recentBlocks = RecentBlocks.Codec.decode(decoder);
- return RecentBlocksHistory.create(recentBlocks);
- }
- const legacyBlocks = LegacyRecentBlocks.Codec.decode(decoder);
- return RecentBlocksHistory.legacyCreate(legacyBlocks);
+ const recentBlocks = RecentBlocks.Codec.decode(decoder);
+ return RecentBlocksHistory.create(recentBlocks);
  },
- (_sizer) => {
- return Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? RecentBlocks.Codec.sizeHint
- : LegacyRecentBlocks.Codec.sizeHint;
+ (skip) => {
+ return RecentBlocks.Codec.skip(skip);
  },
  );
 
  static create(recentBlocks: RecentBlocks) {
- return new RecentBlocksHistory(recentBlocks, null);
- }
-
- static legacyCreate(legacyRecentBlocks: LegacyRecentBlocks) {
- return new RecentBlocksHistory(null, legacyRecentBlocks);
+ return new RecentBlocksHistory(recentBlocks);
  }
 
  static empty() {
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
- return RecentBlocksHistory.create(
- RecentBlocks.create({
- blocks: asKnownSize([]),
- accumulationLog: { peaks: [] },
- }),
- );
- }
- return RecentBlocksHistory.legacyCreate(LegacyRecentBlocks.create({ blocks: asKnownSize([]) }));
+ return RecentBlocksHistory.create(
+ RecentBlocks.create({
+ blocks: asKnownSize([]),
+ accumulationLog: { peaks: [] },
+ }),
+ );
  }
 
  /**
  * Returns the block's BEEFY super peak.
- *
- * NOTE: The `hasher` parameter exists solely for backward compatibility with legacy block format.
  */
- static accumulationResult(
- block: BlockState | LegacyBlockState,
- {
- hasher,
- }: {
- hasher: MmrHasher<KeccakHash>;
- },
- ): KeccakHash {
- return Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? (block as BlockState).accumulationResult
- : MerkleMountainRange.fromPeaks(hasher, (block as LegacyBlockState).mmr).getSuperPeakHash();
+ static accumulationResult(block: BlockState): KeccakHash {
+ return (block as BlockState).accumulationResult;
  }
 
- private constructor(
- private readonly current: RecentBlocks | null,
- private readonly legacy: LegacyRecentBlocks | null,
- ) {
+ private constructor(private readonly current: RecentBlocks | null) {
  super();
  }
 
  /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
- get blocks(): readonly (BlockState | LegacyBlockState)[] {
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) && this.current !== null) {
+ get blocks(): readonly BlockState[] {
+ if (this.current !== null) {
  return this.current.blocks;
  }
- if (this.legacy !== null) {
- return this.legacy.blocks;
- }
+
  throw new Error("RecentBlocksHistory is in invalid state");
  }
 
@@ -8648,15 +8597,8 @@ declare class RecentBlocksHistory extends WithDebug {
  return this.current;
  }
 
- asLegacy() {
- if (this.legacy === null) {
- throw new Error("Cannot access legacy RecentBlocks format");
- }
- return this.legacy;
- }
-
- updateBlocks(blocks: (BlockState | LegacyBlockState)[]): RecentBlocksHistory {
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) && this.current !== null) {
+ updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
+ if (this.current !== null) {
  return RecentBlocksHistory.create(
  RecentBlocks.create({
  ...this.current,
@@ -8664,13 +8606,7 @@ declare class RecentBlocksHistory extends WithDebug {
  }),
  );
  }
- if (this.legacy !== null) {
- return RecentBlocksHistory.legacyCreate(
- LegacyRecentBlocks.create({
- blocks: asOpaqueType(blocks as LegacyBlockState[]),
- }),
- );
- }
+
  throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
  }
  }
@@ -8858,31 +8794,18 @@ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
  */
  declare class ServiceAccountInfo extends WithDebug {
- static Codec = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? codec.Class(ServiceAccountInfo, {
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
- balance: codec.u64,
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- storageUtilisationBytes: codec.u64,
- gratisStorage: codec.u64,
- storageUtilisationCount: codec.u32,
- created: codec.u32.convert((x) => x, tryAsTimeSlot),
- lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
- parentService: codec.u32.convert((x) => x, tryAsServiceId),
- })
- : codec.Class(ServiceAccountInfo, {
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
- balance: codec.u64,
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- storageUtilisationBytes: codec.u64,
- storageUtilisationCount: codec.u32,
- gratisStorage: ignoreValueWithDefault(tryAsU64(0)),
- created: ignoreValueWithDefault(tryAsTimeSlot(0)),
- lastAccumulation: ignoreValueWithDefault(tryAsTimeSlot(0)),
- parentService: ignoreValueWithDefault(tryAsServiceId(0)),
- });
+ static Codec = codec.Class(ServiceAccountInfo, {
+ codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
+ balance: codec.u64,
+ accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+ onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+ storageUtilisationBytes: codec.u64,
+ gratisStorage: codec.u64,
+ storageUtilisationCount: codec.u32,
+ created: codec.u32.convert((x) => x, tryAsTimeSlot),
+ lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
+ parentService: codec.u32.convert((x) => x, tryAsServiceId),
+ });
 
  static create(a: CodecRecord<ServiceAccountInfo>) {
  return new ServiceAccountInfo(
@@ -8904,11 +8827,6 @@ declare class ServiceAccountInfo extends WithDebug {
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
  */
  static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
- check(
- gratisStorage === tryAsU64(0) || Compatibility.isGreaterOrEqual(GpVersion.V0_6_7),
- "Gratis storage cannot be non-zero before 0.6.7",
- );
-
  const storageCost =
  BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;
 
@@ -9219,9 +9137,7 @@ type ServicesUpdate = {
  };
 
  declare const codecServiceId: Descriptor<ServiceId> =
- Compatibility.isSuite(TestSuite.W3F_DAVXY) ||
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
+ Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
  ? codec.u32.asOpaque<ServiceId>()
  : codec.varU32.convert(
  (s) => tryAsU32(s),
@@ -10263,11 +10179,6 @@ declare const index$e_InMemoryService: typeof InMemoryService;
  type index$e_InMemoryState = InMemoryState;
  declare const index$e_InMemoryState: typeof InMemoryState;
  type index$e_InMemoryStateFields = InMemoryStateFields;
- type index$e_LegacyBlockState = LegacyBlockState;
- declare const index$e_LegacyBlockState: typeof LegacyBlockState;
- type index$e_LegacyBlocksState = LegacyBlocksState;
- type index$e_LegacyRecentBlocks = LegacyRecentBlocks;
- declare const index$e_LegacyRecentBlocks: typeof LegacyRecentBlocks;
  type index$e_LookupHistoryItem = LookupHistoryItem;
  declare const index$e_LookupHistoryItem: typeof LookupHistoryItem;
  type index$e_LookupHistorySlots = LookupHistorySlots;
@@ -10338,8 +10249,8 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
  declare namespace index$e {
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LegacyBlockState as LegacyBlockState, index$e_LegacyRecentBlocks as LegacyRecentBlocks, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
- export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LegacyBlocksState as LegacyBlocksState, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
10253
+ export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
10343
10254
  }
10344
10255
 
10345
10256
  type StateKey$1 = Opaque<OpaqueHash, "stateKey">;
@@ -10483,7 +10394,7 @@ type StateCodec<T> = {
10483
10394
 
10484
10395
  /** Serialization for particular state entries. */
10485
10396
  declare namespace serialize {
10486
- /** C(1): https://graypaper.fluffylabs.dev/#/85129da/38a20138a201?v=0.6.3 */
10397
+ /** C(1): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b15013b1501?v=0.6.7 */
10487
10398
  export const authPools: StateCodec<State["authPools"]> = {
10488
10399
  key: stateKeys.index(StateKeyIdx.Alpha),
10489
10400
  Codec: codecPerCore(
@@ -10496,7 +10407,7 @@ declare namespace serialize {
10496
10407
  extract: (s) => s.authPools,
10497
10408
  };
10498
10409
 
10499
- /** C(2): https://graypaper.fluffylabs.dev/#/85129da/38be0138be01?v=0.6.3 */
10410
+ /** C(2): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b31013b3101?v=0.6.7 */
10500
10411
  export const authQueues: StateCodec<State["authQueues"]> = {
10501
10412
  key: stateKeys.index(StateKeyIdx.Phi),
10502
10413
  Codec: codecPerCore(
@@ -10507,7 +10418,6 @@ declare namespace serialize {
10507
10418
 
10508
10419
  /**
10509
10420
  * C(3): Recent blocks with compatibility
10510
- * https://graypaper.fluffylabs.dev/#/85129da/38cb0138cb01?v=0.6.3
10511
10421
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e013b3e01?v=0.6.7
10512
10422
  */
10513
10423
  export const recentBlocks: StateCodec<State["recentBlocks"]> = {
@@ -10516,7 +10426,7 @@ declare namespace serialize {
10516
10426
  extract: (s) => s.recentBlocks,
10517
10427
  };
10518
10428
 
10519
- /** C(4): https://graypaper.fluffylabs.dev/#/85129da/38e60138e601?v=0.6.3 */
10429
+ /** C(4): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b63013b6301?v=0.6.7 */
10520
10430
  export const safrole: StateCodec<SafroleData> = {
10521
10431
  key: stateKeys.index(StateKeyIdx.Gamma),
10522
10432
  Codec: SafroleData.Codec,
@@ -10529,63 +10439,63 @@ declare namespace serialize {
10529
10439
  }),
10530
10440
  };
10531
10441
 
10532
- /** C(5): https://graypaper.fluffylabs.dev/#/85129da/383d02383d02?v=0.6.3 */
10442
+ /** C(5): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bba013bba01?v=0.6.7 */
10533
10443
  export const disputesRecords: StateCodec<State["disputesRecords"]> = {
10534
10444
  key: stateKeys.index(StateKeyIdx.Psi),
10535
10445
  Codec: DisputesRecords.Codec,
10536
10446
  extract: (s) => s.disputesRecords,
10537
10447
  };
10538
10448
 
10539
- /** C(6): https://graypaper.fluffylabs.dev/#/85129da/387602387602?v=0.6.3 */
10449
+ /** C(6): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bf3013bf301?v=0.6.7 */
10540
10450
  export const entropy: StateCodec<State["entropy"]> = {
10541
10451
  key: stateKeys.index(StateKeyIdx.Eta),
10542
10452
  Codec: codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<EntropyHash>(), ENTROPY_ENTRIES),
10543
10453
  extract: (s) => s.entropy,
10544
10454
  };
10545
10455
 
10546
- /** C(7): https://graypaper.fluffylabs.dev/#/85129da/388302388302?v=0.6.3 */
10456
+ /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
10547
10457
  export const designatedValidators: StateCodec<State["designatedValidatorData"]> = {
10548
10458
  key: stateKeys.index(StateKeyIdx.Iota),
10549
10459
  Codec: codecPerValidator(ValidatorData.Codec),
10550
10460
  extract: (s) => s.designatedValidatorData,
10551
10461
  };
10552
10462
 
10553
- /** C(8): https://graypaper.fluffylabs.dev/#/85129da/389002389002?v=0.6.3 */
10463
+ /** C(8): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b0d023b0d02?v=0.6.7 */
10554
10464
  export const currentValidators: StateCodec<State["currentValidatorData"]> = {
10555
10465
  key: stateKeys.index(StateKeyIdx.Kappa),
10556
10466
  Codec: codecPerValidator(ValidatorData.Codec),
10557
10467
  extract: (s) => s.currentValidatorData,
10558
10468
  };
10559
10469
 
10560
- /** C(9): https://graypaper.fluffylabs.dev/#/85129da/389d02389d02?v=0.6.3 */
10470
+ /** C(9): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b1a023b1a02?v=0.6.7 */
10561
10471
  export const previousValidators: StateCodec<State["previousValidatorData"]> = {
10562
10472
  key: stateKeys.index(StateKeyIdx.Lambda),
10563
10473
  Codec: codecPerValidator(ValidatorData.Codec),
10564
10474
  extract: (s) => s.previousValidatorData,
10565
10475
  };
10566
10476
 
10567
- /** C(10): https://graypaper.fluffylabs.dev/#/85129da/38aa0238aa02?v=0.6.3 */
10477
+ /** C(10): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b27023b2702?v=0.6.7 */
10568
10478
  export const availabilityAssignment: StateCodec<State["availabilityAssignment"]> = {
10569
10479
  key: stateKeys.index(StateKeyIdx.Rho),
10570
10480
  Codec: codecPerCore(codec.optional(AvailabilityAssignment.Codec)),
10571
10481
  extract: (s) => s.availabilityAssignment,
10572
10482
  };
10573
10483
 
10574
- /** C(11): https://graypaper.fluffylabs.dev/#/85129da/38c10238c102?v=0.6.3 */
10484
+ /** C(11): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e023b3e02?v=0.6.7 */
10575
10485
  export const timeslot: StateCodec<State["timeslot"]> = {
10576
10486
  key: stateKeys.index(StateKeyIdx.Tau),
10577
10487
  Codec: codec.u32.asOpaque<TimeSlot>(),
10578
10488
  extract: (s) => s.timeslot,
10579
10489
  };
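  // A usage sketch of the pattern above: a `StateCodec` entry couples a state key,
  // a value codec and an extractor. `Encoder.encodeObject` is assumed here purely
  // for illustration; the concrete codec API is not shown in this excerpt.
  declare const state: State;
  const timeslotEntry = serialize.timeslot;            // C(11)
  const timeslotValue = timeslotEntry.extract(state);  // same as state.timeslot
  const timeslotBlob = Encoder.encodeObject(timeslotEntry.Codec, timeslotValue); // assumed helper
  const timeslotUpdate = [StateEntryUpdateAction.Insert, timeslotEntry.key, timeslotBlob] as const;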
10580
10490
 
10581
- /** C(12): https://graypaper.fluffylabs.dev/#/85129da/38cf0238cf02?v=0.6.3 */
10491
+ /** C(12): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b4c023b4c02?v=0.6.7 */
10582
10492
  export const privilegedServices: StateCodec<State["privilegedServices"]> = {
10583
10493
  key: stateKeys.index(StateKeyIdx.Chi),
10584
10494
  Codec: PrivilegedServices.Codec,
10585
10495
  extract: (s) => s.privilegedServices,
10586
10496
  };
10587
10497
 
10588
- /** C(13): https://graypaper.fluffylabs.dev/#/85129da/38e10238e102?v=0.6.3 */
10498
+ /** C(13): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b5e023b5e02?v=0.6.7 */
10589
10499
  export const statistics: StateCodec<State["statistics"]> = {
10590
10500
  key: stateKeys.index(StateKeyIdx.Pi),
10591
10501
  Codec: StatisticsData.Codec,
@@ -10599,7 +10509,7 @@ declare namespace serialize {
10599
10509
  extract: (s) => s.accumulationQueue,
10600
10510
  };
10601
10511
 
10602
- /** C(15): https://graypaper.fluffylabs.dev/#/85129da/381903381903?v=0.6.3 */
10512
+ /** C(15): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b96023b9602?v=0.6.7 */
10603
10513
  export const recentlyAccumulated: StateCodec<State["recentlyAccumulated"]> = {
10604
10514
  key: stateKeys.index(StateKeyIdx.Xi),
10605
10515
  Codec: codecPerEpochBlock(
@@ -10695,27 +10605,17 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
10695
10605
  }
10696
10606
  }
10697
10607
 
10698
- declare function getLegacyKey(serviceId: ServiceId, rawKey: StorageKey): StorageKey {
10699
- const SERVICE_ID_BYTES = 4;
10700
- const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
10701
- serviceIdAndKey.set(u32AsLeBytes(serviceId));
10702
- serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
10703
- return asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
10704
- }
10705
-
10706
10608
  declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
10707
10609
  for (const { action, serviceId } of storage ?? []) {
10708
10610
  switch (action.kind) {
10709
10611
  case UpdateStorageKind.Set: {
10710
- const key = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
10711
- ? action.storage.key
10712
- : getLegacyKey(serviceId, action.storage.key);
10612
+ const key = action.storage.key;
10713
10613
  const codec = serialize.serviceStorage(serviceId, key);
10714
10614
  yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
10715
10615
  break;
10716
10616
  }
10717
10617
  case UpdateStorageKind.Remove: {
10718
- const key = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? action.key : getLegacyKey(serviceId, action.key);
10618
+ const key = action.key;
10719
10619
  const codec = serialize.serviceStorage(serviceId, key);
10720
10620
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
10721
10621
  break;
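 // Sketch of consuming the generator above: each yielded `StateEntryUpdate` is a
 // tuple of [action, stateKey, value]; with the legacy `getLegacyKey` derivation
 // removed, the service-provided storage key is used directly. `pendingUpdates`
 // is a placeholder declared only for illustration.
 declare const pendingUpdates: UpdateStorage[];
 for (const [action, key, value] of serializeStorage(pendingUpdates)) {
   if (action === StateEntryUpdateAction.Insert) {
     // write `value` under `key` in the state backend
   } else if (action === StateEntryUpdateAction.Remove) {
     // delete `key` from the state backend
   }
 }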
@@ -10855,7 +10755,7 @@ declare function* serializeBasicKeys(spec: ChainSpec, update: Partial<State>) {
10855
10755
  yield doSerialize(update.recentlyAccumulated, serialize.recentlyAccumulated); // C(15)
10856
10756
  }
10857
10757
 
10858
- if (update.accumulationOutputLog !== undefined && Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
10758
+ if (update.accumulationOutputLog !== undefined) {
10859
10759
  yield doSerialize(update.accumulationOutputLog, serialize.accumulationOutputLog); // C(16)
10860
10760
  }
10861
10761
  }
@@ -11650,9 +11550,7 @@ declare function convertInMemoryStateToDictionary(
11650
11550
  doSerialize(serialize.statistics); // C(13)
11651
11551
  doSerialize(serialize.accumulationQueue); // C(14)
11652
11552
  doSerialize(serialize.recentlyAccumulated); // C(15)
11653
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
11654
- doSerialize(serialize.accumulationOutputLog); // C(16)
11655
- }
11553
+ doSerialize(serialize.accumulationOutputLog); // C(16)
11656
11554
 
11657
11555
  // services
11658
11556
  for (const [serviceId, service] of state.services.entries()) {
@@ -11843,10 +11741,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
11843
11741
  }
11844
11742
 
11845
11743
  get accumulationOutputLog(): State["accumulationOutputLog"] {
11846
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
11847
- return this.retrieve(serialize.accumulationOutputLog, "accumulationOutputLog");
11848
- }
11849
- return [];
11744
+ return this.retrieve(serialize.accumulationOutputLog, "accumulationOutputLog");
11850
11745
  }
11851
11746
  }
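 // With the version gate gone, the accumulation output log reads like any other
 // field: each `SerializedState` getter decodes its entry from the backing store
 // on access. A minimal sketch (backend wiring omitted):
 declare const serializedState: SerializedState;
 const outputLog = serializedState.accumulationOutputLog; // decoded via serialize.accumulationOutputLog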
11852
11747
 
@@ -11993,7 +11888,6 @@ declare const index$c_U32_BYTES: typeof U32_BYTES;
11993
11888
  declare const index$c_binaryMerkleization: typeof binaryMerkleization;
11994
11889
  declare const index$c_convertInMemoryStateToDictionary: typeof convertInMemoryStateToDictionary;
11995
11890
  declare const index$c_dumpCodec: typeof dumpCodec;
11996
- declare const index$c_getLegacyKey: typeof getLegacyKey;
11997
11891
  declare const index$c_getSafroleData: typeof getSafroleData;
11998
11892
  declare const index$c_legacyServiceNested: typeof legacyServiceNested;
11999
11893
  declare const index$c_loadState: typeof loadState;
@@ -12007,7 +11901,7 @@ declare const index$c_serializeStorage: typeof serializeStorage;
12007
11901
  declare const index$c_stateEntriesSequenceCodec: typeof stateEntriesSequenceCodec;
12008
11902
  import index$c_stateKeys = stateKeys;
12009
11903
  declare namespace index$c {
12010
- export { index$c_EMPTY_BLOB as EMPTY_BLOB, index$c_SerializedService as SerializedService, index$c_SerializedState as SerializedState, index$c_StateEntries as StateEntries, index$c_StateEntryUpdateAction as StateEntryUpdateAction, index$c_StateKeyIdx as StateKeyIdx, index$c_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$c_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$c_U32_BYTES as U32_BYTES, index$c_binaryMerkleization as binaryMerkleization, index$c_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$c_dumpCodec as dumpCodec, index$c_getLegacyKey as getLegacyKey, index$c_getSafroleData as getSafroleData, index$c_legacyServiceNested as legacyServiceNested, index$c_loadState as loadState, index$c_serialize as serialize, index$c_serializeBasicKeys as serializeBasicKeys, index$c_serializePreimages as serializePreimages, index$c_serializeRemovedServices as serializeRemovedServices, index$c_serializeServiceUpdates as serializeServiceUpdates, index$c_serializeStateUpdate as serializeStateUpdate, index$c_serializeStorage as serializeStorage, index$c_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$c_stateKeys as stateKeys };
11904
+ export { index$c_EMPTY_BLOB as EMPTY_BLOB, index$c_SerializedService as SerializedService, index$c_SerializedState as SerializedState, index$c_StateEntries as StateEntries, index$c_StateEntryUpdateAction as StateEntryUpdateAction, index$c_StateKeyIdx as StateKeyIdx, index$c_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$c_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$c_U32_BYTES as U32_BYTES, index$c_binaryMerkleization as binaryMerkleization, index$c_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$c_dumpCodec as dumpCodec, index$c_getSafroleData as getSafroleData, index$c_legacyServiceNested as legacyServiceNested, index$c_loadState as loadState, index$c_serialize as serialize, index$c_serializeBasicKeys as serializeBasicKeys, index$c_serializePreimages as serializePreimages, index$c_serializeRemovedServices as serializeRemovedServices, index$c_serializeServiceUpdates as serializeServiceUpdates, index$c_serializeStateUpdate as serializeStateUpdate, index$c_serializeStorage as serializeStorage, index$c_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$c_stateKeys as stateKeys };
12011
11905
  export type { index$c_EncodeFun as EncodeFun, index$c_KeyAndCodec as KeyAndCodec, index$c_SerializedStateBackend as SerializedStateBackend, index$c_StateCodec as StateCodec, index$c_StateEntryUpdate as StateEntryUpdate, StateKey$1 as StateKey };
12012
11906
  }
12013
11907
 
@@ -12367,8 +12261,8 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
12367
12261
  }
12368
12262
 
12369
12263
  // encode and add redundancy shards
12370
- const points = new ShardsCollection(POINT_ALIGNMENT, data);
12371
- const encodedResult = encode(N_CHUNKS_REDUNDANCY, POINT_ALIGNMENT, points);
12264
+ const points = new reedSolomon.ShardsCollection(POINT_ALIGNMENT, data);
12265
+ const encodedResult = reedSolomon.encode(N_CHUNKS_REDUNDANCY, points);
12372
12266
  const encodedData = encodedResult.take_data();
12373
12267
 
12374
12268
  for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
@@ -12410,9 +12304,9 @@ declare function decodePiece(
12410
12304
  result.raw.set(points.raw, pointStartInResult);
12411
12305
  }
12412
12306
  }
12413
- const points = new ShardsCollection(POINT_ALIGNMENT, data, indices);
12307
+ const points = new reedSolomon.ShardsCollection(POINT_ALIGNMENT, data, indices);
12414
12308
 
12415
- const decodingResult = decode(N_CHUNKS_REQUIRED, N_CHUNKS_REDUNDANCY, POINT_ALIGNMENT, points);
12309
+ const decodingResult = reedSolomon.decode(N_CHUNKS_REQUIRED, N_CHUNKS_REDUNDANCY, points);
12416
12310
  const resultIndices = decodingResult.take_indices(); // it has to be called before take_data
12417
12311
  const resultData = decodingResult.take_data(); // it destroys the result object in rust
12418
12312
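 // A sketch of the reworked `reedSolomon` wrapper as used above: the point
 // alignment is now only passed when building the shard collection, not to
 // encode/decode. `data` is a placeholder and its exact type is an assumption.
 declare const data: Uint8Array;
 const shards = new reedSolomon.ShardsCollection(POINT_ALIGNMENT, data);
 const encoded = reedSolomon.encode(N_CHUNKS_REDUNDANCY, shards);
 const redundancyBytes = encoded.take_data();
 // When decoding, `take_indices()` must be called before `take_data()`,
 // since the latter consumes the underlying Rust object.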
 
@@ -12649,6 +12543,10 @@ declare function chunksToShards(
12649
12543
  return tryAsPerValidator(result, spec);
12650
12544
  }
12651
12545
 
12546
+ declare const initEc = async () => {
12547
+ await init.reedSolomon();
12548
+ };
12549
+
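 // `initEc` is the new one-shot initialiser for the erasure-coding backend.
 // A likely usage pattern (assumed, not shown in this excerpt) is to await it
 // once before calling the encode/decode helpers:
 async function setupErasureCoding(): Promise<void> {
   await initEc();
   // padAndEncodeData / decodeData can be used from here on.
 }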
12652
12550
  declare const index$a_HALF_POINT_SIZE: typeof HALF_POINT_SIZE;
12653
12551
  declare const index$a_N_CHUNKS_REDUNDANCY: typeof N_CHUNKS_REDUNDANCY;
12654
12552
  type index$a_N_CHUNKS_REQUIRED = N_CHUNKS_REQUIRED;
@@ -12662,6 +12560,7 @@ declare const index$a_decodeData: typeof decodeData;
12662
12560
  declare const index$a_decodeDataAndTrim: typeof decodeDataAndTrim;
12663
12561
  declare const index$a_decodePiece: typeof decodePiece;
12664
12562
  declare const index$a_encodePoints: typeof encodePoints;
12563
+ declare const index$a_initEc: typeof initEc;
12665
12564
  declare const index$a_join: typeof join;
12666
12565
  declare const index$a_lace: typeof lace;
12667
12566
  declare const index$a_padAndEncodeData: typeof padAndEncodeData;
@@ -12670,7 +12569,7 @@ declare const index$a_split: typeof split;
12670
12569
  declare const index$a_transpose: typeof transpose;
12671
12570
  declare const index$a_unzip: typeof unzip;
12672
12571
  declare namespace index$a {
12673
- export { index$a_HALF_POINT_SIZE as HALF_POINT_SIZE, index$a_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$a_POINT_ALIGNMENT as POINT_ALIGNMENT, index$a_chunkingFunction as chunkingFunction, index$a_chunksToShards as chunksToShards, index$a_decodeData as decodeData, index$a_decodeDataAndTrim as decodeDataAndTrim, index$a_decodePiece as decodePiece, index$a_encodePoints as encodePoints, index$a_join as join, index$a_lace as lace, index$a_padAndEncodeData as padAndEncodeData, index$a_shardsToChunks as shardsToChunks, index$a_split as split, index$a_transpose as transpose, index$a_unzip as unzip };
12572
+ export { index$a_HALF_POINT_SIZE as HALF_POINT_SIZE, index$a_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$a_POINT_ALIGNMENT as POINT_ALIGNMENT, index$a_chunkingFunction as chunkingFunction, index$a_chunksToShards as chunksToShards, index$a_decodeData as decodeData, index$a_decodeDataAndTrim as decodeDataAndTrim, index$a_decodePiece as decodePiece, index$a_encodePoints as encodePoints, index$a_initEc as initEc, index$a_join as join, index$a_lace as lace, index$a_padAndEncodeData as padAndEncodeData, index$a_shardsToChunks as shardsToChunks, index$a_split as split, index$a_transpose as transpose, index$a_unzip as unzip };
12674
12573
  export type { index$a_N_CHUNKS_REQUIRED as N_CHUNKS_REQUIRED, index$a_N_CHUNKS_TOTAL as N_CHUNKS_TOTAL, index$a_PIECE_SIZE as PIECE_SIZE, index$a_POINT_LENGTH as POINT_LENGTH };
12675
12574
  }
12676
12575
 
@@ -12702,34 +12601,201 @@ declare const HostCallResult = {
12702
12601
  OK: tryAsU64(0n),
12703
12602
  } as const;
12704
12603
 
12604
+ declare enum Level {
12605
+ INSANE = 1,
12606
+ TRACE = 2,
12607
+ LOG = 3,
12608
+ INFO = 4,
12609
+ WARN = 5,
12610
+ ERROR = 6,
12611
+ }
12612
+
12613
+ type Options = {
12614
+ defaultLevel: Level;
12615
+ workingDir: string;
12616
+ modules: Map<string, Level>;
12617
+ };
12618
+
12705
12619
  /**
12706
- * Mask class is an implementation of skip function defined in GP.
12620
+ * A function to parse logger definition (including modules) given as a string.
12707
12621
  *
12708
- * https://graypaper.fluffylabs.dev/#/5f542d7/237201239801
12622
+ * Examples
12623
+ * - `info` - set the default logging level to `info`.
12624
+ * - `trace` - default logging level set to `trace`.
12625
+ * - `debug,consensus=trace` - default level is set to `debug`/`log`, but the `consensus` module logs at `trace`.
12709
12626
  */
12710
- declare class Mask {
12711
- /**
12712
- * The lookup table will have `0` at the index which corresponds to an instruction on the same index in the bytecode.
12713
- * In case the value is non-zero it signifies the offset to the index with next instruction.
12714
- *
12715
- * Example:
12716
- * ```
12717
- * 0..1..2..3..4..5..6..7..8..9 # Indices
12718
- * 0..2..1..0..1..0..3..2..1..0 # lookupTable forward values
12719
- * ```
12720
- * There are instructions at indices `0, 3, 5, 9`.
12721
- */
12722
- private lookupTableForward: Uint8Array;
12723
-
12724
- constructor(mask: BitVec) {
12725
- this.lookupTableForward = this.buildLookupTableForward(mask);
12726
- }
12627
+ declare function parseLoggerOptions(input: string, defaultLevel: Level, workingDir?: string): Options {
12628
+ const modules = new Map<string, Level>();
12629
+ const parts = input.toLowerCase().split(",");
12630
+ let defLevel = defaultLevel;
12727
12631
 
12728
- isInstruction(index: number) {
12729
- return this.lookupTableForward[index] === 0;
12632
+ for (const p of parts) {
12633
+ const clean = p.trim();
12634
+ // skip empty entries (e.g. a stray trailing `,`)
12635
+ if (clean.length === 0) {
12636
+ continue;
12637
+ }
12638
+ // either a `module=level` override or a bare default level
12639
+ if (clean.includes("=")) {
12640
+ const [mod, lvl] = clean.split("=");
12641
+ modules.set(mod.trim(), parseLevel(lvl.trim()));
12642
+ } else {
12643
+ defLevel = parseLevel(clean);
12644
+ }
12730
12645
  }
12731
12646
 
12732
- getNoOfBytesToNextInstruction(index: number) {
12647
+ // TODO [ToDr] Fix dirname for workers.
12648
+ const myDir = (import.meta.dirname ?? "").split("/");
12649
+ myDir.pop();
12650
+ myDir.pop();
12651
+ return {
12652
+ defaultLevel: defLevel,
12653
+ modules,
12654
+ workingDir: workingDir ?? myDir.join("/"),
12655
+ };
12656
+ }
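 // Example of the accepted syntax (a sketch; the exact name-to-level mapping of
 // `parseLevel` is assumed): a comma-separated list where a bare level sets the
 // default and `module=level` overrides a single module.
 const opts = parseLoggerOptions("log,pvm=insane", Level.INFO);
 // expected: opts.defaultLevel === Level.LOG, opts.modules.get("pvm") === Level.INSANE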
12657
+
12658
+ declare const GLOBAL_CONFIG = {
12659
+ options: DEFAULT_OPTIONS,
12660
+ transport: ConsoleTransport.create(DEFAULT_OPTIONS.defaultLevel, DEFAULT_OPTIONS),
12661
+ };
12662
+
12663
+ /**
12664
+ * A logger instance.
12665
+ */
12666
+ declare class Logger {
12667
+ /**
12668
+ * Create a new logger instance given a file name and an optional module name.
12669
+ *
12670
+ * If the module name is not given, `fileName` becomes the module name.
12671
+ * The module name can be composed from multiple parts separated with `/`.
12672
+ *
12673
+ * The logger will use a global configuration which can be changed using
12674
+ * the [`Logger.configureAll`] function.
12675
+ */
12676
+ static new(fileName?: string, moduleName?: string) {
12677
+ const fName = fileName ?? "unknown";
12678
+ return new Logger(moduleName ?? fName, fName, GLOBAL_CONFIG);
12679
+ }
12680
+
12681
+ /**
12682
+ * Return the currently configured level for the given module.
+ */
12683
+ static getLevel(moduleName: string): Level {
12684
+ return findLevel(GLOBAL_CONFIG.options, moduleName);
12685
+ }
12686
+
12687
+ /**
12688
+ * Global configuration of all loggers.
12689
+ *
12690
+ * One can specify a default logging level (only logs with level >= default will be printed).
12691
+ * It's also possible to configure per-module logging level that takes precedence
12692
+ * over the default one.
12693
+ *
12694
+ * Changing the options affects all previously created loggers.
12695
+ */
12696
+ static configureAllFromOptions(options: Options) {
12697
+ // find minimal level to optimise logging in case
12698
+ // we don't care about low-level logs.
12699
+ const minimalLevel = Array.from(options.modules.values()).reduce((level, modLevel) => {
12700
+ return level < modLevel ? level : modLevel;
12701
+ }, options.defaultLevel);
12702
+
12703
+ const transport = ConsoleTransport.create(minimalLevel, options);
12704
+
12705
+ // set the global config
12706
+ GLOBAL_CONFIG.options = options;
12707
+ GLOBAL_CONFIG.transport = transport;
12708
+ }
12709
+
12710
+ /**
12711
+ * Global configuration of all loggers.
12712
+ *
12713
+ * Parse configuration options from an input string typically obtained
12714
+ * from environment variable `JAM_LOG`.
12715
+ */
12716
+ static configureAll(input: string, defaultLevel: Level, workingDir?: string) {
12717
+ const options = parseLoggerOptions(input, defaultLevel, workingDir);
12718
+ Logger.configureAllFromOptions(options);
12719
+ }
12720
+
12721
+ constructor(
12722
+ private readonly moduleName: string,
12723
+ private readonly fileName: string,
12724
+ private readonly config: typeof GLOBAL_CONFIG,
12725
+ ) {}
12726
+
12727
+ /** Log a message with `INSANE` level. */
12728
+ insane(val: string) {
12729
+ this.config.transport.insane(this.moduleName, val);
12730
+ }
12731
+
12732
+ /** Log a message with `TRACE` level. */
12733
+ trace(val: string) {
12734
+ this.config.transport.trace(this.moduleName, val);
12735
+ }
12736
+
12737
+ /** Log a message with `DEBUG`/`LOG` level. */
12738
+ log(val: string) {
12739
+ this.config.transport.log(this.moduleName, val);
12740
+ }
12741
+
12742
+ /** Log a message with `INFO` level. */
12743
+ info(val: string) {
12744
+ this.config.transport.info(this.moduleName, val);
12745
+ }
12746
+
12747
+ /** Log a message with `WARN` level. */
12748
+ warn(val: string) {
12749
+ this.config.transport.warn(this.moduleName, val);
12750
+ }
12751
+
12752
+ /** Log a message with `ERROR` level. */
12753
+ error(val: string) {
12754
+ this.config.transport.error(this.moduleName, val);
12755
+ }
12756
+ }
12757
+
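 // A short usage sketch based on the declarations above: the module name
 // defaults to the file name and every instance shares the global configuration.
 Logger.configureAll("info,consensus=trace", Level.LOG);
 const log = Logger.new(import.meta.filename, "consensus");
 log.trace("should be printed: the consensus module is raised to trace");
 log.warn("should be printed: warnings pass the default info level");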
12758
+ type index$9_Level = Level;
12759
+ declare const index$9_Level: typeof Level;
12760
+ type index$9_Logger = Logger;
12761
+ declare const index$9_Logger: typeof Logger;
12762
+ declare const index$9_parseLoggerOptions: typeof parseLoggerOptions;
12763
+ declare namespace index$9 {
12764
+ export {
12765
+ index$9_Level as Level,
12766
+ index$9_Logger as Logger,
12767
+ index$9_parseLoggerOptions as parseLoggerOptions,
12768
+ };
12769
+ }
12770
+
12771
+ /**
12772
+ * The Mask class is an implementation of the skip function defined in the Gray Paper (GP).
12773
+ *
12774
+ * https://graypaper.fluffylabs.dev/#/5f542d7/237201239801
12775
+ */
12776
+ declare class Mask {
12777
+ /**
12778
+ * The lookup table has `0` at every index that holds an instruction in the bytecode.
12779
+ * A non-zero value is the offset (in bytes) to the index of the next instruction.
12780
+ *
12781
+ * Example:
12782
+ * ```
12783
+ * 0..1..2..3..4..5..6..7..8..9 # Indices
12784
+ * 0..2..1..0..1..0..3..2..1..0 # lookupTable forward values
12785
+ * ```
12786
+ * There are instructions at indices `0, 3, 5, 9`.
12787
+ */
12788
+ private lookupTableForward: Uint8Array;
12789
+
12790
+ constructor(mask: BitVec) {
12791
+ this.lookupTableForward = this.buildLookupTableForward(mask);
12792
+ }
12793
+
12794
+ isInstruction(index: number) {
12795
+ return this.lookupTableForward[index] === 0;
12796
+ }
12797
+
12798
+ getNoOfBytesToNextInstruction(index: number) {
12733
12799
  check(index >= 0, `index (${index}) cannot be a negative number`);
12734
12800
  return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
12735
12801
  }
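 // Worked example for the lookup table documented above (instructions at
 // indices 0, 3, 5 and 9). Building the BitVec is outside this excerpt, so the
 // mask below is only declared for illustration:
 declare const bitMask: BitVec; // bits set at indices 0, 3, 5 and 9
 const skip = new Mask(bitMask);
 skip.isInstruction(0);                 // true  (table value 0)
 skip.isInstruction(1);                 // false (table value 2)
 skip.getNoOfBytesToNextInstruction(1); // 2 -> next instruction at index 3
 skip.getNoOfBytesToNextInstruction(6); // 3 -> next instruction at index 9 (capped at MAX_INSTRUCTION_DISTANCE)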
@@ -13961,13 +14027,14 @@ declare abstract class MemoryPage {
13961
14027
  * And then a new version of TypeScript is released.
13962
14028
  */
13963
14029
  declare global {
13964
- interface ArrayBufferConstructor {
13965
- new (length: number, options?: { maxByteLength: number }): ArrayBuffer;
13966
- }
13967
-
13968
- interface ArrayBuffer {
13969
- resize(length: number): void;
13970
- }
14030
+ interface ArrayBufferConstructor {
14031
+ new (length: number, options?: {
14032
+ maxByteLength: number;
14033
+ }): ArrayBuffer;
14034
+ }
14035
+ interface ArrayBuffer {
14036
+ resize(length: number): void;
14037
+ }
13971
14038
  }
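 // The augmentation above matches the standard ES2024 resizable ArrayBuffer
 // API, which newer TypeScript `lib` definitions ship natively:
 const growable = new ArrayBuffer(0, { maxByteLength: 4096 });
 growable.resize(1024); // resized in place, up to maxByteLength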
13972
14039
 
13973
14040
  type InitialMemoryState = {
@@ -13980,6 +14047,7 @@ declare enum AccessType {
13980
14047
  READ = 0,
13981
14048
  WRITE = 1,
13982
14049
  }
14050
+
13983
14051
  declare class Memory {
13984
14052
  static fromInitialMemory(initialMemoryState: InitialMemoryState) {
13985
14053
  return new Memory(
@@ -14016,6 +14084,7 @@ declare class Memory {
14016
14084
  return Result.ok(OK);
14017
14085
  }
14018
14086
 
14087
+ logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`);
14019
14088
  const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
14020
14089
 
14021
14090
  if (pagesResult.isError) {
@@ -14104,6 +14173,7 @@ declare class Memory {
14104
14173
  bytesLeft -= bytesToRead;
14105
14174
  }
14106
14175
 
14176
+ logger.insane(`MEM[${startAddress}] => ${BytesBlob.blobFrom(result)}`);
14107
14177
  return Result.ok(OK);
14108
14178
  }
14109
14179
 
@@ -15103,6 +15173,10 @@ declare class JumpTable {
15103
15173
  return new JumpTable(0, new Uint8Array());
15104
15174
  }
15105
15175
 
15176
+ getSize() {
15177
+ return this.indices.length;
15178
+ }
15179
+
15106
15180
  copyFrom(jt: JumpTable) {
15107
15181
  this.indices = jt.indices;
15108
15182
  }
@@ -16004,167 +16078,6 @@ declare class OneRegOneExtImmDispatcher {
16004
16078
  }
16005
16079
  }
16006
16080
 
16007
- declare enum Level {
16008
- TRACE = 1,
16009
- LOG = 2,
16010
- INFO = 3,
16011
- WARN = 4,
16012
- ERROR = 5,
16013
- }
16014
-
16015
- type Options = {
16016
- defaultLevel: Level;
16017
- workingDir: string;
16018
- modules: Map<string, Level>;
16019
- };
16020
-
16021
- /**
16022
- * A function to parse logger definition (including modules) given as a string.
16023
- *
16024
- * Examples
16025
- * - `info` - setup default logging level to `info`.
16026
- * - `trace` - default logging level set to `trace`.
16027
- * - `debug;consensus=trace` - default level is set to `debug/log`, but consensus is in trace mode.
16028
- */
16029
- declare function parseLoggerOptions(input: string, defaultLevel: Level, workingDir?: string): Options {
16030
- const modules = new Map<string, Level>();
16031
- const parts = input.toLowerCase().split(",");
16032
- let defLevel = defaultLevel;
16033
-
16034
- for (const p of parts) {
16035
- const clean = p.trim();
16036
- // skip empty objects (forgotten `,` removed)
16037
- if (clean.length === 0) {
16038
- continue;
16039
- }
16040
- // we just have the default level
16041
- if (clean.includes("=")) {
16042
- const [mod, lvl] = clean.split("=");
16043
- modules.set(mod.trim(), parseLevel(lvl.trim()));
16044
- } else {
16045
- defLevel = parseLevel(clean);
16046
- }
16047
- }
16048
-
16049
- // TODO [ToDr] Fix dirname for workers.
16050
- const myDir = (import.meta.dirname ?? "").split("/");
16051
- myDir.pop();
16052
- myDir.pop();
16053
- return {
16054
- defaultLevel: defLevel,
16055
- modules,
16056
- workingDir: workingDir ?? myDir.join("/"),
16057
- };
16058
- }
16059
-
16060
- declare const GLOBAL_CONFIG = {
16061
- options: DEFAULT_OPTIONS,
16062
- transport: ConsoleTransport.create(DEFAULT_OPTIONS.defaultLevel, DEFAULT_OPTIONS),
16063
- };
16064
-
16065
- /**
16066
- * A logger instance.
16067
- */
16068
- declare class Logger {
16069
- /**
16070
- * Create a new logger instance given filename and an optional module name.
16071
- *
16072
- * If the module name is not given, `fileName` becomes the module name.
16073
- * The module name can be composed from multiple parts separated with `/`.
16074
- *
16075
- * The logger will use a global configuration which can be changed using
16076
- * [`configureLogger`] function.
16077
- */
16078
- static new(fileName?: string, moduleName?: string) {
16079
- const fName = fileName ?? "unknown";
16080
- return new Logger(moduleName ?? fName, fName, GLOBAL_CONFIG);
16081
- }
16082
-
16083
- /**
16084
- * Return currently configured level for given module. */
16085
- static getLevel(moduleName: string): Level {
16086
- return findLevel(GLOBAL_CONFIG.options, moduleName);
16087
- }
16088
-
16089
- /**
16090
- * Global configuration of all loggers.
16091
- *
16092
- * One can specify a default logging level (only logs with level >= default will be printed).
16093
- * It's also possible to configure per-module logging level that takes precedence
16094
- * over the default one.
16095
- *
16096
- * Changing the options affects all previously created loggers.
16097
- */
16098
- static configureAllFromOptions(options: Options) {
16099
- // find minimal level to optimise logging in case
16100
- // we don't care about low-level logs.
16101
- const minimalLevel = Array.from(options.modules.values()).reduce((level, modLevel) => {
16102
- return level < modLevel ? level : modLevel;
16103
- }, options.defaultLevel);
16104
-
16105
- const transport = ConsoleTransport.create(minimalLevel, options);
16106
-
16107
- // set the global config
16108
- GLOBAL_CONFIG.options = options;
16109
- GLOBAL_CONFIG.transport = transport;
16110
- }
16111
-
16112
- /**
16113
- * Global configuration of all loggers.
16114
- *
16115
- * Parse configuration options from an input string typically obtained
16116
- * from environment variable `JAM_LOG`.
16117
- */
16118
- static configureAll(input: string, defaultLevel: Level, workingDir?: string) {
16119
- const options = parseLoggerOptions(input, defaultLevel, workingDir);
16120
- Logger.configureAllFromOptions(options);
16121
- }
16122
-
16123
- constructor(
16124
- private readonly moduleName: string,
16125
- private readonly fileName: string,
16126
- private readonly config: typeof GLOBAL_CONFIG,
16127
- ) {}
16128
-
16129
- /** Log a message with `TRACE` level. */
16130
- trace(val: string) {
16131
- this.config.transport.trace(this.moduleName, this.fileName, val);
16132
- }
16133
-
16134
- /** Log a message with `DEBUG`/`LOG` level. */
16135
- log(val: string) {
16136
- this.config.transport.log(this.moduleName, this.fileName, val);
16137
- }
16138
-
16139
- /** Log a message with `INFO` level. */
16140
- info(val: string) {
16141
- this.config.transport.info(this.moduleName, this.fileName, val);
16142
- }
16143
-
16144
- /** Log a message with `WARN` level. */
16145
- warn(val: string) {
16146
- this.config.transport.warn(this.moduleName, this.fileName, val);
16147
- }
16148
-
16149
- /** Log a message with `ERROR` level. */
16150
- error(val: string) {
16151
- this.config.transport.error(this.moduleName, this.fileName, val);
16152
- }
16153
- }
16154
-
16155
- type index$9_Level = Level;
16156
- declare const index$9_Level: typeof Level;
16157
- type index$9_Logger = Logger;
16158
- declare const index$9_Logger: typeof Logger;
16159
- declare const index$9_parseLoggerOptions: typeof parseLoggerOptions;
16160
- declare namespace index$9 {
16161
- export {
16162
- index$9_Level as Level,
16163
- index$9_Logger as Logger,
16164
- index$9_parseLoggerOptions as parseLoggerOptions,
16165
- };
16166
- }
16167
-
16168
16081
  declare enum ProgramDecoderError {
16169
16082
  InvalidProgramError = 0,
16170
16083
  }
@@ -16245,12 +16158,12 @@ declare enum Status {
16245
16158
 
16246
16159
  type InterpreterOptions = {
16247
16160
  useSbrkGas?: boolean;
16248
- ignoreInstructionGas?: boolean;
16249
16161
  };
16250
16162
 
16163
+ declare const logger = Logger.new(import.meta.filename, "pvm");
16164
+
16251
16165
  declare class Interpreter {
16252
16166
  private readonly useSbrkGas: boolean;
16253
- private readonly ignoreInstructionGas: boolean;
16254
16167
  private registers = new Registers();
16255
16168
  private code: Uint8Array = new Uint8Array();
16256
16169
  private mask = Mask.empty();
@@ -16278,9 +16191,8 @@ declare class Interpreter {
16278
16191
  private basicBlocks: BasicBlocks;
16279
16192
  private jumpTable = JumpTable.empty();
16280
16193
 
16281
- constructor({ useSbrkGas = false, ignoreInstructionGas = false }: InterpreterOptions = {}) {
16194
+ constructor({ useSbrkGas = false }: InterpreterOptions = {}) {
16282
16195
  this.useSbrkGas = useSbrkGas;
16283
- this.ignoreInstructionGas = ignoreInstructionGas;
16284
16196
  this.argsDecoder = new ArgsDecoder();
16285
16197
  this.basicBlocks = new BasicBlocks();
16286
16198
  const mathOps = new MathOps(this.registers);
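 // With `ignoreInstructionGas` removed, instruction gas is always charged; the
 // only remaining option is `useSbrkGas`. A minimal construction sketch:
 const interpreter = new Interpreter({ useSbrkGas: false });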
@@ -16376,7 +16288,7 @@ declare class Interpreter {
16376
16288
  const currentInstruction = this.code[this.pc] ?? Instruction.TRAP;
16377
16289
  const isValidInstruction = Instruction[currentInstruction] !== undefined;
16378
16290
  const gasCost = instructionGasMap[currentInstruction] ?? instructionGasMap[Instruction.TRAP];
16379
- const underflow = this.ignoreInstructionGas ? false : this.gas.sub(gasCost);
16291
+ const underflow = this.gas.sub(gasCost);
16380
16292
  if (underflow) {
16381
16293
  this.status = Status.OOG;
16382
16294
  return this.status;
@@ -16385,6 +16297,8 @@ declare class Interpreter {
16385
16297
  const argsResult = this.argsDecodingResults[argsType];
16386
16298
  this.argsDecoder.fillArgs(this.pc, argsResult);
16387
16299
 
16300
+ logger.insane(`[PC: ${this.pc}] ${Instruction[currentInstruction]}`);
16301
+
16388
16302
  if (!isValidInstruction) {
16389
16303
  this.instructionResult.status = Result.PANIC;
16390
16304
  } else {
@@ -16445,12 +16359,6 @@ declare class Interpreter {
16445
16359
  }
16446
16360
 
16447
16361
  if (this.instructionResult.status !== null) {
16448
- // All abnormal terminations should be interpreted as TRAP and we should subtract the gas. In case of FAULT we have to do it manually at the very end.
16449
- if (this.instructionResult.status === Result.FAULT || this.instructionResult.status === Result.FAULT_ACCESS) {
16450
- // TODO [ToDr] underflow?
16451
- this.gas.sub(instructionGasMap[Instruction.TRAP]);
16452
- }
16453
-
16454
16362
  switch (this.instructionResult.status) {
16455
16363
  case Result.FAULT:
16456
16364
  this.status = Status.FAULT;
@@ -16466,6 +16374,7 @@ declare class Interpreter {
16466
16374
  this.status = Status.HOST;
16467
16375
  break;
16468
16376
  }
16377
+ logger.insane(`[PC: ${this.pc}] Status: ${Result[this.instructionResult.status]}`);
16469
16378
  return this.status;
16470
16379
  }
16471
16380
 
@@ -16537,13 +16446,14 @@ declare const index$8_Registers: typeof Registers;
16537
16446
  type index$8_SbrkIndex = SbrkIndex;
16538
16447
  type index$8_SmallGas = SmallGas;
16539
16448
  declare const index$8_gasCounter: typeof gasCounter;
16449
+ declare const index$8_logger: typeof logger;
16540
16450
  declare const index$8_tryAsBigGas: typeof tryAsBigGas;
16541
16451
  declare const index$8_tryAsGas: typeof tryAsGas;
16542
16452
  declare const index$8_tryAsMemoryIndex: typeof tryAsMemoryIndex;
16543
16453
  declare const index$8_tryAsSbrkIndex: typeof tryAsSbrkIndex;
16544
16454
  declare const index$8_tryAsSmallGas: typeof tryAsSmallGas;
16545
16455
  declare namespace index$8 {
16546
- export { index$8_Interpreter as Interpreter, index$8_Memory as Memory, index$8_MemoryBuilder as MemoryBuilder, index$8_Registers as Registers, index$8_gasCounter as gasCounter, index$8_tryAsBigGas as tryAsBigGas, index$8_tryAsGas as tryAsGas, index$8_tryAsMemoryIndex as tryAsMemoryIndex, index$8_tryAsSbrkIndex as tryAsSbrkIndex, index$8_tryAsSmallGas as tryAsSmallGas };
16456
+ export { index$8_Interpreter as Interpreter, index$8_Memory as Memory, index$8_MemoryBuilder as MemoryBuilder, index$8_Registers as Registers, index$8_gasCounter as gasCounter, index$8_logger as logger, index$8_tryAsBigGas as tryAsBigGas, index$8_tryAsGas as tryAsGas, index$8_tryAsMemoryIndex as tryAsMemoryIndex, index$8_tryAsSbrkIndex as tryAsSbrkIndex, index$8_tryAsSmallGas as tryAsSmallGas };
16547
16457
  export type { index$8_BigGas as BigGas, index$8_Gas as Gas, index$8_GasCounter as GasCounter, index$8_InterpreterOptions as InterpreterOptions, index$8_MemoryIndex as MemoryIndex, index$8_SbrkIndex as SbrkIndex, index$8_SmallGas as SmallGas };
16548
16458
  }
16549
16459
 
@@ -16681,7 +16591,7 @@ declare class HostCallsManager {
16681
16591
  return `r${idx}=${value} (0x${value.toString(16)})`;
16682
16592
  })
16683
16593
  .join(", ");
16684
- logger.trace(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
16594
+ logger.insane(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
16685
16595
  }
16686
16596
  }
16687
16597
 
@@ -16692,16 +16602,10 @@ declare class InterpreterInstanceManager {
16692
16602
  private waitingQueue: ResolveFn[] = [];
16693
16603
 
16694
16604
  constructor(noOfPvmInstances: number) {
16695
- const shouldCountGas =
16696
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ||
16697
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
16698
- Compatibility.isSuite(TestSuite.W3F_DAVXY, GpVersion.V0_6_6);
16699
-
16700
16605
  for (let i = 0; i < noOfPvmInstances; i++) {
16701
16606
  this.instances.push(
16702
16607
  new Interpreter({
16703
16608
  useSbrkGas: false,
16704
- ignoreInstructionGas: !shouldCountGas,
16705
16609
  }),
16706
16610
  );
16707
16611
  }
@@ -16808,14 +16712,15 @@ declare class HostCalls {
16808
16712
  const gasCost = typeof hostCall.gasCost === "number" ? hostCall.gasCost : hostCall.gasCost(regs);
16809
16713
  const underflow = gas.sub(gasCost);
16810
16714
 
16715
+ const pcLog = `[PC: ${pvmInstance.getPC()}]`;
16811
16716
  if (underflow) {
16812
- this.hostCalls.traceHostCall("OOG", index, hostCall, regs, gas.get());
16717
+ this.hostCalls.traceHostCall(`${pcLog} OOG`, index, hostCall, regs, gas.get());
16813
16718
  return ReturnValue.fromStatus(pvmInstance.getGasConsumed(), Status.OOG);
16814
16719
  }
16815
- this.hostCalls.traceHostCall("Invoking", index, hostCall, regs, gasBefore);
16720
+ this.hostCalls.traceHostCall(`${pcLog} Invoking`, index, hostCall, regs, gasBefore);
16816
16721
  const result = await hostCall.execute(gas, regs, memory);
16817
16722
  this.hostCalls.traceHostCall(
16818
- result === undefined ? "Result" : `Status(${result})`,
16723
+ result === undefined ? `${pcLog} Result` : `${pcLog} Status(${PvmExecution[result]})`,
16819
16724
  index,
16820
16725
  hostCall,
16821
16726
  regs,
@@ -16827,8 +16732,18 @@ declare class HostCalls {
16827
16732
  return this.getReturnValue(status, pvmInstance);
16828
16733
  }
16829
16734
 
16830
- pvmInstance.runProgram();
16831
- status = pvmInstance.getStatus();
16735
+ if (result === PvmExecution.Panic) {
16736
+ status = Status.PANIC;
16737
+ return this.getReturnValue(status, pvmInstance);
16738
+ }
16739
+
16740
+ if (result === undefined) {
16741
+ pvmInstance.runProgram();
16742
+ status = pvmInstance.getStatus();
16743
+ continue;
16744
+ }
16745
+
16746
+ assertNever(result);
16832
16747
  }
16833
16748
  }
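 // Sketch of the contract the loop above now relies on (the shape is inferred
 // from the calls shown here rather than a documented interface, and fields
 // other than `gasCost`/`execute` are omitted): `execute` returns `undefined`
 // to let the program resume, or a `PvmExecution` value such as `Panic` to stop it.
 const exampleHostCall = {
   gasCost: 10,
   execute: async (_gas: GasCounter, _regs: IHostCallRegisters, _memory: IHostCallMemory) => undefined,
 };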
16834
16749
 
@@ -18167,6 +18082,7 @@ declare const index$3_getServiceId: typeof getServiceId;
18167
18082
  declare const index$3_getServiceIdOrCurrent: typeof getServiceIdOrCurrent;
18168
18083
  declare const index$3_inspect: typeof inspect;
18169
18084
  declare const index$3_instructionArgumentTypeMap: typeof instructionArgumentTypeMap;
18085
+ declare const index$3_isBrowser: typeof isBrowser;
18170
18086
  declare const index$3_isTaggedError: typeof isTaggedError;
18171
18087
  declare const index$3_maybeTaggedErrorToString: typeof maybeTaggedErrorToString;
18172
18088
  declare const index$3_measure: typeof measure;
@@ -18179,7 +18095,7 @@ declare const index$3_tryAsMachineId: typeof tryAsMachineId;
18179
18095
  declare const index$3_tryAsProgramCounter: typeof tryAsProgramCounter;
18180
18096
  declare const index$3_writeServiceIdAsLeBytes: typeof writeServiceIdAsLeBytes;
18181
18097
  declare namespace index$3 {
18182
- export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$3_cast as cast, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_ensure as ensure, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$n as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$p as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
18098
+ export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$3_cast as cast, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_ensure as ensure, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$n as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isBrowser as isBrowser, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$p as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
18183
18099
  export type { index$3_Args as Args, index$3_EnumMapping as EnumMapping, index$3_ErrorResult as ErrorResult, index$3_IHostCallMemory as IHostCallMemory, index$3_IHostCallRegisters as IHostCallRegisters, index$3_InsufficientFundsError as InsufficientFundsError, index$3_MachineId as MachineId, index$3_MachineResult as MachineResult, index$3_MachineStatus as MachineStatus, index$3_NoMachineError as NoMachineError, index$3_OK as OK, index$3_OkResult as OkResult, index$3_Opaque as Opaque, index$3_PartialState as PartialState, index$3_PreimageStatus as PreimageStatus, index$3_ProgramCounter as ProgramCounter, index$3_RefineExternalities as RefineExternalities, index$3_SegmentExportError as SegmentExportError, index$3_ServiceStateUpdate as ServiceStateUpdate, index$3_StateSlice as StateSlice, index$3_StringLiteral as StringLiteral, index$3_TRANSFER_MEMO_BYTES as TRANSFER_MEMO_BYTES, index$3_TaggedError as TaggedError, index$3_TokenOf as TokenOf, index$3_Uninstantiable as Uninstantiable, index$3_UnprivilegedError as UnprivilegedError, index$3_WithOpaque as WithOpaque };
18184
18100
  }
18185
18101
 
@@ -18217,41 +18133,7 @@ declare namespace index$2 {
18217
18133
  };
18218
18134
  }
18219
18135
 
18220
- declare class JsonServiceInfoPre067 {
18221
- static fromJson = json.object<JsonServiceInfoPre067, ServiceAccountInfo>(
18222
- {
18223
- code_hash: fromJson.bytes32(),
18224
- balance: json.fromNumber((x) => tryAsU64(x)),
18225
- min_item_gas: json.fromNumber((x) => tryAsServiceGas(x)),
18226
- min_memo_gas: json.fromNumber((x) => tryAsServiceGas(x)),
18227
- bytes: json.fromNumber((x) => tryAsU64(x)),
18228
- items: "number",
18229
- },
18230
- ({ code_hash, balance, min_item_gas, min_memo_gas, bytes, items }) => {
18231
- return ServiceAccountInfo.create({
18232
- codeHash: code_hash,
18233
- balance,
18234
- accumulateMinGas: min_item_gas,
18235
- onTransferMinGas: min_memo_gas,
18236
- storageUtilisationBytes: bytes,
18237
- storageUtilisationCount: items,
18238
- gratisStorage: tryAsU64(0),
18239
- created: tryAsTimeSlot(0),
18240
- lastAccumulation: tryAsTimeSlot(0),
18241
- parentService: tryAsServiceId(0),
18242
- });
18243
- },
18244
- );
18245
-
18246
- code_hash!: CodeHash;
18247
- balance!: U64;
18248
- min_item_gas!: ServiceGas;
18249
- min_memo_gas!: ServiceGas;
18250
- bytes!: U64;
18251
- items!: U32;
18252
- }
18253
-
18254
- declare class JsonServiceInfo extends JsonServiceInfoPre067 {
18136
+ declare class JsonServiceInfo {
18255
18137
  static fromJson = json.object<JsonServiceInfo, ServiceAccountInfo>(
18256
18138
  {
18257
18139
  code_hash: fromJson.bytes32(),
@@ -18292,6 +18174,12 @@ declare class JsonServiceInfo extends JsonServiceInfoPre067 {
18292
18174
  },
18293
18175
  );
18294
18176
 
18177
+ code_hash!: CodeHash;
18178
+ balance!: U64;
18179
+ min_item_gas!: ServiceGas;
18180
+ min_memo_gas!: ServiceGas;
18181
+ bytes!: U64;
18182
+ items!: U32;
18295
18183
  creation_slot!: TimeSlot;
18296
18184
  deposit_offset!: U64;
18297
18185
  last_accumulation_slot!: TimeSlot;
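 // A hypothetical JSON input for `JsonServiceInfo.fromJson`, restricted to the
 // fields visible in this excerpt (further mapper fields are omitted, and the
 // hex-string encoding of `code_hash` is an assumption):
 const rawServiceInfo = {
   code_hash: `0x${"00".repeat(32)}`,
   balance: 1_000_000,
   min_item_gas: 100,
   min_memo_gas: 100,
   bytes: 4096,
   items: 3,
   creation_slot: 0,
   deposit_offset: 0,
   last_accumulation_slot: 0,
 };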
@@ -18345,9 +18233,7 @@ declare class JsonService {
18345
18233
  {
18346
18234
  id: "number",
18347
18235
  data: {
18348
- service: Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
18349
- ? JsonServiceInfo.fromJson
18350
- : JsonServiceInfoPre067.fromJson,
18236
+ service: JsonServiceInfo.fromJson,
18351
18237
  preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
18352
18238
  storage: json.optional(json.array(JsonStorageItem.fromJson)),
18353
18239
  lookup_meta: json.optional(json.array(lookupMetaFromJson)),
@@ -18485,7 +18371,7 @@ type JsonRecentBlockState = {
18485
18371
  reported: WorkPackageInfo[];
18486
18372
  };
18487
18373
 
18488
- declare const recentBlocksFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
18374
+ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
18489
18375
  {
18490
18376
  history: json.array(recentBlockStateFromJson),
18491
18377
  mmr: {
@@ -18509,49 +18395,6 @@ type JsonRecentBlocks = {
18509
18395
  };
18510
18396
  };
18511
18397
 
18512
- declare const legacyRecentBlockStateFromJson = json.object<JsonRecentBlockStateLegacy, LegacyBlockState>(
18513
- {
18514
- header_hash: fromJson.bytes32(),
18515
- mmr: {
18516
- peaks: json.array(json.nullable(fromJson.bytes32())),
18517
- },
18518
- state_root: fromJson.bytes32(),
18519
- reported: json.array(reportedWorkPackageFromJson),
18520
- },
18521
- ({ header_hash, mmr, state_root, reported }) => {
18522
- return {
18523
- headerHash: header_hash,
18524
- mmr,
18525
- postStateRoot: state_root,
18526
- reported: HashDictionary.fromEntries(reported.map((x) => [x.workPackageHash, x])),
18527
- };
18528
- },
18529
- );
18530
-
18531
- type JsonRecentBlockStateLegacy = {
18532
- header_hash: HeaderHash;
18533
- mmr: {
18534
- peaks: Array<KeccakHash | null>;
18535
- };
18536
- state_root: StateRootHash;
18537
- reported: WorkPackageInfo[];
18538
- };
18539
-
18540
- declare const legacyRecentBlocksFromJson = json.object<LegacyBlocksState, RecentBlocksHistory>(
18541
- json.array(legacyRecentBlockStateFromJson),
18542
- (blocks) => {
18543
- return RecentBlocksHistory.legacyCreate(
18544
- LegacyRecentBlocks.create({
18545
- blocks,
18546
- }),
18547
- );
18548
- },
18549
- );
18550
-
18551
- declare const recentBlocksHistoryFromJson = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
18552
- ? recentBlocksFromJson
18553
- : legacyRecentBlocksFromJson;
18554
-
18555
18398
  declare const ticketFromJson: FromJson<Ticket> = json.object<Ticket>(
18556
18399
  {
18557
18400
  id: fromJson.bytes32(),
@@ -18893,138 +18736,6 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
18893
18736
  },
18894
18737
  );
18895
18738
 
18896
- type JsonStateDumpPre067 = {
18897
- alpha: AuthorizerHash[][];
18898
- varphi: AuthorizerHash[][];
18899
- beta: State["recentBlocks"] | null;
18900
- gamma: {
18901
- gamma_k: State["nextValidatorData"];
18902
- gamma_z: State["epochRoot"];
18903
- gamma_s: TicketsOrKeys;
18904
- gamma_a: State["ticketsAccumulator"];
18905
- };
18906
- psi: State["disputesRecords"];
18907
- eta: State["entropy"];
18908
- iota: State["designatedValidatorData"];
18909
- kappa: State["currentValidatorData"];
18910
- lambda: State["previousValidatorData"];
18911
- rho: State["availabilityAssignment"];
18912
- tau: State["timeslot"];
18913
- chi: {
18914
- chi_m: PrivilegedServices["manager"];
18915
- chi_a: ServiceId; // NOTE: [MaSo] pre067
18916
- chi_v: PrivilegedServices["validatorsManager"];
18917
- chi_g: PrivilegedServices["autoAccumulateServices"] | null;
18918
- };
18919
- pi: JsonStatisticsData;
18920
- theta: State["accumulationQueue"];
18921
- xi: PerEpochBlock<WorkPackageHash[]>;
18922
- accounts: InMemoryService[];
18923
- };
18924
-
18925
- declare const fullStateDumpFromJsonPre067 = (spec: ChainSpec) =>
18926
- json.object<JsonStateDumpPre067, InMemoryState>(
18927
- {
18928
- alpha: json.array(json.array(fromJson.bytes32<AuthorizerHash>())),
18929
- varphi: json.array(json.array(fromJson.bytes32<AuthorizerHash>())),
18930
- beta: json.nullable(recentBlocksHistoryFromJson),
18931
- gamma: {
18932
- gamma_k: json.array(validatorDataFromJson),
18933
- gamma_a: json.array(ticketFromJson),
18934
- gamma_s: TicketsOrKeys.fromJson,
18935
- gamma_z: json.fromString((v) => Bytes.parseBytes(v, BANDERSNATCH_RING_ROOT_BYTES).asOpaque()),
18936
- },
18937
- psi: disputesRecordsFromJson,
18938
- eta: json.array(fromJson.bytes32<EntropyHash>()),
18939
- iota: json.array(validatorDataFromJson),
18940
- kappa: json.array(validatorDataFromJson),
18941
- lambda: json.array(validatorDataFromJson),
18942
- rho: json.array(json.nullable(availabilityAssignmentFromJson)),
18943
- tau: "number",
18944
- chi: {
18945
- chi_m: "number",
18946
- chi_a: "number",
18947
- chi_v: "number",
18948
- chi_g: json.nullable(
18949
- json.array({
18950
- service: "number",
18951
- gasLimit: json.fromNumber((v) => tryAsServiceGas(v)),
18952
- }),
18953
- ),
18954
- },
18955
- pi: JsonStatisticsData.fromJson,
18956
- theta: json.array(json.array(notYetAccumulatedFromJson)),
18957
- xi: json.array(json.array(fromJson.bytes32())),
18958
- accounts: json.array(JsonService.fromJson),
18959
- },
18960
- ({
18961
- alpha,
18962
- varphi,
18963
- beta,
18964
- gamma,
18965
- psi,
18966
- eta,
18967
- iota,
18968
- kappa,
18969
- lambda,
18970
- rho,
18971
- tau,
18972
- chi,
18973
- pi,
18974
- theta,
18975
- xi,
18976
- accounts,
18977
- }): InMemoryState => {
18978
- return InMemoryState.create({
18979
- authPools: tryAsPerCore(
18980
- alpha.map((perCore) => {
18981
- if (perCore.length > MAX_AUTH_POOL_SIZE) {
18982
- throw new Error(`AuthPools: expected less than ${MAX_AUTH_POOL_SIZE}, got ${perCore.length}`);
18983
- }
18984
- return asKnownSize(perCore);
18985
- }),
18986
- spec,
18987
- ),
18988
- authQueues: tryAsPerCore(
18989
- varphi.map((perCore) => {
18990
- if (perCore.length !== AUTHORIZATION_QUEUE_SIZE) {
18991
- throw new Error(`AuthQueues: expected ${AUTHORIZATION_QUEUE_SIZE}, got: ${perCore.length}`);
18992
- }
18993
- return asKnownSize(perCore);
18994
- }),
18995
- spec,
18996
- ),
18997
- recentBlocks: beta ?? RecentBlocksHistory.empty(),
18998
- nextValidatorData: gamma.gamma_k,
18999
- epochRoot: gamma.gamma_z,
19000
- sealingKeySeries: TicketsOrKeys.toSafroleSealingKeys(gamma.gamma_s, spec),
19001
- ticketsAccumulator: gamma.gamma_a,
19002
- disputesRecords: psi,
19003
- entropy: eta,
19004
- designatedValidatorData: iota,
19005
- currentValidatorData: kappa,
19006
- previousValidatorData: lambda,
19007
- availabilityAssignment: rho,
19008
- timeslot: tau,
19009
- privilegedServices: PrivilegedServices.create({
19010
- manager: chi.chi_m,
19011
- authManager: tryAsPerCore(new Array(spec.coresCount).fill(chi.chi_a), spec),
19012
- validatorsManager: chi.chi_v,
19013
- autoAccumulateServices: chi.chi_g ?? [],
19014
- }),
19015
- statistics: JsonStatisticsData.toStatisticsData(spec, pi),
19016
- accumulationQueue: theta,
19017
- recentlyAccumulated: tryAsPerEpochBlock(
19018
- xi.map((x) => HashSet.from(x)),
19019
- spec,
19020
- ),
19021
- services: new Map(accounts.map((x) => [x.serviceId, x])),
19022
- // NOTE Field not present in pre067, added here for compatibility reasons
19023
- accumulationOutputLog: [],
19024
- });
19025
- },
19026
- );
19027
-
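The removed fullStateDumpFromJsonPre067 converter was the bridge from that layout into InMemoryState: it broadcast the single chi_a id to every core with tryAsPerCore(new Array(spec.coresCount).fill(chi.chi_a), spec), defaulted chi_g to an empty list, and injected an empty accumulationOutputLog because that field did not exist in pre-0.6.7 dumps. A stripped-down sketch of just the privileged-services part of that mapping, with plain stand-in types instead of the library's branded ones:

type ServiceId = number;
type AutoAccumulate = { service: ServiceId; gasLimit: bigint };

// Stand-in for the PrivilegedServices.create(...) call inside the removed converter.
function privilegedServicesFromPre067(
  chi: { chi_m: ServiceId; chi_a: ServiceId; chi_v: ServiceId; chi_g: AutoAccumulate[] | null },
  coresCount: number,
) {
  return {
    manager: chi.chi_m,
    // One copy of the single pre-0.6.7 id per core, matching the fill(...) in the removed code.
    authManager: new Array<ServiceId>(coresCount).fill(chi.chi_a),
    validatorsManager: chi.chi_v,
    autoAccumulateServices: chi.chi_g ?? [],
  };
}

The surrounding InMemoryState.create call then supplied accumulationOutputLog: [] so that old dumps still satisfied the current state shape; with the converter gone, dumps produced before 0.6.7 presumably need to be re-encoded before this version of the package can load them.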
19028
18739
  type index$1_JsonAvailabilityAssignment = JsonAvailabilityAssignment;
19029
18740
  type index$1_JsonCoreStatistics = JsonCoreStatistics;
19030
18741
  declare const index$1_JsonCoreStatistics: typeof JsonCoreStatistics;
@@ -19034,19 +18745,15 @@ type index$1_JsonLookupMeta = JsonLookupMeta;
19034
18745
  type index$1_JsonPreimageItem = JsonPreimageItem;
19035
18746
  declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
19036
18747
  type index$1_JsonRecentBlockState = JsonRecentBlockState;
19037
- type index$1_JsonRecentBlockStateLegacy = JsonRecentBlockStateLegacy;
19038
18748
  type index$1_JsonRecentBlocks = JsonRecentBlocks;
19039
18749
  type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
19040
18750
  type index$1_JsonService = JsonService;
19041
18751
  declare const index$1_JsonService: typeof JsonService;
19042
18752
  type index$1_JsonServiceInfo = JsonServiceInfo;
19043
18753
  declare const index$1_JsonServiceInfo: typeof JsonServiceInfo;
19044
- type index$1_JsonServiceInfoPre067 = JsonServiceInfoPre067;
19045
- declare const index$1_JsonServiceInfoPre067: typeof JsonServiceInfoPre067;
19046
18754
  type index$1_JsonServiceStatistics = JsonServiceStatistics;
19047
18755
  declare const index$1_JsonServiceStatistics: typeof JsonServiceStatistics;
19048
18756
  type index$1_JsonStateDump = JsonStateDump;
19049
- type index$1_JsonStateDumpPre067 = JsonStateDumpPre067;
19050
18757
  type index$1_JsonStatisticsData = JsonStatisticsData;
19051
18758
  declare const index$1_JsonStatisticsData: typeof JsonStatisticsData;
19052
18759
  type index$1_JsonStorageItem = JsonStorageItem;
@@ -19059,21 +18766,17 @@ declare const index$1_TicketsOrKeys: typeof TicketsOrKeys;
19059
18766
  declare const index$1_availabilityAssignmentFromJson: typeof availabilityAssignmentFromJson;
19060
18767
  declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
19061
18768
  declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
19062
- declare const index$1_fullStateDumpFromJsonPre067: typeof fullStateDumpFromJsonPre067;
19063
- declare const index$1_legacyRecentBlockStateFromJson: typeof legacyRecentBlockStateFromJson;
19064
- declare const index$1_legacyRecentBlocksFromJson: typeof legacyRecentBlocksFromJson;
19065
18769
  declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
19066
18770
  declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
19067
18771
  declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
19068
- declare const index$1_recentBlocksFromJson: typeof recentBlocksFromJson;
19069
18772
  declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
19070
18773
  declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
19071
18774
  declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEntryFromJson;
19072
18775
  declare const index$1_ticketFromJson: typeof ticketFromJson;
19073
18776
  declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
19074
18777
  declare namespace index$1 {
19075
- export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceInfoPre067 as JsonServiceInfoPre067, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_fullStateDumpFromJsonPre067 as fullStateDumpFromJsonPre067, index$1_legacyRecentBlockStateFromJson as legacyRecentBlockStateFromJson, index$1_legacyRecentBlocksFromJson as legacyRecentBlocksFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksFromJson as recentBlocksFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
19076
- export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlockStateLegacy as JsonRecentBlockStateLegacy, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_JsonStateDumpPre067 as JsonStateDumpPre067, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
18778
+ export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
18779
+ export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
19077
18780
  }
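For downstream code, the visible effect of the export changes above is that the pre-0.6.7 entry points (fullStateDumpFromJsonPre067, legacyRecentBlocksFromJson, legacyRecentBlockStateFromJson, recentBlocksFromJson, JsonStateDumpPre067, JsonServiceInfoPre067, JsonRecentBlockStateLegacy) are gone from this namespace, while recentBlocksHistoryFromJson stays. One defensive option during an upgrade is to feature-detect rather than import a removed symbol directly; the helper below takes the namespace object as a parameter because the exact public re-export path is an assumption, not something this diff shows:

// `ns` is whatever object the consumer obtains for this state-JSON namespace;
// passing it in avoids guessing the package's public import path.
export function hasPre067Parsers(ns: Record<string, unknown>): boolean {
  return "fullStateDumpFromJsonPre067" in ns && "legacyRecentBlocksFromJson" in ns;
}

If it returns false, only fullStateDumpFromJson and recentBlocksHistoryFromJson are available, as in the updated export list above.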
19078
18781
 
19079
18782
  /** Helper function to create most used hashes in the block */