@typeberry/lib 0.0.1-cf41358 → 0.0.1-f0ab5f1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/configs/index.d.ts +74 -0
  2. package/index.d.ts +439 -761
  3. package/index.js +1687 -1161
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -59,8 +59,6 @@ declare namespace index$s {
  }

  declare enum GpVersion {
- V0_6_5 = "0.6.5",
- V0_6_6 = "0.6.6",
  V0_6_7 = "0.6.7",
  V0_7_0 = "0.7.0-preview",
  V0_7_1 = "0.7.1-preview",
@@ -73,13 +71,7 @@ declare enum TestSuite {

  declare const DEFAULT_SUITE = TestSuite.W3F_DAVXY;

- declare const ALL_VERSIONS_IN_ORDER = [
- GpVersion.V0_6_5,
- GpVersion.V0_6_6,
- GpVersion.V0_6_7,
- GpVersion.V0_7_0,
- GpVersion.V0_7_1,
- ];
+ declare const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1];

  declare const env = typeof process === "undefined" ? {} : process.env;
  declare const DEFAULT_VERSION = GpVersion.V0_6_7;
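Example (illustrative, not part of the diff): with 0.6.5 and 0.6.6 dropped, ALL_VERSIONS_IN_ORDER only spans 0.6.7 through 0.7.1-preview, and version gating elsewhere in this file uses the Compatibility helper. The import path below is a hypothetical assumption.

    import { GpVersion, Compatibility, ALL_VERSIONS_IN_ORDER } from "@typeberry/lib"; // hypothetical import

    if (Compatibility.isGreaterOrEqual(GpVersion.V0_7_0)) {
      // behaviour that only applies from Gray Paper 0.7.0-preview onwards
    }
    console.log(ALL_VERSIONS_IN_ORDER); // ["0.6.7", "0.7.0-preview", "0.7.1-preview"]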
@@ -168,6 +160,10 @@ declare class Compatibility {
  }
  }

+ declare function isBrowser() {
+ return typeof process === "undefined" || typeof process.abort === "undefined";
+ }
+
  /**
  * A function to perform runtime assertions.
  *
@@ -286,20 +282,19 @@ declare function inspect<T>(val: T): string {
  }

  /** Utility function to measure time taken for some operation [ms]. */
- declare const measure =
- typeof process === "undefined"
- ? (id: string) => {
- const start = performance.now();
- return () => `${id} took ${performance.now() - start}ms`;
- }
- : (id: string) => {
- const start = process.hrtime.bigint();
- return () => {
- const tookNano = process.hrtime.bigint() - start;
- const tookMilli = Number(tookNano / 1_000_000n).toFixed(2);
- return `${id} took ${tookMilli}ms`;
- };
+ declare const measure = isBrowser()
+ ? (id: string) => {
+ const start = performance.now();
+ return () => `${id} took ${performance.now() - start}ms`;
+ }
+ : (id: string) => {
+ const start = process.hrtime.bigint();
+ return () => {
+ const tookNano = process.hrtime.bigint() - start;
+ const tookMilli = Number(tookNano / 1_000_000n).toFixed(2);
+ return `${id} took ${tookMilli}ms`;
  };
+ };

  /** A class that adds `toString` method that prints all properties of an object. */
  declare abstract class WithDebug {
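Example (illustrative, not part of the diff): `measure(id)` now picks its clock via `isBrowser()` (`performance.now()` in browsers, `process.hrtime.bigint()` elsewhere) and returns a thunk that formats the elapsed time. `importBlock` below is a hypothetical operation.

    const done = measure("block-import");
    await importBlock(); // hypothetical expensive work being timed
    console.log(done()); // e.g. "block-import took 12.34ms"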
@@ -498,6 +493,8 @@ type DeepEqualOptions = {
  errorsCollector?: ErrorsCollector;
  };

+ declare let oomWarningPrinted = false;
+
  /** Deeply compare `actual` and `expected` values. */
  declare function deepEqual<T>(
  actual: T | undefined,
@@ -530,7 +527,7 @@ declare function deepEqual<T>(
  try {
  assert.strictEqual(actualDisp, expectedDisp, message);
  } catch (e) {
- if (isOoMWorkaroundNeeded) {
+ if (isOoMWorkaroundNeeded && !oomWarningPrinted) {
  console.warn(
  [
  "Stacktrace may be crappy because of a problem in nodejs.",
@@ -538,6 +535,7 @@ declare function deepEqual<T>(
  "Maybe we do not need it anymore",
  ].join("\n"),
  );
+ oomWarningPrinted = true;
  }
  throw e;
  }
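The `oomWarningPrinted` flag turns the workaround notice into a warn-once. The same guard pattern in isolation (illustrative sketch only, not code from the package):

    let warned = false;
    function warnOnce(message: string): void {
      if (!warned) {
        console.warn(message); // emitted only on the first call
        warned = true;
      }
    }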
@@ -786,17 +784,19 @@ declare const index$r_ensure: typeof ensure;
  declare const index$r_env: typeof env;
  declare const index$r_getAllKeysSorted: typeof getAllKeysSorted;
  declare const index$r_inspect: typeof inspect;
+ declare const index$r_isBrowser: typeof isBrowser;
  declare const index$r_isResult: typeof isResult;
  declare const index$r_isTaggedError: typeof isTaggedError;
  declare const index$r_maybeTaggedErrorToString: typeof maybeTaggedErrorToString;
  declare const index$r_measure: typeof measure;
+ declare const index$r_oomWarningPrinted: typeof oomWarningPrinted;
  declare const index$r_parseCurrentSuite: typeof parseCurrentSuite;
  declare const index$r_parseCurrentVersion: typeof parseCurrentVersion;
  declare const index$r_resultToString: typeof resultToString;
  declare const index$r_seeThrough: typeof seeThrough;
  declare const index$r_trimStack: typeof trimStack;
  declare namespace index$r {
- export { index$r_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$r_CURRENT_SUITE as CURRENT_SUITE, index$r_CURRENT_VERSION as CURRENT_VERSION, index$r_Compatibility as Compatibility, index$r_DEFAULT_SUITE as DEFAULT_SUITE, index$r_DEFAULT_VERSION as DEFAULT_VERSION, index$r_ErrorsCollector as ErrorsCollector, index$r_GpVersion as GpVersion, Result$2 as Result, index$r_RichTaggedError as RichTaggedError, index$r_TEST_COMPARE_USING as TEST_COMPARE_USING, index$r_TestSuite as TestSuite, index$r_WithDebug as WithDebug, index$r___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$r_asOpaqueType as asOpaqueType, index$r_assertEmpty as assertEmpty, index$r_assertNever as assertNever, index$r_callCompareFunction as callCompareFunction, index$r_cast as cast, index$r_check as check, index$r_deepEqual as deepEqual, index$r_ensure as ensure, index$r_env as env, index$r_getAllKeysSorted as getAllKeysSorted, index$r_inspect as inspect, index$r_isResult as isResult, index$r_isTaggedError as isTaggedError, index$r_maybeTaggedErrorToString as maybeTaggedErrorToString, index$r_measure as measure, index$r_parseCurrentSuite as parseCurrentSuite, index$r_parseCurrentVersion as parseCurrentVersion, index$r_resultToString as resultToString, index$r_seeThrough as seeThrough, index$r_trimStack as trimStack };
+ export { index$r_ALL_VERSIONS_IN_ORDER as ALL_VERSIONS_IN_ORDER, index$r_CURRENT_SUITE as CURRENT_SUITE, index$r_CURRENT_VERSION as CURRENT_VERSION, index$r_Compatibility as Compatibility, index$r_DEFAULT_SUITE as DEFAULT_SUITE, index$r_DEFAULT_VERSION as DEFAULT_VERSION, index$r_ErrorsCollector as ErrorsCollector, index$r_GpVersion as GpVersion, Result$2 as Result, index$r_RichTaggedError as RichTaggedError, index$r_TEST_COMPARE_USING as TEST_COMPARE_USING, index$r_TestSuite as TestSuite, index$r_WithDebug as WithDebug, index$r___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$r_asOpaqueType as asOpaqueType, index$r_assertEmpty as assertEmpty, index$r_assertNever as assertNever, index$r_callCompareFunction as callCompareFunction, index$r_cast as cast, index$r_check as check, index$r_deepEqual as deepEqual, index$r_ensure as ensure, index$r_env as env, index$r_getAllKeysSorted as getAllKeysSorted, index$r_inspect as inspect, index$r_isBrowser as isBrowser, index$r_isResult as isResult, index$r_isTaggedError as isTaggedError, index$r_maybeTaggedErrorToString as maybeTaggedErrorToString, index$r_measure as measure, index$r_oomWarningPrinted as oomWarningPrinted, index$r_parseCurrentSuite as parseCurrentSuite, index$r_parseCurrentVersion as parseCurrentVersion, index$r_resultToString as resultToString, index$r_seeThrough as seeThrough, index$r_trimStack as trimStack };
  export type { index$r_DeepEqualOptions as DeepEqualOptions, index$r_EnumMapping as EnumMapping, index$r_ErrorResult as ErrorResult, index$r_OK as OK, index$r_OkResult as OkResult, index$r_Opaque as Opaque, index$r_StringLiteral as StringLiteral, index$r_TaggedError as TaggedError, index$r_TokenOf as TokenOf, index$r_Uninstantiable as Uninstantiable, index$r_WithOpaque as WithOpaque };
  }
 
@@ -4464,6 +4464,84 @@ declare namespace index$m {
  export type { index$m_HashWithZeroedBit as HashWithZeroedBit, index$m_ImmutableHashDictionary as ImmutableHashDictionary, index$m_ImmutableHashSet as ImmutableHashSet, index$m_ImmutableSortedArray as ImmutableSortedArray, index$m_ImmutableSortedSet as ImmutableSortedSet, index$m_KeyMapper as KeyMapper, index$m_KeyMappers as KeyMappers, index$m_KnownSize as KnownSize, index$m_KnownSizeArray as KnownSizeArray, index$m_KnownSizeId as KnownSizeId, index$m_NestedMaps as NestedMaps };
  }

+ declare namespace bandersnatch_d_exports {
+ export { batch_verify_tickets, __wbg_init$2 as default, derive_public_key, initSync$2 as initSync, ring_commitment, verify_seal };
+ export type { InitInput$2 as InitInput, InitOutput$2 as InitOutput, SyncInitInput$2 as SyncInitInput };
+ }
+ /* tslint:disable */
+ /* eslint-disable */
+ /**
+ * @param {Uint8Array} keys
+ * @returns {Uint8Array}
+ */
+ declare function ring_commitment(keys: Uint8Array): Uint8Array;
+ /**
+ * Derive Private and Public Key from Seed
+ *
+ * returns: `Vec<u8>` containing the exit (1 byte) status followed by the (32 bytes) public key
+ * @param {Uint8Array} seed
+ * @returns {Uint8Array}
+ */
+ declare function derive_public_key(seed: Uint8Array): Uint8Array;
+ /**
+ * Seal verification as defined in:
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/0eff000eff00?v=0.6.4
+ * or
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/0e54010e5401?v=0.6.4
+ * @param {Uint8Array} keys
+ * @param {number} signer_key_index
+ * @param {Uint8Array} seal_data
+ * @param {Uint8Array} payload
+ * @param {Uint8Array} aux_data
+ * @returns {Uint8Array}
+ */
+ declare function verify_seal(keys: Uint8Array, signer_key_index: number, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
+ /**
+ * Verify multiple tickets at once as defined in:
+ * https://graypaper.fluffylabs.dev/#/68eaa1f/0f3e000f3e00?v=0.6.4
+ *
+ * NOTE: the aux_data of VRF function is empty!
+ * @param {Uint8Array} keys
+ * @param {Uint8Array} tickets_data
+ * @param {number} vrf_input_data_len
+ * @returns {Uint8Array}
+ */
+ declare function batch_verify_tickets(keys: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
+ type InitInput$2 = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
+ interface InitOutput$2 {
+ readonly memory: WebAssembly.Memory;
+ readonly ring_commitment: (a: number, b: number, c: number) => void;
+ readonly derive_public_key: (a: number, b: number, c: number) => void;
+ readonly verify_seal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number) => void;
+ readonly batch_verify_tickets: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
+ readonly __wbindgen_add_to_stack_pointer: (a: number) => number;
+ readonly __wbindgen_malloc: (a: number, b: number) => number;
+ readonly __wbindgen_free: (a: number, b: number, c: number) => void;
+ }
+ type SyncInitInput$2 = BufferSource | WebAssembly.Module;
+ /**
+ * Instantiates the given `module`, which can either be bytes or
+ * a precompiled `WebAssembly.Module`.
+ *
+ * @param {SyncInitInput} module
+ *
+ * @returns {InitOutput}
+ */
+ declare function initSync$2(module: SyncInitInput$2): InitOutput$2;
+
+ /**
+ * If `module_or_path` is {RequestInfo} or {URL}, makes a request and
+ * for everything else, calls `WebAssembly.instantiate` directly.
+ *
+ * @param {InitInput | Promise<InitInput>} module_or_path
+ *
+ * @returns {Promise<InitOutput>}
+ */
+ declare function __wbg_init$2(module_or_path?: InitInput$2 | Promise<InitInput$2>): Promise<InitOutput$2>;
+ //#endregion
+ //#region native/index.d.ts
+ declare function initAll(): Promise<void>;
+
  /** ED25519 private key size. */
  declare const ED25519_PRIV_KEY_BYTES = 32;
  type ED25519_PRIV_KEY_BYTES = typeof ED25519_PRIV_KEY_BYTES;
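Example (illustrative, not part of the diff): the wasm-bindgen bindings added above must be initialised before use. `initAll()` (re-exported later in this diff as `initWasm`) awaits that, after which `derive_public_key` returns a status byte followed by the 32-byte public key. The 32-byte seed length is an assumption based on `SEED_SIZE` elsewhere in the package.

    await initAll(); // or: await bandersnatchWasm.default() to initialise only this module
    const seed = new Uint8Array(32); // assumed 32-byte secret seed
    const out = derive_public_key(seed);
    if (out[0] !== 0) {
      throw new Error("key derivation failed"); // out[0] is the exit-status byte
    }
    const publicKey = out.subarray(1, 33); // the 32-byte Bandersnatch public key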
@@ -4556,7 +4634,7 @@ declare async function verify<T extends BytesBlob>(input: Input<T>[]): Promise<b
  offset += messageLength;
  }

- const result = Array.from(verify_ed25519(data)).map((x) => x === 1);
+ const result = Array.from(ed25519.verify_ed25519(data)).map((x) => x === 1);
  return Promise.resolve(result);
  }

@@ -4578,7 +4656,7 @@ declare async function verifyBatch<T extends BytesBlob>(input: Input<T>[]): Prom

  const data = BytesBlob.blobFromParts(first, ...rest).raw;

- return Promise.resolve(verify_ed25519_batch(data));
+ return Promise.resolve(ed25519.verify_ed25519_batch(data));
  }

  type ed25519_ED25519_KEY_BYTES = ED25519_KEY_BYTES;
@@ -4598,59 +4676,6 @@ declare namespace ed25519 {
  export type { ed25519_ED25519_KEY_BYTES as ED25519_KEY_BYTES, ed25519_ED25519_PRIV_KEY_BYTES as ED25519_PRIV_KEY_BYTES, ed25519_ED25519_SIGNATURE_BYTES as ED25519_SIGNATURE_BYTES, ed25519_Ed25519Key as Ed25519Key, ed25519_Ed25519Signature as Ed25519Signature, ed25519_Input as Input };
  }

- /* tslint:disable */
- /* eslint-disable */
- /**
- * @param {Uint8Array} keys
- * @returns {Uint8Array}
- */
- declare function ring_commitment(keys: Uint8Array): Uint8Array;
- /**
- * Derive Private and Public Key from Seed
- *
- * returns: `Vec<u8>` containing the exit (1 byte) status followed by the (32 bytes) public key
- * @param {Uint8Array} seed
- * @returns {Uint8Array}
- */
- declare function derive_public_key(seed: Uint8Array): Uint8Array;
- /**
- * Seal verification as defined in:
- * https://graypaper.fluffylabs.dev/#/68eaa1f/0eff000eff00?v=0.6.4
- * or
- * https://graypaper.fluffylabs.dev/#/68eaa1f/0e54010e5401?v=0.6.4
- * @param {Uint8Array} keys
- * @param {number} signer_key_index
- * @param {Uint8Array} seal_data
- * @param {Uint8Array} payload
- * @param {Uint8Array} aux_data
- * @returns {Uint8Array}
- */
- declare function verify_seal(keys: Uint8Array, signer_key_index: number, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
- /**
- * Verify multiple tickets at once as defined in:
- * https://graypaper.fluffylabs.dev/#/68eaa1f/0f3e000f3e00?v=0.6.4
- *
- * NOTE: the aux_data of VRF function is empty!
- * @param {Uint8Array} keys
- * @param {Uint8Array} tickets_data
- * @param {number} vrf_input_data_len
- * @returns {Uint8Array}
- */
- declare function batch_verify_tickets(keys: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
-
- declare const bandersnatch_d_batch_verify_tickets: typeof batch_verify_tickets;
- declare const bandersnatch_d_derive_public_key: typeof derive_public_key;
- declare const bandersnatch_d_ring_commitment: typeof ring_commitment;
- declare const bandersnatch_d_verify_seal: typeof verify_seal;
- declare namespace bandersnatch_d {
- export {
- bandersnatch_d_batch_verify_tickets as batch_verify_tickets,
- bandersnatch_d_derive_public_key as derive_public_key,
- bandersnatch_d_ring_commitment as ring_commitment,
- bandersnatch_d_verify_seal as verify_seal,
- };
- }
-
  /** Bandersnatch public key size. */
  declare const BANDERSNATCH_KEY_BYTES = 32;
  type BANDERSNATCH_KEY_BYTES = typeof BANDERSNATCH_KEY_BYTES;
@@ -4708,7 +4733,7 @@ type BlsKey = Opaque<Bytes<BLS_KEY_BYTES>, "BlsKey">;

  /** Derive a Bandersnatch public key from a seed. */
  declare function publicKey(seed: Uint8Array): BandersnatchKey {
- const key = derive_public_key(seed);
+ const key = bandersnatch.derive_public_key(seed);

  check(key[0] === 0, "Invalid Bandersnatch public key derived from seed");

@@ -4834,7 +4859,7 @@ declare const index$l_bandersnatch: typeof bandersnatch;
  declare const index$l_ed25519: typeof ed25519;
  declare const index$l_keyDerivation: typeof keyDerivation;
  declare namespace index$l {
- export { index$l_Ed25519Pair as Ed25519Pair, index$l_bandersnatch as bandersnatch, bandersnatch_d as bandersnatchWasm, index$l_ed25519 as ed25519, index$l_keyDerivation as keyDerivation };
+ export { index$l_Ed25519Pair as Ed25519Pair, index$l_bandersnatch as bandersnatch, bandersnatch_d_exports as bandersnatchWasm, index$l_ed25519 as ed25519, initAll as initWasm, index$l_keyDerivation as keyDerivation };
  export type { index$l_BANDERSNATCH_KEY_BYTES as BANDERSNATCH_KEY_BYTES, index$l_BANDERSNATCH_PROOF_BYTES as BANDERSNATCH_PROOF_BYTES, index$l_BANDERSNATCH_RING_ROOT_BYTES as BANDERSNATCH_RING_ROOT_BYTES, index$l_BANDERSNATCH_VRF_SIGNATURE_BYTES as BANDERSNATCH_VRF_SIGNATURE_BYTES, index$l_BLS_KEY_BYTES as BLS_KEY_BYTES, index$l_BandersnatchKey as BandersnatchKey, index$l_BandersnatchProof as BandersnatchProof, index$l_BandersnatchRingRoot as BandersnatchRingRoot, index$l_BandersnatchSecretSeed as BandersnatchSecretSeed, index$l_BandersnatchVrfSignature as BandersnatchVrfSignature, index$l_BlsKey as BlsKey, index$l_ED25519_KEY_BYTES as ED25519_KEY_BYTES, index$l_ED25519_PRIV_KEY_BYTES as ED25519_PRIV_KEY_BYTES, index$l_ED25519_SIGNATURE_BYTES as ED25519_SIGNATURE_BYTES, index$l_Ed25519Key as Ed25519Key, index$l_Ed25519SecretSeed as Ed25519SecretSeed, index$l_Ed25519Signature as Ed25519Signature, KeySeed as PublicKeySeed, index$l_SEED_SIZE as SEED_SIZE };
  }
 
@@ -4914,6 +4939,8 @@ declare class ChainSpec extends WithDebug {
  readonly maxBlockGas: U64;
  /** `G_R`: The gas allocated to invoke a work-package’s Refine logic. */
  readonly maxRefineGas: U64;
+ /** `L`: The maximum age in timeslots of the lookup anchor. */
+ readonly maxLookupAnchorAge: U32;

  constructor(data: Omit<ChainSpec, "validatorsSuperMajority" | "thirdOfValidators" | "erasureCodedPieceSize">) {
  super();
@@ -4933,6 +4960,7 @@ declare class ChainSpec extends WithDebug {
  this.erasureCodedPieceSize = tryAsU32(EC_SEGMENT_SIZE / data.numberECPiecesPerSegment);
  this.maxBlockGas = data.maxBlockGas;
  this.maxRefineGas = data.maxRefineGas;
+ this.maxLookupAnchorAge = data.maxLookupAnchorAge;
  }
  }

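Example (illustrative, not part of the diff): `maxLookupAnchorAge` (`L` in the Gray Paper) bounds how old a lookup anchor may be; the tiny and full chain specs in the next hunks set it to 24 and 14,400 timeslots respectively. One plausible reading of the field, as a hypothetical helper:

    // Hypothetical check only; the real validation lives elsewhere in typeberry.
    function isLookupAnchorFresh(spec: ChainSpec, currentSlot: number, anchorSlot: number): boolean {
      return currentSlot - anchorSlot <= spec.maxLookupAnchorAge;
    }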
@@ -4951,6 +4979,8 @@ declare const tinyChainSpec = new ChainSpec({
  preimageExpungePeriod: tryAsU32(32),
  maxBlockGas: tryAsU64(20_000_000),
  maxRefineGas: tryAsU64(1_000_000_000),
+ // https://github.com/davxy/jam-conformance/pull/47/files#diff-27e26142b3a96e407dab40d388b63d553f5d9cdb66dec58cd93e63dd434f9e45R260
+ maxLookupAnchorAge: tryAsU32(24),
  });

  /**
@@ -4970,6 +5000,7 @@ declare const fullChainSpec = new ChainSpec({
  preimageExpungePeriod: tryAsU32(19_200),
  maxBlockGas: tryAsU64(3_500_000_000),
  maxRefineGas: tryAsU64(5_000_000_000),
+ maxLookupAnchorAge: tryAsU32(14_400),
  });

  /**
@@ -5966,17 +5997,13 @@ declare enum WorkExecResultKind {
  /** `☇`: unexpected program termination. */
  panic = 2,
  /** `⊚`: the number of exports made was invalidly reported. */
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
- incorrectNumberOfExports = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 3 : -1,
+ incorrectNumberOfExports = 3,
  /** `⊖`: the size of the digest (refinement output) would cross the acceptable limit. */
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
- digestTooBig = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 4 : -1,
+ digestTooBig = 4,
  /** `BAD`: service code was not available for lookup in state. */
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
- badCode = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 5 : 3,
+ badCode = 5,
  /** `BIG`: the code was too big (beyond the maximum allowed size `W_C`) */
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
- codeOversize = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 6 : 4,
+ codeOversize = 6,
  }

  /** The execution result of some work-package. */
@@ -6246,18 +6273,15 @@ declare const WorkReportCodec = codec.Class(WorkReportNoCodec, {
  declare const WorkReportCodecPre070 = codec.Class(WorkReportNoCodec, {
  workPackageSpec: WorkPackageSpec.Codec,
  context: RefineContext.Codec,
- coreIndex:
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_5) && !Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5)
- ? codec.varU32.convert(
- (o) => tryAsU32(o),
- (i) => {
- if (!isU16(i)) {
- throw new Error(`Core index exceeds U16: ${i}`);
- }
- return tryAsCoreIndex(i);
- },
- )
- : codec.u16.asOpaque<CoreIndex>(),
+ coreIndex: codec.varU32.convert(
+ (o) => tryAsU32(o),
+ (i) => {
+ if (!isU16(i)) {
+ throw new Error(`Core index exceeds U16: ${i}`);
+ }
+ return tryAsCoreIndex(i);
+ },
+ ),
  authorizerHash: codec.bytes(HASH_SIZE).asOpaque<AuthorizerHash>(),
  authorizationOutput: codec.blob,
  segmentRootLookup: readonlyArray(codec.sequenceVarLen(WorkPackageInfo.Codec)),
@@ -7835,7 +7859,7 @@ declare const DEV_CONFIG = "dev";
  declare const DEFAULT_CONFIG = "default";

  declare const NODE_DEFAULTS = {
- name: os.hostname(),
+ name: isBrowser() ? "browser" : os.hostname(),
  config: DEFAULT_CONFIG,
  };

@@ -7890,11 +7914,11 @@ declare class NodeConfiguration {

  declare function loadConfig(configPath: string): NodeConfiguration {
  if (configPath === DEFAULT_CONFIG) {
- return parseFromJson(defaultConfigJson, NodeConfiguration.fromJson);
+ return parseFromJson(configs.default, NodeConfiguration.fromJson);
  }

  if (configPath === DEV_CONFIG) {
- return parseFromJson(devConfigJson, NodeConfiguration.fromJson);
+ return parseFromJson(configs.dev, NodeConfiguration.fromJson);
  }

  try {
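Example (illustrative, not part of the diff): the built-in configurations are now read from the bundled `configs` module (see the new `package/configs/index.d.ts` in the file list above) rather than raw JSON imports, so the caller-side usage stays the same:

    const nodeConfig = loadConfig(DEFAULT_CONFIG); // bundled default config via configs.default
    const devConfig = loadConfig(DEV_CONFIG);      // bundled dev config via configs.dev
    const custom = loadConfig("./my-node.json");   // assumption: any other value is treated as a file path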
@@ -8235,20 +8259,7 @@ declare class AutoAccumulate {
  declare class PrivilegedServices {
  static Codec = codec.Class(PrivilegedServices, {
  manager: codec.u32.asOpaque<ServiceId>(),
- authManager: Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? codecPerCore(codec.u32.asOpaque<ServiceId>())
- : codecWithContext((ctx) =>
- codec.u32.asOpaque<ServiceId>().convert(
- // NOTE: [MaSo] In a compatibility mode we are always updating all entries
- // (all the entries are the same)
- // so it doesn't matter which one we take here.
- (perCore: PerCore<ServiceId>) => perCore[0],
- (serviceId: ServiceId) => {
- const array = new Array(ctx.coresCount).fill(serviceId);
- return tryAsPerCore(array, ctx);
- },
- ),
- ),
+ authManager: codecPerCore(codec.u32.asOpaque<ServiceId>()),
  validatorsManager: codec.u32.asOpaque<ServiceId>(),
  autoAccumulateServices: readonlyArray(codec.sequenceVarLen(AutoAccumulate.Codec)),
  });
@@ -8440,60 +8451,6 @@ declare namespace index$f {
  declare const MAX_RECENT_HISTORY = 8;
  type MAX_RECENT_HISTORY = typeof MAX_RECENT_HISTORY;

- type LegacyBlocksState = KnownSizeArray<LegacyBlockState, `0..${typeof MAX_RECENT_HISTORY}`>;
-
- declare class LegacyBlockState extends WithDebug {
- static Codec = codec.Class(LegacyBlockState, {
- headerHash: codec.bytes(HASH_SIZE).asOpaque<HeaderHash>(),
- mmr: codec.object({
- peaks: readonlyArray(codec.sequenceVarLen(codec.optional(codec.bytes(HASH_SIZE)))),
- }),
- postStateRoot: codec.bytes(HASH_SIZE).asOpaque<StateRootHash>(),
- reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
- });
-
- static create({ headerHash, mmr, postStateRoot, reported }: CodecRecord<LegacyBlockState>) {
- return new LegacyBlockState(headerHash, mmr, postStateRoot, reported);
- }
-
- private constructor(
- /** Header hash. */
- public readonly headerHash: HeaderHash,
- /** Merkle mountain range peaks. */
- public readonly mmr: MmrPeaks<KeccakHash>,
- /** Posterior state root filled in with a 1-block delay. */
- public postStateRoot: StateRootHash,
- /** Reported work packages (no more than number of cores). */
- public readonly reported: HashDictionary<WorkPackageHash, WorkPackageInfo>,
- ) {
- super();
- }
- }
-
- declare class LegacyRecentBlocks extends WithDebug {
- static Codec = codec.Class(LegacyRecentBlocks, {
- blocks: codecKnownSizeArray(LegacyBlockState.Codec, {
- minLength: 0,
- maxLength: MAX_RECENT_HISTORY,
- typicalLength: MAX_RECENT_HISTORY,
- }),
- });
-
- static create(a: CodecRecord<LegacyRecentBlocks>) {
- return new LegacyRecentBlocks(a.blocks);
- }
-
- private constructor(
- /**
- * Most recent blocks.
- * https://graypaper.fluffylabs.dev/#/85129da/0fb6010fb601?v=0.6.3
- */
- public readonly blocks: LegacyBlocksState,
- ) {
- super();
- }
- }
-
  /** Array of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
  type BlocksState = KnownSizeArray<BlockState, `0..${typeof MAX_RECENT_HISTORY}`>;

@@ -8557,87 +8514,54 @@ declare class RecentBlocks extends WithDebug {
  }

  /**
- * Unified recent history of blocks that handles both legacy and current formats.
+ * Recent history of blocks.
  *
- * https://graypaper.fluffylabs.dev/#/85129da/38cb0138cb01?v=0.6.3
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
  */
  declare class RecentBlocksHistory extends WithDebug {
  static Codec = Descriptor.new<RecentBlocksHistory>(
  "RecentBlocksHistory",
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? RecentBlocks.Codec.sizeHint : LegacyRecentBlocks.Codec.sizeHint,
- (encoder, value) =>
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? RecentBlocks.Codec.encode(encoder, value.asCurrent())
- : LegacyRecentBlocks.Codec.encode(encoder, value.asLegacy()),
+ RecentBlocks.Codec.sizeHint,
+ (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()),
  (decoder) => {
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
- const recentBlocks = RecentBlocks.Codec.decode(decoder);
- return RecentBlocksHistory.create(recentBlocks);
- }
- const legacyBlocks = LegacyRecentBlocks.Codec.decode(decoder);
- return RecentBlocksHistory.legacyCreate(legacyBlocks);
+ const recentBlocks = RecentBlocks.Codec.decode(decoder);
+ return RecentBlocksHistory.create(recentBlocks);
  },
- (_sizer) => {
- return Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? RecentBlocks.Codec.sizeHint
- : LegacyRecentBlocks.Codec.sizeHint;
+ (skip) => {
+ return RecentBlocks.Codec.skip(skip);
  },
  );

  static create(recentBlocks: RecentBlocks) {
- return new RecentBlocksHistory(recentBlocks, null);
- }
-
- static legacyCreate(legacyRecentBlocks: LegacyRecentBlocks) {
- return new RecentBlocksHistory(null, legacyRecentBlocks);
+ return new RecentBlocksHistory(recentBlocks);
  }

  static empty() {
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
- return RecentBlocksHistory.create(
- RecentBlocks.create({
- blocks: asKnownSize([]),
- accumulationLog: { peaks: [] },
- }),
- );
- }
- return RecentBlocksHistory.legacyCreate(LegacyRecentBlocks.create({ blocks: asKnownSize([]) }));
+ return RecentBlocksHistory.create(
+ RecentBlocks.create({
+ blocks: asKnownSize([]),
+ accumulationLog: { peaks: [] },
+ }),
+ );
  }

  /**
  * Returns the block's BEEFY super peak.
- *
- * NOTE: The `hasher` parameter exists solely for backward compatibility with legacy block format.
  */
- static accumulationResult(
- block: BlockState | LegacyBlockState,
- {
- hasher,
- }: {
- hasher: MmrHasher<KeccakHash>;
- },
- ): KeccakHash {
- return Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? (block as BlockState).accumulationResult
- : MerkleMountainRange.fromPeaks(hasher, (block as LegacyBlockState).mmr).getSuperPeakHash();
+ static accumulationResult(block: BlockState): KeccakHash {
+ return (block as BlockState).accumulationResult;
  }

- private constructor(
- private readonly current: RecentBlocks | null,
- private readonly legacy: LegacyRecentBlocks | null,
- ) {
+ private constructor(private readonly current: RecentBlocks | null) {
  super();
  }

  /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
- get blocks(): readonly (BlockState | LegacyBlockState)[] {
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) && this.current !== null) {
+ get blocks(): readonly BlockState[] {
+ if (this.current !== null) {
  return this.current.blocks;
  }
- if (this.legacy !== null) {
- return this.legacy.blocks;
- }
+
  throw new Error("RecentBlocksHistory is in invalid state");
  }

@@ -8648,15 +8572,8 @@ declare class RecentBlocksHistory extends WithDebug {
  return this.current;
  }

- asLegacy() {
- if (this.legacy === null) {
- throw new Error("Cannot access legacy RecentBlocks format");
- }
- return this.legacy;
- }
-
- updateBlocks(blocks: (BlockState | LegacyBlockState)[]): RecentBlocksHistory {
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) && this.current !== null) {
+ updateBlocks(blocks: BlockState[]): RecentBlocksHistory {
+ if (this.current !== null) {
  return RecentBlocksHistory.create(
  RecentBlocks.create({
  ...this.current,
@@ -8664,13 +8581,7 @@ declare class RecentBlocksHistory extends WithDebug {
  }),
  );
  }
- if (this.legacy !== null) {
- return RecentBlocksHistory.legacyCreate(
- LegacyRecentBlocks.create({
- blocks: asOpaqueType(blocks as LegacyBlockState[]),
- }),
- );
- }
+
  throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
  }
  }
@@ -8858,31 +8769,18 @@ declare const ignoreValueWithDefault = <T>(defaultValue: T) =>
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/108301108301?v=0.6.7
  */
  declare class ServiceAccountInfo extends WithDebug {
- static Codec = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? codec.Class(ServiceAccountInfo, {
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
- balance: codec.u64,
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- storageUtilisationBytes: codec.u64,
- gratisStorage: codec.u64,
- storageUtilisationCount: codec.u32,
- created: codec.u32.convert((x) => x, tryAsTimeSlot),
- lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
- parentService: codec.u32.convert((x) => x, tryAsServiceId),
- })
- : codec.Class(ServiceAccountInfo, {
- codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
- balance: codec.u64,
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
- storageUtilisationBytes: codec.u64,
- storageUtilisationCount: codec.u32,
- gratisStorage: ignoreValueWithDefault(tryAsU64(0)),
- created: ignoreValueWithDefault(tryAsTimeSlot(0)),
- lastAccumulation: ignoreValueWithDefault(tryAsTimeSlot(0)),
- parentService: ignoreValueWithDefault(tryAsServiceId(0)),
- });
+ static Codec = codec.Class(ServiceAccountInfo, {
+ codeHash: codec.bytes(HASH_SIZE).asOpaque<CodeHash>(),
+ balance: codec.u64,
+ accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+ onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
+ storageUtilisationBytes: codec.u64,
+ gratisStorage: codec.u64,
+ storageUtilisationCount: codec.u32,
+ created: codec.u32.convert((x) => x, tryAsTimeSlot),
+ lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
+ parentService: codec.u32.convert((x) => x, tryAsServiceId),
+ });

  static create(a: CodecRecord<ServiceAccountInfo>) {
  return new ServiceAccountInfo(
@@ -8904,11 +8802,6 @@ declare class ServiceAccountInfo extends WithDebug {
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/119e01119e01?v=0.6.7
  */
  static calculateThresholdBalance(items: U32, bytes: U64, gratisStorage: U64): U64 {
- check(
- gratisStorage === tryAsU64(0) || Compatibility.isGreaterOrEqual(GpVersion.V0_6_7),
- "Gratis storage cannot be non-zero before 0.6.7",
- );
-
  const storageCost =
  BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * BigInt(items) + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage;

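Worked shape of the formula above (illustrative only; the constants below are placeholders, not the real BASE_SERVICE_BALANCE / ELECTIVE_* values from the package): the threshold grows linearly in item count and byte usage and is reduced by `gratisStorage`, which after this change may be non-zero unconditionally.

    // t = BASE_SERVICE_BALANCE + ELECTIVE_ITEM_BALANCE * items + ELECTIVE_BYTE_BALANCE * bytes - gratisStorage
    const BASE = 100n, PER_ITEM = 10n, PER_BYTE = 1n; // placeholder constants
    const threshold = BASE + PER_ITEM * 3n + PER_BYTE * 1_000n - 0n; // = 1130n with these placeholders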
@@ -9219,9 +9112,7 @@ type ServicesUpdate = {
  };

  declare const codecServiceId: Descriptor<ServiceId> =
- Compatibility.isSuite(TestSuite.W3F_DAVXY) ||
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
+ Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
  ? codec.u32.asOpaque<ServiceId>()
  : codec.varU32.convert(
  (s) => tryAsU32(s),
@@ -10263,11 +10154,6 @@ declare const index$e_InMemoryService: typeof InMemoryService;
  type index$e_InMemoryState = InMemoryState;
  declare const index$e_InMemoryState: typeof InMemoryState;
  type index$e_InMemoryStateFields = InMemoryStateFields;
- type index$e_LegacyBlockState = LegacyBlockState;
- declare const index$e_LegacyBlockState: typeof LegacyBlockState;
- type index$e_LegacyBlocksState = LegacyBlocksState;
- type index$e_LegacyRecentBlocks = LegacyRecentBlocks;
- declare const index$e_LegacyRecentBlocks: typeof LegacyRecentBlocks;
  type index$e_LookupHistoryItem = LookupHistoryItem;
  declare const index$e_LookupHistoryItem: typeof LookupHistoryItem;
  type index$e_LookupHistorySlots = LookupHistorySlots;
@@ -10338,8 +10224,8 @@ declare const index$e_tryAsPerCore: typeof tryAsPerCore;
  declare const index$e_workReportsSortedSetCodec: typeof workReportsSortedSetCodec;
  declare const index$e_zeroSizeHint: typeof zeroSizeHint;
  declare namespace index$e {
- export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LegacyBlockState as LegacyBlockState, index$e_LegacyRecentBlocks as LegacyRecentBlocks, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
- export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LegacyBlocksState as LegacyBlocksState, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
+ export { index$e_AccumulationOutput as AccumulationOutput, index$e_AutoAccumulate as AutoAccumulate, index$e_AvailabilityAssignment as AvailabilityAssignment, index$e_BASE_SERVICE_BALANCE as BASE_SERVICE_BALANCE, index$e_BlockState as BlockState, index$e_CoreStatistics as CoreStatistics, index$e_DisputesRecords as DisputesRecords, index$e_ELECTIVE_BYTE_BALANCE as ELECTIVE_BYTE_BALANCE, index$e_ELECTIVE_ITEM_BALANCE as ELECTIVE_ITEM_BALANCE, index$e_InMemoryService as InMemoryService, index$e_InMemoryState as InMemoryState, index$e_LookupHistoryItem as LookupHistoryItem, index$e_MAX_LOOKUP_HISTORY_SLOTS as MAX_LOOKUP_HISTORY_SLOTS, index$e_PreimageItem as PreimageItem, index$e_PrivilegedServices as PrivilegedServices, index$e_RecentBlocks as RecentBlocks, index$e_RecentBlocksHistory as RecentBlocksHistory, index$e_SafroleData as SafroleData, index$e_SafroleSealingKeysData as SafroleSealingKeysData, index$e_SafroleSealingKeysKind as SafroleSealingKeysKind, index$e_ServiceAccountInfo as ServiceAccountInfo, index$e_ServiceStatistics as ServiceStatistics, index$e_StatisticsData as StatisticsData, index$e_StorageItem as StorageItem, index$e_UpdateError as UpdateError, index$e_UpdatePreimage as UpdatePreimage, index$e_UpdatePreimageKind as UpdatePreimageKind, index$e_UpdateService as UpdateService, index$e_UpdateServiceKind as UpdateServiceKind, index$e_UpdateStorage as UpdateStorage, index$e_UpdateStorageKind as UpdateStorageKind, index$e_ValidatorData as ValidatorData, index$e_ValidatorStatistics as ValidatorStatistics, index$e_codecBandersnatchKey as codecBandersnatchKey, index$e_codecPerCore as codecPerCore, index$e_codecServiceId as codecServiceId, index$e_codecVarGas as codecVarGas, index$e_codecVarU16 as codecVarU16, index$e_codecWithHash as codecWithHash, index$e_hashComparator as hashComparator, index$e_ignoreValueWithDefault as ignoreValueWithDefault, index$e_serviceDataCodec as serviceDataCodec, index$e_serviceEntriesCodec as serviceEntriesCodec, index$e_sortedSetCodec as sortedSetCodec, index$e_tryAsLookupHistorySlots as tryAsLookupHistorySlots, index$e_tryAsPerCore as tryAsPerCore, index$e_workReportsSortedSetCodec as workReportsSortedSetCodec, index$e_zeroSizeHint as zeroSizeHint };
+ export type { index$e_BlocksState as BlocksState, index$e_ENTROPY_ENTRIES as ENTROPY_ENTRIES, index$e_EnumerableState as EnumerableState, index$e_FieldNames as FieldNames, index$e_InMemoryStateFields as InMemoryStateFields, index$e_LookupHistorySlots as LookupHistorySlots, index$e_MAX_RECENT_HISTORY as MAX_RECENT_HISTORY, index$e_PerCore as PerCore, index$e_SafroleSealingKeys as SafroleSealingKeys, index$e_Service as Service, index$e_ServiceData as ServiceData, index$e_ServiceEntries as ServiceEntries, index$e_ServicesUpdate as ServicesUpdate, index$e_State as State, index$e_StorageKey as StorageKey, index$e_VALIDATOR_META_BYTES as VALIDATOR_META_BYTES };
  }
 
  type StateKey$1 = Opaque<OpaqueHash, "stateKey">;
@@ -10483,7 +10369,7 @@ type StateCodec<T> = {

  /** Serialization for particular state entries. */
  declare namespace serialize {
- /** C(1): https://graypaper.fluffylabs.dev/#/85129da/38a20138a201?v=0.6.3 */
+ /** C(1): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b15013b1501?v=0.6.7 */
  export const authPools: StateCodec<State["authPools"]> = {
  key: stateKeys.index(StateKeyIdx.Alpha),
  Codec: codecPerCore(
@@ -10496,7 +10382,7 @@ declare namespace serialize {
  extract: (s) => s.authPools,
  };

- /** C(2): https://graypaper.fluffylabs.dev/#/85129da/38be0138be01?v=0.6.3 */
+ /** C(2): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b31013b3101?v=0.6.7 */
  export const authQueues: StateCodec<State["authQueues"]> = {
  key: stateKeys.index(StateKeyIdx.Phi),
  Codec: codecPerCore(
@@ -10507,7 +10393,6 @@ declare namespace serialize {

  /**
  * C(3): Recent blocks with compatibility
- * https://graypaper.fluffylabs.dev/#/85129da/38cb0138cb01?v=0.6.3
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e013b3e01?v=0.6.7
  */
  export const recentBlocks: StateCodec<State["recentBlocks"]> = {
@@ -10516,7 +10401,7 @@ declare namespace serialize {
  extract: (s) => s.recentBlocks,
  };

- /** C(4): https://graypaper.fluffylabs.dev/#/85129da/38e60138e601?v=0.6.3 */
+ /** C(4): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b63013b6301?v=0.6.7 */
  export const safrole: StateCodec<SafroleData> = {
  key: stateKeys.index(StateKeyIdx.Gamma),
  Codec: SafroleData.Codec,
@@ -10529,63 +10414,63 @@ declare namespace serialize {
  }),
  };

- /** C(5): https://graypaper.fluffylabs.dev/#/85129da/383d02383d02?v=0.6.3 */
+ /** C(5): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bba013bba01?v=0.6.7 */
  export const disputesRecords: StateCodec<State["disputesRecords"]> = {
  key: stateKeys.index(StateKeyIdx.Psi),
  Codec: DisputesRecords.Codec,
  extract: (s) => s.disputesRecords,
  };

- /** C(6): https://graypaper.fluffylabs.dev/#/85129da/387602387602?v=0.6.3 */
+ /** C(6): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bf3013bf301?v=0.6.7 */
  export const entropy: StateCodec<State["entropy"]> = {
  key: stateKeys.index(StateKeyIdx.Eta),
  Codec: codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque<EntropyHash>(), ENTROPY_ENTRIES),
  extract: (s) => s.entropy,
  };

- /** C(7): https://graypaper.fluffylabs.dev/#/85129da/388302388302?v=0.6.3 */
+ /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
  export const designatedValidators: StateCodec<State["designatedValidatorData"]> = {
  key: stateKeys.index(StateKeyIdx.Iota),
  Codec: codecPerValidator(ValidatorData.Codec),
  extract: (s) => s.designatedValidatorData,
  };

- /** C(8): https://graypaper.fluffylabs.dev/#/85129da/389002389002?v=0.6.3 */
+ /** C(8): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b0d023b0d02?v=0.6.7 */
  export const currentValidators: StateCodec<State["currentValidatorData"]> = {
  key: stateKeys.index(StateKeyIdx.Kappa),
  Codec: codecPerValidator(ValidatorData.Codec),
  extract: (s) => s.currentValidatorData,
  };

- /** C(9): https://graypaper.fluffylabs.dev/#/85129da/389d02389d02?v=0.6.3 */
+ /** C(9): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b1a023b1a02?v=0.6.7 */
  export const previousValidators: StateCodec<State["previousValidatorData"]> = {
  key: stateKeys.index(StateKeyIdx.Lambda),
  Codec: codecPerValidator(ValidatorData.Codec),
  extract: (s) => s.previousValidatorData,
  };

- /** C(10): https://graypaper.fluffylabs.dev/#/85129da/38aa0238aa02?v=0.6.3 */
+ /** C(10): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b27023b2702?v=0.6.7 */
  export const availabilityAssignment: StateCodec<State["availabilityAssignment"]> = {
  key: stateKeys.index(StateKeyIdx.Rho),
  Codec: codecPerCore(codec.optional(AvailabilityAssignment.Codec)),
  extract: (s) => s.availabilityAssignment,
  };

- /** C(11): https://graypaper.fluffylabs.dev/#/85129da/38c10238c102?v=0.6.3 */
+ /** C(11): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e023b3e02?v=0.6.7 */
  export const timeslot: StateCodec<State["timeslot"]> = {
  key: stateKeys.index(StateKeyIdx.Tau),
  Codec: codec.u32.asOpaque<TimeSlot>(),
  extract: (s) => s.timeslot,
  };

- /** C(12): https://graypaper.fluffylabs.dev/#/85129da/38cf0238cf02?v=0.6.3 */
+ /** C(12): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b4c023b4c02?v=0.6.7 */
  export const privilegedServices: StateCodec<State["privilegedServices"]> = {
  key: stateKeys.index(StateKeyIdx.Chi),
  Codec: PrivilegedServices.Codec,
  extract: (s) => s.privilegedServices,
  };

- /** C(13): https://graypaper.fluffylabs.dev/#/85129da/38e10238e102?v=0.6.3 */
+ /** C(13): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b5e023b5e02?v=0.6.7 */
  export const statistics: StateCodec<State["statistics"]> = {
  key: stateKeys.index(StateKeyIdx.Pi),
  Codec: StatisticsData.Codec,
@@ -10599,7 +10484,7 @@ declare namespace serialize {
  extract: (s) => s.accumulationQueue,
  };

- /** C(15): https://graypaper.fluffylabs.dev/#/85129da/381903381903?v=0.6.3 */
+ /** C(15): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b96023b9602?v=0.6.7 */
  export const recentlyAccumulated: StateCodec<State["recentlyAccumulated"]> = {
  key: stateKeys.index(StateKeyIdx.Xi),
  Codec: codecPerEpochBlock(
@@ -10695,27 +10580,17 @@ declare function* serializeRemovedServices(servicesRemoved: ServiceId[] | undefi
  }
  }

- declare function getLegacyKey(serviceId: ServiceId, rawKey: StorageKey): StorageKey {
- const SERVICE_ID_BYTES = 4;
- const serviceIdAndKey = new Uint8Array(SERVICE_ID_BYTES + rawKey.length);
- serviceIdAndKey.set(u32AsLeBytes(serviceId));
- serviceIdAndKey.set(rawKey.raw, SERVICE_ID_BYTES);
- return asOpaqueType(BytesBlob.blobFrom(blake2b.hashBytes(serviceIdAndKey).raw));
- }
-
  declare function* serializeStorage(storage: UpdateStorage[] | undefined): Generator<StateEntryUpdate> {
  for (const { action, serviceId } of storage ?? []) {
  switch (action.kind) {
  case UpdateStorageKind.Set: {
- const key = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
- ? action.storage.key
- : getLegacyKey(serviceId, action.storage.key);
+ const key = action.storage.key;
  const codec = serialize.serviceStorage(serviceId, key);
  yield [StateEntryUpdateAction.Insert, codec.key, action.storage.value];
  break;
  }
  case UpdateStorageKind.Remove: {
- const key = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? action.key : getLegacyKey(serviceId, action.key);
+ const key = action.key;
  const codec = serialize.serviceStorage(serviceId, key);
  yield [StateEntryUpdateAction.Remove, codec.key, EMPTY_BLOB];
  break;
@@ -10855,7 +10730,7 @@ declare function* serializeBasicKeys(spec: ChainSpec, update: Partial<State>) {
  yield doSerialize(update.recentlyAccumulated, serialize.recentlyAccumulated); // C(15)
  }

- if (update.accumulationOutputLog !== undefined && Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
+ if (update.accumulationOutputLog !== undefined) {
  yield doSerialize(update.accumulationOutputLog, serialize.accumulationOutputLog); // C(16)
  }
  }
@@ -11650,9 +11525,7 @@ declare function convertInMemoryStateToDictionary(
  doSerialize(serialize.statistics); // C(13)
  doSerialize(serialize.accumulationQueue); // C(14)
  doSerialize(serialize.recentlyAccumulated); // C(15)
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
- doSerialize(serialize.accumulationOutputLog); // C(16)
- }
+ doSerialize(serialize.accumulationOutputLog); // C(16)

  // services
  for (const [serviceId, service] of state.services.entries()) {
@@ -11843,10 +11716,7 @@ declare class SerializedState<T extends SerializedStateBackend = SerializedState
  }

  get accumulationOutputLog(): State["accumulationOutputLog"] {
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
- return this.retrieve(serialize.accumulationOutputLog, "accumulationOutputLog");
- }
- return [];
+ return this.retrieve(serialize.accumulationOutputLog, "accumulationOutputLog");
  }
  }

@@ -11993,7 +11863,6 @@ declare const index$c_U32_BYTES: typeof U32_BYTES;
  declare const index$c_binaryMerkleization: typeof binaryMerkleization;
  declare const index$c_convertInMemoryStateToDictionary: typeof convertInMemoryStateToDictionary;
  declare const index$c_dumpCodec: typeof dumpCodec;
- declare const index$c_getLegacyKey: typeof getLegacyKey;
  declare const index$c_getSafroleData: typeof getSafroleData;
  declare const index$c_legacyServiceNested: typeof legacyServiceNested;
  declare const index$c_loadState: typeof loadState;
@@ -12007,7 +11876,7 @@ declare const index$c_serializeStorage: typeof serializeStorage;
  declare const index$c_stateEntriesSequenceCodec: typeof stateEntriesSequenceCodec;
  import index$c_stateKeys = stateKeys;
  declare namespace index$c {
- export { index$c_EMPTY_BLOB as EMPTY_BLOB, index$c_SerializedService as SerializedService, index$c_SerializedState as SerializedState, index$c_StateEntries as StateEntries, index$c_StateEntryUpdateAction as StateEntryUpdateAction, index$c_StateKeyIdx as StateKeyIdx, index$c_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$c_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$c_U32_BYTES as U32_BYTES, index$c_binaryMerkleization as binaryMerkleization, index$c_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$c_dumpCodec as dumpCodec, index$c_getLegacyKey as getLegacyKey, index$c_getSafroleData as getSafroleData, index$c_legacyServiceNested as legacyServiceNested, index$c_loadState as loadState, index$c_serialize as serialize, index$c_serializeBasicKeys as serializeBasicKeys, index$c_serializePreimages as serializePreimages, index$c_serializeRemovedServices as serializeRemovedServices, index$c_serializeServiceUpdates as serializeServiceUpdates, index$c_serializeStateUpdate as serializeStateUpdate, index$c_serializeStorage as serializeStorage, index$c_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$c_stateKeys as stateKeys };
+ export { index$c_EMPTY_BLOB as EMPTY_BLOB, index$c_SerializedService as SerializedService, index$c_SerializedState as SerializedState, index$c_StateEntries as StateEntries, index$c_StateEntryUpdateAction as StateEntryUpdateAction, index$c_StateKeyIdx as StateKeyIdx, index$c_TYPICAL_STATE_ITEMS as TYPICAL_STATE_ITEMS, index$c_TYPICAL_STATE_ITEM_LEN as TYPICAL_STATE_ITEM_LEN, index$c_U32_BYTES as U32_BYTES, index$c_binaryMerkleization as binaryMerkleization, index$c_convertInMemoryStateToDictionary as convertInMemoryStateToDictionary, index$c_dumpCodec as dumpCodec, index$c_getSafroleData as getSafroleData, index$c_legacyServiceNested as legacyServiceNested, index$c_loadState as loadState, index$c_serialize as serialize, index$c_serializeBasicKeys as serializeBasicKeys, index$c_serializePreimages as serializePreimages, index$c_serializeRemovedServices as serializeRemovedServices, index$c_serializeServiceUpdates as serializeServiceUpdates, index$c_serializeStateUpdate as serializeStateUpdate, index$c_serializeStorage as serializeStorage, index$c_stateEntriesSequenceCodec as stateEntriesSequenceCodec, index$c_stateKeys as stateKeys };
  export type { index$c_EncodeFun as EncodeFun, index$c_KeyAndCodec as KeyAndCodec, index$c_SerializedStateBackend as SerializedStateBackend, index$c_StateCodec as StateCodec, index$c_StateEntryUpdate as StateEntryUpdate, StateKey$1 as StateKey };
  }
 
@@ -12367,8 +12236,8 @@ declare function encodePoints(input: Bytes<PIECE_SIZE>): FixedSizeArray<Bytes<PO
  }

  // encode and add redundancy shards
- const points = new ShardsCollection(POINT_ALIGNMENT, data);
- const encodedResult = encode(N_CHUNKS_REDUNDANCY, POINT_ALIGNMENT, points);
+ const points = new reedSolomon.ShardsCollection(POINT_ALIGNMENT, data);
+ const encodedResult = reedSolomon.encode(N_CHUNKS_REDUNDANCY, points);
  const encodedData = encodedResult.take_data();

  for (let i = 0; i < N_CHUNKS_REDUNDANCY; i++) {
@@ -12410,9 +12279,9 @@ declare function decodePiece(
  result.raw.set(points.raw, pointStartInResult);
  }
  }
- const points = new ShardsCollection(POINT_ALIGNMENT, data, indices);
+ const points = new reedSolomon.ShardsCollection(POINT_ALIGNMENT, data, indices);

- const decodingResult = decode(N_CHUNKS_REQUIRED, N_CHUNKS_REDUNDANCY, POINT_ALIGNMENT, points);
+ const decodingResult = reedSolomon.decode(N_CHUNKS_REQUIRED, N_CHUNKS_REDUNDANCY, points);
  const resultIndices = decodingResult.take_indices(); // it has to be called before take_data
  const resultData = decodingResult.take_data(); // it destroys the result object in rust

@@ -12649,6 +12518,10 @@ declare function chunksToShards(
  return tryAsPerValidator(result, spec);
  }

+ declare const initEc = async () => {
+ await init.reedSolomon();
+ };
+
  declare const index$a_HALF_POINT_SIZE: typeof HALF_POINT_SIZE;
  declare const index$a_N_CHUNKS_REDUNDANCY: typeof N_CHUNKS_REDUNDANCY;
  type index$a_N_CHUNKS_REQUIRED = N_CHUNKS_REQUIRED;
@@ -12662,6 +12535,7 @@ declare const index$a_decodeData: typeof decodeData;
12662
12535
  declare const index$a_decodeDataAndTrim: typeof decodeDataAndTrim;
12663
12536
  declare const index$a_decodePiece: typeof decodePiece;
12664
12537
  declare const index$a_encodePoints: typeof encodePoints;
12538
+ declare const index$a_initEc: typeof initEc;
12665
12539
  declare const index$a_join: typeof join;
12666
12540
  declare const index$a_lace: typeof lace;
12667
12541
  declare const index$a_padAndEncodeData: typeof padAndEncodeData;
@@ -12670,7 +12544,7 @@ declare const index$a_split: typeof split;
12670
12544
  declare const index$a_transpose: typeof transpose;
12671
12545
  declare const index$a_unzip: typeof unzip;
12672
12546
  declare namespace index$a {
12673
- export { index$a_HALF_POINT_SIZE as HALF_POINT_SIZE, index$a_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$a_POINT_ALIGNMENT as POINT_ALIGNMENT, index$a_chunkingFunction as chunkingFunction, index$a_chunksToShards as chunksToShards, index$a_decodeData as decodeData, index$a_decodeDataAndTrim as decodeDataAndTrim, index$a_decodePiece as decodePiece, index$a_encodePoints as encodePoints, index$a_join as join, index$a_lace as lace, index$a_padAndEncodeData as padAndEncodeData, index$a_shardsToChunks as shardsToChunks, index$a_split as split, index$a_transpose as transpose, index$a_unzip as unzip };
12547
+ export { index$a_HALF_POINT_SIZE as HALF_POINT_SIZE, index$a_N_CHUNKS_REDUNDANCY as N_CHUNKS_REDUNDANCY, index$a_POINT_ALIGNMENT as POINT_ALIGNMENT, index$a_chunkingFunction as chunkingFunction, index$a_chunksToShards as chunksToShards, index$a_decodeData as decodeData, index$a_decodeDataAndTrim as decodeDataAndTrim, index$a_decodePiece as decodePiece, index$a_encodePoints as encodePoints, index$a_initEc as initEc, index$a_join as join, index$a_lace as lace, index$a_padAndEncodeData as padAndEncodeData, index$a_shardsToChunks as shardsToChunks, index$a_split as split, index$a_transpose as transpose, index$a_unzip as unzip };
12674
12548
  export type { index$a_N_CHUNKS_REQUIRED as N_CHUNKS_REQUIRED, index$a_N_CHUNKS_TOTAL as N_CHUNKS_TOTAL, index$a_PIECE_SIZE as PIECE_SIZE, index$a_POINT_LENGTH as POINT_LENGTH };
12675
12549
  }
12676
12550
 
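The erasure-coding hunks above route shard encoding and decoding through the `reedSolomon` module (`reedSolomon.ShardsCollection`, `reedSolomon.encode`, `reedSolomon.decode`) and add an async `initEc` helper that awaits `init.reedSolomon()`. A minimal usage sketch, assuming the `index$a` namespace is re-exported from the package root as `erasureCoding` (the public name is not visible in this diff):

```
// Sketch only: the `erasureCoding` import name is an assumption.
import { erasureCoding } from "@typeberry/lib";

async function encodeOnePiece(piece: Parameters<typeof erasureCoding.encodePoints>[0]) {
  // The reed-solomon backend now has to be initialised once before any encode/decode call.
  await erasureCoding.initEc();
  // Encode the piece; redundancy shards come from the `reedSolomon.encode` call shown above.
  return erasureCoding.encodePoints(piece);
}
```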
@@ -12702,35 +12576,202 @@ declare const HostCallResult = {
12702
12576
  OK: tryAsU64(0n),
12703
12577
  } as const;
12704
12578
 
12579
+ declare enum Level {
12580
+ INSANE = 1,
12581
+ TRACE = 2,
12582
+ LOG = 3,
12583
+ INFO = 4,
12584
+ WARN = 5,
12585
+ ERROR = 6,
12586
+ }
12587
+
12588
+ type Options = {
12589
+ defaultLevel: Level;
12590
+ workingDir: string;
12591
+ modules: Map<string, Level>;
12592
+ };
12593
+
12705
12594
  /**
12706
- * Mask class is an implementation of skip function defined in GP.
12595
+ * A function to parse logger definition (including modules) given as a string.
12707
12596
  *
12708
- * https://graypaper.fluffylabs.dev/#/5f542d7/237201239801
12597
+ * Examples
12598
+ * - `info` - setup default logging level to `info`.
12599
+ * - `trace` - default logging level set to `trace`.
12600
+ * - `debug,consensus=trace` - default level is set to `debug/log`, but consensus is in trace mode.
12709
12601
  */
12710
- declare class Mask {
12711
- /**
12712
- * The lookup table will have `0` at the index which corresponds to an instruction on the same index in the bytecode.
12713
- * In case the value is non-zero it signifies the offset to the index with next instruction.
12714
- *
12715
- * Example:
12716
- * ```
12717
- * 0..1..2..3..4..5..6..7..8..9 # Indices
12718
- * 0..2..1..0..1..0..3..2..1..0 # lookupTable forward values
12719
- * ```
12720
- * There are instructions at indices `0, 3, 5, 9`.
12721
- */
12722
- private lookupTableForward: Uint8Array;
12602
+ declare function parseLoggerOptions(input: string, defaultLevel: Level, workingDir?: string): Options {
12603
+ const modules = new Map<string, Level>();
12604
+ const parts = input.toLowerCase().split(",");
12605
+ let defLevel = defaultLevel;
12723
12606
 
12724
- constructor(mask: BitVec) {
12725
- this.lookupTableForward = this.buildLookupTableForward(mask);
12607
+ for (const p of parts) {
12608
+ const clean = p.trim();
12609
+ // skip empty entries (e.g. left over from a stray `,`)
12610
+ if (clean.length === 0) {
12611
+ continue;
12612
+ }
12613
+ // either a `module=level` override or a bare default level
12614
+ if (clean.includes("=")) {
12615
+ const [mod, lvl] = clean.split("=");
12616
+ modules.set(mod.trim(), parseLevel(lvl.trim()));
12617
+ } else {
12618
+ defLevel = parseLevel(clean);
12619
+ }
12726
12620
  }
12727
12621
 
12728
- isInstruction(index: number) {
12729
- return this.lookupTableForward[index] === 0;
12730
- }
12622
+ // TODO [ToDr] Fix dirname for workers.
12623
+ const myDir = (import.meta.dirname ?? "").split("/");
12624
+ myDir.pop();
12625
+ myDir.pop();
12626
+ return {
12627
+ defaultLevel: defLevel,
12628
+ modules,
12629
+ workingDir: workingDir ?? myDir.join("/"),
12630
+ };
12631
+ }
12731
12632
 
12732
- getNoOfBytesToNextInstruction(index: number) {
12733
- check(index >= 0, `index (${index}) cannot be a negative number`);
12633
+ declare const GLOBAL_CONFIG = {
12634
+ options: DEFAULT_OPTIONS,
12635
+ transport: ConsoleTransport.create(DEFAULT_OPTIONS.defaultLevel, DEFAULT_OPTIONS),
12636
+ };
12637
+
12638
+ /**
12639
+ * A logger instance.
12640
+ */
12641
+ declare class Logger {
12642
+ /**
12643
+ * Create a new logger instance given filename and an optional module name.
12644
+ *
12645
+ * If the module name is not given, `fileName` becomes the module name.
12646
+ * The module name can be composed from multiple parts separated with `/`.
12647
+ *
12648
+ * The logger will use a global configuration which can be changed using
12649
+ * [`configureLogger`] function.
12650
+ */
12651
+ static new(fileName?: string, moduleName?: string) {
12652
+ const fName = fileName ?? "unknown";
12653
+ return new Logger(moduleName ?? fName, fName, GLOBAL_CONFIG);
12654
+ }
12655
+
12656
+ /**
12657
+ * Return the currently configured level for the given module. */
12658
+ static getLevel(moduleName: string): Level {
12659
+ return findLevel(GLOBAL_CONFIG.options, moduleName);
12660
+ }
12661
+
12662
+ /**
12663
+ * Global configuration of all loggers.
12664
+ *
12665
+ * One can specify a default logging level (only logs with level >= default will be printed).
12666
+ * It's also possible to configure per-module logging level that takes precedence
12667
+ * over the default one.
12668
+ *
12669
+ * Changing the options affects all previously created loggers.
12670
+ */
12671
+ static configureAllFromOptions(options: Options) {
12672
+ // find minimal level to optimise logging in case
12673
+ // we don't care about low-level logs.
12674
+ const minimalLevel = Array.from(options.modules.values()).reduce((level, modLevel) => {
12675
+ return level < modLevel ? level : modLevel;
12676
+ }, options.defaultLevel);
12677
+
12678
+ const transport = ConsoleTransport.create(minimalLevel, options);
12679
+
12680
+ // set the global config
12681
+ GLOBAL_CONFIG.options = options;
12682
+ GLOBAL_CONFIG.transport = transport;
12683
+ }
12684
+
12685
+ /**
12686
+ * Global configuration of all loggers.
12687
+ *
12688
+ * Parse configuration options from an input string typically obtained
12689
+ * from environment variable `JAM_LOG`.
12690
+ */
12691
+ static configureAll(input: string, defaultLevel: Level, workingDir?: string) {
12692
+ const options = parseLoggerOptions(input, defaultLevel, workingDir);
12693
+ Logger.configureAllFromOptions(options);
12694
+ }
12695
+
12696
+ constructor(
12697
+ private readonly moduleName: string,
12698
+ private readonly fileName: string,
12699
+ private readonly config: typeof GLOBAL_CONFIG,
12700
+ ) {}
12701
+
12702
+ /** Log a message with `INSANE` level. */
12703
+ insane(val: string) {
12704
+ this.config.transport.insane(this.moduleName, val);
12705
+ }
12706
+
12707
+ /** Log a message with `TRACE` level. */
12708
+ trace(val: string) {
12709
+ this.config.transport.trace(this.moduleName, val);
12710
+ }
12711
+
12712
+ /** Log a message with `DEBUG`/`LOG` level. */
12713
+ log(val: string) {
12714
+ this.config.transport.log(this.moduleName, val);
12715
+ }
12716
+
12717
+ /** Log a message with `INFO` level. */
12718
+ info(val: string) {
12719
+ this.config.transport.info(this.moduleName, val);
12720
+ }
12721
+
12722
+ /** Log a message with `WARN` level. */
12723
+ warn(val: string) {
12724
+ this.config.transport.warn(this.moduleName, val);
12725
+ }
12726
+
12727
+ /** Log a message with `ERROR` level. */
12728
+ error(val: string) {
12729
+ this.config.transport.error(this.moduleName, val);
12730
+ }
12731
+ }
12732
+
12733
+ type index$9_Level = Level;
12734
+ declare const index$9_Level: typeof Level;
12735
+ type index$9_Logger = Logger;
12736
+ declare const index$9_Logger: typeof Logger;
12737
+ declare const index$9_parseLoggerOptions: typeof parseLoggerOptions;
12738
+ declare namespace index$9 {
12739
+ export {
12740
+ index$9_Level as Level,
12741
+ index$9_Logger as Logger,
12742
+ index$9_parseLoggerOptions as parseLoggerOptions,
12743
+ };
12744
+ }
12745
+
12746
+ /**
12747
+ * Mask class is an implementation of the skip function defined in the GP.
12748
+ *
12749
+ * https://graypaper.fluffylabs.dev/#/5f542d7/237201239801
12750
+ */
12751
+ declare class Mask {
12752
+ /**
12753
+ * The lookup table will have `0` at the index which corresponds to an instruction on the same index in the bytecode.
12754
+ * In case the value is non-zero it signifies the offset to the index of the next instruction.
12755
+ *
12756
+ * Example:
12757
+ * ```
12758
+ * 0..1..2..3..4..5..6..7..8..9 # Indices
12759
+ * 0..2..1..0..1..0..3..2..1..0 # lookupTable forward values
12760
+ * ```
12761
+ * There are instructions at indices `0, 3, 5, 9`.
12762
+ */
12763
+ private lookupTableForward: Uint8Array;
12764
+
12765
+ constructor(mask: BitVec) {
12766
+ this.lookupTableForward = this.buildLookupTableForward(mask);
12767
+ }
12768
+
12769
+ isInstruction(index: number) {
12770
+ return this.lookupTableForward[index] === 0;
12771
+ }
12772
+
12773
+ getNoOfBytesToNextInstruction(index: number) {
12774
+ check(index >= 0, `index (${index}) cannot be a negative number`);
12734
12775
  return Math.min(this.lookupTableForward[index] ?? 0, MAX_INSTRUCTION_DISTANCE);
12735
12776
  }
12736
12777
 
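This hunk moves the logging primitives (`Level`, `Options`, `parseLoggerOptions`, `Logger`) in front of the PVM code and introduces a new lowest `INSANE` level plus a `Logger.insane()` method. A small sketch of how the declared API fits together, assuming the `index$9` namespace is exposed from the package root as `logger` (hypothetical name) and using the `JAM_LOG` convention mentioned in the doc comments:

```
// Sketch only: the `logger` import name is an assumption.
import { logger } from "@typeberry/lib";

// Parse a JAM_LOG-style string: a default level plus optional per-module overrides.
const options = logger.parseLoggerOptions("info,pvm=trace", logger.Level.LOG);
logger.Logger.configureAllFromOptions(options);

// Equivalent shortcut, typically fed from the environment:
logger.Logger.configureAll(process.env.JAM_LOG ?? "info", logger.Level.INFO);

const log = logger.Logger.new(import.meta.filename, "example");
log.info("visible with the default level set to INFO");
log.insane("only visible when this module is configured at the new INSANE level");
```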
@@ -13961,13 +14002,14 @@ declare abstract class MemoryPage {
13961
14002
  * And then a new version of TypeScript is released.
13962
14003
  */
13963
14004
  declare global {
13964
- interface ArrayBufferConstructor {
13965
- new (length: number, options?: { maxByteLength: number }): ArrayBuffer;
13966
- }
13967
-
13968
- interface ArrayBuffer {
13969
- resize(length: number): void;
13970
- }
14005
+ interface ArrayBufferConstructor {
14006
+ new (length: number, options?: {
14007
+ maxByteLength: number;
14008
+ }): ArrayBuffer;
14009
+ }
14010
+ interface ArrayBuffer {
14011
+ resize(length: number): void;
14012
+ }
13971
14013
  }
13972
14014
 
13973
14015
  type InitialMemoryState = {
@@ -13980,6 +14022,7 @@ declare enum AccessType {
13980
14022
  READ = 0,
13981
14023
  WRITE = 1,
13982
14024
  }
14025
+
13983
14026
  declare class Memory {
13984
14027
  static fromInitialMemory(initialMemoryState: InitialMemoryState) {
13985
14028
  return new Memory(
@@ -14016,6 +14059,7 @@ declare class Memory {
14016
14059
  return Result.ok(OK);
14017
14060
  }
14018
14061
 
14062
+ logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`);
14019
14063
  const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
14020
14064
 
14021
14065
  if (pagesResult.isError) {
@@ -14104,6 +14148,7 @@ declare class Memory {
14104
14148
  bytesLeft -= bytesToRead;
14105
14149
  }
14106
14150
 
14151
+ logger.insane(`MEM[${startAddress}] => ${BytesBlob.blobFrom(result)}`);
14107
14152
  return Result.ok(OK);
14108
14153
  }
14109
14154
 
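The `logger.insane(...)` calls added to `Memory` above (and to the interpreter loop further down) only emit output when the relevant module is configured at the new `INSANE` level. A sketch of enabling that trace output, assuming the memory code shares the `pvm` logger declared later in this diff and the same hypothetical `logger` import as above:

```
// Sketch only: import name assumed; the `pvm` module name comes from
// `Logger.new(import.meta.filename, "pvm")` declared later in this diff.
import { logger } from "@typeberry/lib";

// Keep everything else at INFO, but drop the PVM module to INSANE so the
// `MEM[address] <- bytes` / `MEM[address] => bytes` and `[PC: ...]` lines show up.
logger.Logger.configureAll("info,pvm=insane", logger.Level.INFO);
```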
@@ -15103,6 +15148,10 @@ declare class JumpTable {
15103
15148
  return new JumpTable(0, new Uint8Array());
15104
15149
  }
15105
15150
 
15151
+ getSize() {
15152
+ return this.indices.length;
15153
+ }
15154
+
15106
15155
  copyFrom(jt: JumpTable) {
15107
15156
  this.indices = jt.indices;
15108
15157
  }
@@ -16004,167 +16053,6 @@ declare class OneRegOneExtImmDispatcher {
16004
16053
  }
16005
16054
  }
16006
16055
 
16007
- declare enum Level {
16008
- TRACE = 1,
16009
- LOG = 2,
16010
- INFO = 3,
16011
- WARN = 4,
16012
- ERROR = 5,
16013
- }
16014
-
16015
- type Options = {
16016
- defaultLevel: Level;
16017
- workingDir: string;
16018
- modules: Map<string, Level>;
16019
- };
16020
-
16021
- /**
16022
- * A function to parse logger definition (including modules) given as a string.
16023
- *
16024
- * Examples
16025
- * - `info` - setup default logging level to `info`.
16026
- * - `trace` - default logging level set to `trace`.
16027
- * - `debug;consensus=trace` - default level is set to `debug/log`, but consensus is in trace mode.
16028
- */
16029
- declare function parseLoggerOptions(input: string, defaultLevel: Level, workingDir?: string): Options {
16030
- const modules = new Map<string, Level>();
16031
- const parts = input.toLowerCase().split(",");
16032
- let defLevel = defaultLevel;
16033
-
16034
- for (const p of parts) {
16035
- const clean = p.trim();
16036
- // skip empty objects (forgotten `,` removed)
16037
- if (clean.length === 0) {
16038
- continue;
16039
- }
16040
- // we just have the default level
16041
- if (clean.includes("=")) {
16042
- const [mod, lvl] = clean.split("=");
16043
- modules.set(mod.trim(), parseLevel(lvl.trim()));
16044
- } else {
16045
- defLevel = parseLevel(clean);
16046
- }
16047
- }
16048
-
16049
- // TODO [ToDr] Fix dirname for workers.
16050
- const myDir = (import.meta.dirname ?? "").split("/");
16051
- myDir.pop();
16052
- myDir.pop();
16053
- return {
16054
- defaultLevel: defLevel,
16055
- modules,
16056
- workingDir: workingDir ?? myDir.join("/"),
16057
- };
16058
- }
16059
-
16060
- declare const GLOBAL_CONFIG = {
16061
- options: DEFAULT_OPTIONS,
16062
- transport: ConsoleTransport.create(DEFAULT_OPTIONS.defaultLevel, DEFAULT_OPTIONS),
16063
- };
16064
-
16065
- /**
16066
- * A logger instance.
16067
- */
16068
- declare class Logger {
16069
- /**
16070
- * Create a new logger instance given filename and an optional module name.
16071
- *
16072
- * If the module name is not given, `fileName` becomes the module name.
16073
- * The module name can be composed from multiple parts separated with `/`.
16074
- *
16075
- * The logger will use a global configuration which can be changed using
16076
- * [`configureLogger`] function.
16077
- */
16078
- static new(fileName?: string, moduleName?: string) {
16079
- const fName = fileName ?? "unknown";
16080
- return new Logger(moduleName ?? fName, fName, GLOBAL_CONFIG);
16081
- }
16082
-
16083
- /**
16084
- * Return currently configured level for given module. */
16085
- static getLevel(moduleName: string): Level {
16086
- return findLevel(GLOBAL_CONFIG.options, moduleName);
16087
- }
16088
-
16089
- /**
16090
- * Global configuration of all loggers.
16091
- *
16092
- * One can specify a default logging level (only logs with level >= default will be printed).
16093
- * It's also possible to configure per-module logging level that takes precedence
16094
- * over the default one.
16095
- *
16096
- * Changing the options affects all previously created loggers.
16097
- */
16098
- static configureAllFromOptions(options: Options) {
16099
- // find minimal level to optimise logging in case
16100
- // we don't care about low-level logs.
16101
- const minimalLevel = Array.from(options.modules.values()).reduce((level, modLevel) => {
16102
- return level < modLevel ? level : modLevel;
16103
- }, options.defaultLevel);
16104
-
16105
- const transport = ConsoleTransport.create(minimalLevel, options);
16106
-
16107
- // set the global config
16108
- GLOBAL_CONFIG.options = options;
16109
- GLOBAL_CONFIG.transport = transport;
16110
- }
16111
-
16112
- /**
16113
- * Global configuration of all loggers.
16114
- *
16115
- * Parse configuration options from an input string typically obtained
16116
- * from environment variable `JAM_LOG`.
16117
- */
16118
- static configureAll(input: string, defaultLevel: Level, workingDir?: string) {
16119
- const options = parseLoggerOptions(input, defaultLevel, workingDir);
16120
- Logger.configureAllFromOptions(options);
16121
- }
16122
-
16123
- constructor(
16124
- private readonly moduleName: string,
16125
- private readonly fileName: string,
16126
- private readonly config: typeof GLOBAL_CONFIG,
16127
- ) {}
16128
-
16129
- /** Log a message with `TRACE` level. */
16130
- trace(val: string) {
16131
- this.config.transport.trace(this.moduleName, this.fileName, val);
16132
- }
16133
-
16134
- /** Log a message with `DEBUG`/`LOG` level. */
16135
- log(val: string) {
16136
- this.config.transport.log(this.moduleName, this.fileName, val);
16137
- }
16138
-
16139
- /** Log a message with `INFO` level. */
16140
- info(val: string) {
16141
- this.config.transport.info(this.moduleName, this.fileName, val);
16142
- }
16143
-
16144
- /** Log a message with `WARN` level. */
16145
- warn(val: string) {
16146
- this.config.transport.warn(this.moduleName, this.fileName, val);
16147
- }
16148
-
16149
- /** Log a message with `ERROR` level. */
16150
- error(val: string) {
16151
- this.config.transport.error(this.moduleName, this.fileName, val);
16152
- }
16153
- }
16154
-
16155
- type index$9_Level = Level;
16156
- declare const index$9_Level: typeof Level;
16157
- type index$9_Logger = Logger;
16158
- declare const index$9_Logger: typeof Logger;
16159
- declare const index$9_parseLoggerOptions: typeof parseLoggerOptions;
16160
- declare namespace index$9 {
16161
- export {
16162
- index$9_Level as Level,
16163
- index$9_Logger as Logger,
16164
- index$9_parseLoggerOptions as parseLoggerOptions,
16165
- };
16166
- }
16167
-
16168
16056
  declare enum ProgramDecoderError {
16169
16057
  InvalidProgramError = 0,
16170
16058
  }
@@ -16245,12 +16133,12 @@ declare enum Status {
16245
16133
 
16246
16134
  type InterpreterOptions = {
16247
16135
  useSbrkGas?: boolean;
16248
- ignoreInstructionGas?: boolean;
16249
16136
  };
16250
16137
 
16138
+ declare const logger = Logger.new(import.meta.filename, "pvm");
16139
+
16251
16140
  declare class Interpreter {
16252
16141
  private readonly useSbrkGas: boolean;
16253
- private readonly ignoreInstructionGas: boolean;
16254
16142
  private registers = new Registers();
16255
16143
  private code: Uint8Array = new Uint8Array();
16256
16144
  private mask = Mask.empty();
@@ -16278,9 +16166,8 @@ declare class Interpreter {
16278
16166
  private basicBlocks: BasicBlocks;
16279
16167
  private jumpTable = JumpTable.empty();
16280
16168
 
16281
- constructor({ useSbrkGas = false, ignoreInstructionGas = false }: InterpreterOptions = {}) {
16169
+ constructor({ useSbrkGas = false }: InterpreterOptions = {}) {
16282
16170
  this.useSbrkGas = useSbrkGas;
16283
- this.ignoreInstructionGas = ignoreInstructionGas;
16284
16171
  this.argsDecoder = new ArgsDecoder();
16285
16172
  this.basicBlocks = new BasicBlocks();
16286
16173
  const mathOps = new MathOps(this.registers);
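The `ignoreInstructionGas` flag disappears from `InterpreterOptions` and from the `Interpreter` constructor, so instruction gas is now always charged (the `Compatibility`-based gate in `InterpreterInstanceManager` below is removed for the same reason). A migration sketch; the namespace is re-exported as `interpreter` later in this diff, but the top-level import path is an assumption:

```
// Sketch only: the import path/name is an assumption.
import { interpreter } from "@typeberry/lib";

// 0.0.1-cf41358: new Interpreter({ useSbrkGas: false, ignoreInstructionGas: true });
// 0.0.1-f0ab5f1: the flag is gone and every executed instruction is charged gas.
const pvm = new interpreter.Interpreter({ useSbrkGas: false });
```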
@@ -16376,7 +16263,7 @@ declare class Interpreter {
16376
16263
  const currentInstruction = this.code[this.pc] ?? Instruction.TRAP;
16377
16264
  const isValidInstruction = Instruction[currentInstruction] !== undefined;
16378
16265
  const gasCost = instructionGasMap[currentInstruction] ?? instructionGasMap[Instruction.TRAP];
16379
- const underflow = this.ignoreInstructionGas ? false : this.gas.sub(gasCost);
16266
+ const underflow = this.gas.sub(gasCost);
16380
16267
  if (underflow) {
16381
16268
  this.status = Status.OOG;
16382
16269
  return this.status;
@@ -16385,6 +16272,8 @@ declare class Interpreter {
16385
16272
  const argsResult = this.argsDecodingResults[argsType];
16386
16273
  this.argsDecoder.fillArgs(this.pc, argsResult);
16387
16274
 
16275
+ logger.insane(`[PC: ${this.pc}] ${Instruction[currentInstruction]}`);
16276
+
16388
16277
  if (!isValidInstruction) {
16389
16278
  this.instructionResult.status = Result.PANIC;
16390
16279
  } else {
@@ -16445,12 +16334,6 @@ declare class Interpreter {
16445
16334
  }
16446
16335
 
16447
16336
  if (this.instructionResult.status !== null) {
16448
- // All abnormal terminations should be interpreted as TRAP and we should subtract the gas. In case of FAULT we have to do it manually at the very end.
16449
- if (this.instructionResult.status === Result.FAULT || this.instructionResult.status === Result.FAULT_ACCESS) {
16450
- // TODO [ToDr] underflow?
16451
- this.gas.sub(instructionGasMap[Instruction.TRAP]);
16452
- }
16453
-
16454
16337
  switch (this.instructionResult.status) {
16455
16338
  case Result.FAULT:
16456
16339
  this.status = Status.FAULT;
@@ -16466,6 +16349,7 @@ declare class Interpreter {
16466
16349
  this.status = Status.HOST;
16467
16350
  break;
16468
16351
  }
16352
+ logger.insane(`[PC: ${this.pc}] Status: ${Result[this.instructionResult.status]}`);
16469
16353
  return this.status;
16470
16354
  }
16471
16355
 
@@ -16537,13 +16421,14 @@ declare const index$8_Registers: typeof Registers;
16537
16421
  type index$8_SbrkIndex = SbrkIndex;
16538
16422
  type index$8_SmallGas = SmallGas;
16539
16423
  declare const index$8_gasCounter: typeof gasCounter;
16424
+ declare const index$8_logger: typeof logger;
16540
16425
  declare const index$8_tryAsBigGas: typeof tryAsBigGas;
16541
16426
  declare const index$8_tryAsGas: typeof tryAsGas;
16542
16427
  declare const index$8_tryAsMemoryIndex: typeof tryAsMemoryIndex;
16543
16428
  declare const index$8_tryAsSbrkIndex: typeof tryAsSbrkIndex;
16544
16429
  declare const index$8_tryAsSmallGas: typeof tryAsSmallGas;
16545
16430
  declare namespace index$8 {
16546
- export { index$8_Interpreter as Interpreter, index$8_Memory as Memory, index$8_MemoryBuilder as MemoryBuilder, index$8_Registers as Registers, index$8_gasCounter as gasCounter, index$8_tryAsBigGas as tryAsBigGas, index$8_tryAsGas as tryAsGas, index$8_tryAsMemoryIndex as tryAsMemoryIndex, index$8_tryAsSbrkIndex as tryAsSbrkIndex, index$8_tryAsSmallGas as tryAsSmallGas };
16431
+ export { index$8_Interpreter as Interpreter, index$8_Memory as Memory, index$8_MemoryBuilder as MemoryBuilder, index$8_Registers as Registers, index$8_gasCounter as gasCounter, index$8_logger as logger, index$8_tryAsBigGas as tryAsBigGas, index$8_tryAsGas as tryAsGas, index$8_tryAsMemoryIndex as tryAsMemoryIndex, index$8_tryAsSbrkIndex as tryAsSbrkIndex, index$8_tryAsSmallGas as tryAsSmallGas };
16547
16432
  export type { index$8_BigGas as BigGas, index$8_Gas as Gas, index$8_GasCounter as GasCounter, index$8_InterpreterOptions as InterpreterOptions, index$8_MemoryIndex as MemoryIndex, index$8_SbrkIndex as SbrkIndex, index$8_SmallGas as SmallGas };
16548
16433
  }
16549
16434
 
@@ -16681,7 +16566,7 @@ declare class HostCallsManager {
16681
16566
  return `r${idx}=${value} (0x${value.toString(16)})`;
16682
16567
  })
16683
16568
  .join(", ");
16684
- logger.trace(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
16569
+ logger.insane(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
16685
16570
  }
16686
16571
  }
16687
16572
 
@@ -16692,16 +16577,10 @@ declare class InterpreterInstanceManager {
16692
16577
  private waitingQueue: ResolveFn[] = [];
16693
16578
 
16694
16579
  constructor(noOfPvmInstances: number) {
16695
- const shouldCountGas =
16696
- Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ||
16697
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
16698
- Compatibility.isSuite(TestSuite.W3F_DAVXY, GpVersion.V0_6_6);
16699
-
16700
16580
  for (let i = 0; i < noOfPvmInstances; i++) {
16701
16581
  this.instances.push(
16702
16582
  new Interpreter({
16703
16583
  useSbrkGas: false,
16704
- ignoreInstructionGas: !shouldCountGas,
16705
16584
  }),
16706
16585
  );
16707
16586
  }
@@ -16808,14 +16687,15 @@ declare class HostCalls {
16808
16687
  const gasCost = typeof hostCall.gasCost === "number" ? hostCall.gasCost : hostCall.gasCost(regs);
16809
16688
  const underflow = gas.sub(gasCost);
16810
16689
 
16690
+ const pcLog = `[PC: ${pvmInstance.getPC()}]`;
16811
16691
  if (underflow) {
16812
- this.hostCalls.traceHostCall("OOG", index, hostCall, regs, gas.get());
16692
+ this.hostCalls.traceHostCall(`${pcLog} OOG`, index, hostCall, regs, gas.get());
16813
16693
  return ReturnValue.fromStatus(pvmInstance.getGasConsumed(), Status.OOG);
16814
16694
  }
16815
- this.hostCalls.traceHostCall("Invoking", index, hostCall, regs, gasBefore);
16695
+ this.hostCalls.traceHostCall(`${pcLog} Invoking`, index, hostCall, regs, gasBefore);
16816
16696
  const result = await hostCall.execute(gas, regs, memory);
16817
16697
  this.hostCalls.traceHostCall(
16818
- result === undefined ? "Result" : `Status(${result})`,
16698
+ result === undefined ? `${pcLog} Result` : `${pcLog} Status(${PvmExecution[result]})`,
16819
16699
  index,
16820
16700
  hostCall,
16821
16701
  regs,
@@ -16827,8 +16707,18 @@ declare class HostCalls {
16827
16707
  return this.getReturnValue(status, pvmInstance);
16828
16708
  }
16829
16709
 
16830
- pvmInstance.runProgram();
16831
- status = pvmInstance.getStatus();
16710
+ if (result === PvmExecution.Panic) {
16711
+ status = Status.PANIC;
16712
+ return this.getReturnValue(status, pvmInstance);
16713
+ }
16714
+
16715
+ if (result === undefined) {
16716
+ pvmInstance.runProgram();
16717
+ status = pvmInstance.getStatus();
16718
+ continue;
16719
+ }
16720
+
16721
+ assertNever(result);
16832
16722
  }
16833
16723
  }
16834
16724
 
@@ -18167,6 +18057,7 @@ declare const index$3_getServiceId: typeof getServiceId;
18167
18057
  declare const index$3_getServiceIdOrCurrent: typeof getServiceIdOrCurrent;
18168
18058
  declare const index$3_inspect: typeof inspect;
18169
18059
  declare const index$3_instructionArgumentTypeMap: typeof instructionArgumentTypeMap;
18060
+ declare const index$3_isBrowser: typeof isBrowser;
18170
18061
  declare const index$3_isTaggedError: typeof isTaggedError;
18171
18062
  declare const index$3_maybeTaggedErrorToString: typeof maybeTaggedErrorToString;
18172
18063
  declare const index$3_measure: typeof measure;
@@ -18179,7 +18070,7 @@ declare const index$3_tryAsMachineId: typeof tryAsMachineId;
18179
18070
  declare const index$3_tryAsProgramCounter: typeof tryAsProgramCounter;
18180
18071
  declare const index$3_writeServiceIdAsLeBytes: typeof writeServiceIdAsLeBytes;
18181
18072
  declare namespace index$3 {
18182
- export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$3_cast as cast, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_ensure as ensure, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$n as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$p as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
18073
+ export { index$3_AccumulationStateUpdate as AccumulationStateUpdate, index$3_ArgsDecoder as ArgsDecoder, index$3_ArgumentType as ArgumentType, index$3_BasicBlocks as BasicBlocks, index$3_CURRENT_SERVICE_ID as CURRENT_SERVICE_ID, index$3_EjectError as EjectError, index$3_ExtendedWitdthImmediateDecoder as ExtendedWitdthImmediateDecoder, index$3_ForgetPreimageError as ForgetPreimageError, index$3_HostCallMemory as HostCallMemory, index$3_HostCallRegisters as HostCallRegisters, index$3_HostCallResult as HostCallResult, index$3_ImmediateDecoder as ImmediateDecoder, index$3_MAX_U32 as MAX_U32, index$3_MAX_U32_BIG_INT as MAX_U32_BIG_INT, index$3_MachineInstance as MachineInstance, index$3_Mask as Mask, index$3_MemoryOperation as MemoryOperation, index$3_MemorySegment as MemorySegment, NO_OF_REGISTERS$1 as NO_OF_REGISTERS, index$3_NewServiceError as NewServiceError, index$3_NibblesDecoder as NibblesDecoder, index$3_PagesError as PagesError, index$3_PartiallyUpdatedState as PartiallyUpdatedState, index$3_PeekPokeError as PeekPokeError, index$3_PendingTransfer as PendingTransfer, index$3_PreimageStatusKind as PreimageStatusKind, index$3_Program as Program, index$3_ProgramDecoder as ProgramDecoder, index$3_ProvidePreimageError as ProvidePreimageError, DebuggerAdapter as Pvm, index$3_Registers as Registers, index$3_RequestPreimageError as RequestPreimageError, Result$2 as Result, index$3_RichTaggedError as RichTaggedError, index$3_SERVICE_ID_BYTES as SERVICE_ID_BYTES, index$3_SpiMemory as SpiMemory, index$3_SpiProgram as SpiProgram, index$3_TransferError as TransferError, index$3_UpdatePrivilegesError as UpdatePrivilegesError, index$3_WithDebug as WithDebug, index$3_ZeroVoidError as ZeroVoidError, index$3___OPAQUE_TYPE__ as __OPAQUE_TYPE__, index$3_asOpaqueType as asOpaqueType, index$3_assertEmpty as assertEmpty, index$3_assertNever as assertNever, index$j as block, index$q as bytes, index$3_cast as cast, index$3_check as check, index$3_clampU64ToU32 as clampU64ToU32, index$3_createResults as createResults, index$3_decodeStandardProgram as decodeStandardProgram, index$3_ensure as ensure, index$3_extractCodeAndMetadata as extractCodeAndMetadata, index$3_getServiceId as getServiceId, index$3_getServiceIdOrCurrent as getServiceIdOrCurrent, index$n as hash, index$3_inspect as inspect, index$3_instructionArgumentTypeMap as instructionArgumentTypeMap, index$8 as interpreter, index$3_isBrowser as isBrowser, index$3_isTaggedError as isTaggedError, index$3_maybeTaggedErrorToString as maybeTaggedErrorToString, index$3_measure as measure, index$p as numbers, index$3_preimageLenAsU32 as preimageLenAsU32, index$3_resultToString as resultToString, index$3_seeThrough as seeThrough, index$3_slotsToPreimageStatus as slotsToPreimageStatus, index$3_toMemoryOperation as toMemoryOperation, index$3_tryAsMachineId as tryAsMachineId, index$3_tryAsProgramCounter as tryAsProgramCounter, index$3_writeServiceIdAsLeBytes as writeServiceIdAsLeBytes };
18183
18074
  export type { index$3_Args as Args, index$3_EnumMapping as EnumMapping, index$3_ErrorResult as ErrorResult, index$3_IHostCallMemory as IHostCallMemory, index$3_IHostCallRegisters as IHostCallRegisters, index$3_InsufficientFundsError as InsufficientFundsError, index$3_MachineId as MachineId, index$3_MachineResult as MachineResult, index$3_MachineStatus as MachineStatus, index$3_NoMachineError as NoMachineError, index$3_OK as OK, index$3_OkResult as OkResult, index$3_Opaque as Opaque, index$3_PartialState as PartialState, index$3_PreimageStatus as PreimageStatus, index$3_ProgramCounter as ProgramCounter, index$3_RefineExternalities as RefineExternalities, index$3_SegmentExportError as SegmentExportError, index$3_ServiceStateUpdate as ServiceStateUpdate, index$3_StateSlice as StateSlice, index$3_StringLiteral as StringLiteral, index$3_TRANSFER_MEMO_BYTES as TRANSFER_MEMO_BYTES, index$3_TaggedError as TaggedError, index$3_TokenOf as TokenOf, index$3_Uninstantiable as Uninstantiable, index$3_UnprivilegedError as UnprivilegedError, index$3_WithOpaque as WithOpaque };
18184
18075
  }
18185
18076
 
@@ -18217,41 +18108,7 @@ declare namespace index$2 {
18217
18108
  };
18218
18109
  }
18219
18110
 
18220
- declare class JsonServiceInfoPre067 {
18221
- static fromJson = json.object<JsonServiceInfoPre067, ServiceAccountInfo>(
18222
- {
18223
- code_hash: fromJson.bytes32(),
18224
- balance: json.fromNumber((x) => tryAsU64(x)),
18225
- min_item_gas: json.fromNumber((x) => tryAsServiceGas(x)),
18226
- min_memo_gas: json.fromNumber((x) => tryAsServiceGas(x)),
18227
- bytes: json.fromNumber((x) => tryAsU64(x)),
18228
- items: "number",
18229
- },
18230
- ({ code_hash, balance, min_item_gas, min_memo_gas, bytes, items }) => {
18231
- return ServiceAccountInfo.create({
18232
- codeHash: code_hash,
18233
- balance,
18234
- accumulateMinGas: min_item_gas,
18235
- onTransferMinGas: min_memo_gas,
18236
- storageUtilisationBytes: bytes,
18237
- storageUtilisationCount: items,
18238
- gratisStorage: tryAsU64(0),
18239
- created: tryAsTimeSlot(0),
18240
- lastAccumulation: tryAsTimeSlot(0),
18241
- parentService: tryAsServiceId(0),
18242
- });
18243
- },
18244
- );
18245
-
18246
- code_hash!: CodeHash;
18247
- balance!: U64;
18248
- min_item_gas!: ServiceGas;
18249
- min_memo_gas!: ServiceGas;
18250
- bytes!: U64;
18251
- items!: U32;
18252
- }
18253
-
18254
- declare class JsonServiceInfo extends JsonServiceInfoPre067 {
18111
+ declare class JsonServiceInfo {
18255
18112
  static fromJson = json.object<JsonServiceInfo, ServiceAccountInfo>(
18256
18113
  {
18257
18114
  code_hash: fromJson.bytes32(),
@@ -18292,6 +18149,12 @@ declare class JsonServiceInfo extends JsonServiceInfoPre067 {
18292
18149
  },
18293
18150
  );
18294
18151
 
18152
+ code_hash!: CodeHash;
18153
+ balance!: U64;
18154
+ min_item_gas!: ServiceGas;
18155
+ min_memo_gas!: ServiceGas;
18156
+ bytes!: U64;
18157
+ items!: U32;
18295
18158
  creation_slot!: TimeSlot;
18296
18159
  deposit_offset!: U64;
18297
18160
  last_accumulation_slot!: TimeSlot;
@@ -18345,9 +18208,7 @@ declare class JsonService {
18345
18208
  {
18346
18209
  id: "number",
18347
18210
  data: {
18348
- service: Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
18349
- ? JsonServiceInfo.fromJson
18350
- : JsonServiceInfoPre067.fromJson,
18211
+ service: JsonServiceInfo.fromJson,
18351
18212
  preimages: json.optional(json.array(JsonPreimageItem.fromJson)),
18352
18213
  storage: json.optional(json.array(JsonStorageItem.fromJson)),
18353
18214
  lookup_meta: json.optional(json.array(lookupMetaFromJson)),
@@ -18485,7 +18346,7 @@ type JsonRecentBlockState = {
18485
18346
  reported: WorkPackageInfo[];
18486
18347
  };
18487
18348
 
18488
- declare const recentBlocksFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
18349
+ declare const recentBlocksHistoryFromJson = json.object<JsonRecentBlocks, RecentBlocksHistory>(
18489
18350
  {
18490
18351
  history: json.array(recentBlockStateFromJson),
18491
18352
  mmr: {
@@ -18509,49 +18370,6 @@ type JsonRecentBlocks = {
18509
18370
  };
18510
18371
  };
18511
18372
 
18512
- declare const legacyRecentBlockStateFromJson = json.object<JsonRecentBlockStateLegacy, LegacyBlockState>(
18513
- {
18514
- header_hash: fromJson.bytes32(),
18515
- mmr: {
18516
- peaks: json.array(json.nullable(fromJson.bytes32())),
18517
- },
18518
- state_root: fromJson.bytes32(),
18519
- reported: json.array(reportedWorkPackageFromJson),
18520
- },
18521
- ({ header_hash, mmr, state_root, reported }) => {
18522
- return {
18523
- headerHash: header_hash,
18524
- mmr,
18525
- postStateRoot: state_root,
18526
- reported: HashDictionary.fromEntries(reported.map((x) => [x.workPackageHash, x])),
18527
- };
18528
- },
18529
- );
18530
-
18531
- type JsonRecentBlockStateLegacy = {
18532
- header_hash: HeaderHash;
18533
- mmr: {
18534
- peaks: Array<KeccakHash | null>;
18535
- };
18536
- state_root: StateRootHash;
18537
- reported: WorkPackageInfo[];
18538
- };
18539
-
18540
- declare const legacyRecentBlocksFromJson = json.object<LegacyBlocksState, RecentBlocksHistory>(
18541
- json.array(legacyRecentBlockStateFromJson),
18542
- (blocks) => {
18543
- return RecentBlocksHistory.legacyCreate(
18544
- LegacyRecentBlocks.create({
18545
- blocks,
18546
- }),
18547
- );
18548
- },
18549
- );
18550
-
18551
- declare const recentBlocksHistoryFromJson = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
18552
- ? recentBlocksFromJson
18553
- : legacyRecentBlocksFromJson;
18554
-
18555
18373
  declare const ticketFromJson: FromJson<Ticket> = json.object<Ticket>(
18556
18374
  {
18557
18375
  id: fromJson.bytes32(),
@@ -18893,138 +18711,6 @@ declare const fullStateDumpFromJson = (spec: ChainSpec) =>
18893
18711
  },
18894
18712
  );
18895
18713
 
18896
- type JsonStateDumpPre067 = {
18897
- alpha: AuthorizerHash[][];
18898
- varphi: AuthorizerHash[][];
18899
- beta: State["recentBlocks"] | null;
18900
- gamma: {
18901
- gamma_k: State["nextValidatorData"];
18902
- gamma_z: State["epochRoot"];
18903
- gamma_s: TicketsOrKeys;
18904
- gamma_a: State["ticketsAccumulator"];
18905
- };
18906
- psi: State["disputesRecords"];
18907
- eta: State["entropy"];
18908
- iota: State["designatedValidatorData"];
18909
- kappa: State["currentValidatorData"];
18910
- lambda: State["previousValidatorData"];
18911
- rho: State["availabilityAssignment"];
18912
- tau: State["timeslot"];
18913
- chi: {
18914
- chi_m: PrivilegedServices["manager"];
18915
- chi_a: ServiceId; // NOTE: [MaSo] pre067
18916
- chi_v: PrivilegedServices["validatorsManager"];
18917
- chi_g: PrivilegedServices["autoAccumulateServices"] | null;
18918
- };
18919
- pi: JsonStatisticsData;
18920
- theta: State["accumulationQueue"];
18921
- xi: PerEpochBlock<WorkPackageHash[]>;
18922
- accounts: InMemoryService[];
18923
- };
18924
-
18925
- declare const fullStateDumpFromJsonPre067 = (spec: ChainSpec) =>
18926
- json.object<JsonStateDumpPre067, InMemoryState>(
18927
- {
18928
- alpha: json.array(json.array(fromJson.bytes32<AuthorizerHash>())),
18929
- varphi: json.array(json.array(fromJson.bytes32<AuthorizerHash>())),
18930
- beta: json.nullable(recentBlocksHistoryFromJson),
18931
- gamma: {
18932
- gamma_k: json.array(validatorDataFromJson),
18933
- gamma_a: json.array(ticketFromJson),
18934
- gamma_s: TicketsOrKeys.fromJson,
18935
- gamma_z: json.fromString((v) => Bytes.parseBytes(v, BANDERSNATCH_RING_ROOT_BYTES).asOpaque()),
18936
- },
18937
- psi: disputesRecordsFromJson,
18938
- eta: json.array(fromJson.bytes32<EntropyHash>()),
18939
- iota: json.array(validatorDataFromJson),
18940
- kappa: json.array(validatorDataFromJson),
18941
- lambda: json.array(validatorDataFromJson),
18942
- rho: json.array(json.nullable(availabilityAssignmentFromJson)),
18943
- tau: "number",
18944
- chi: {
18945
- chi_m: "number",
18946
- chi_a: "number",
18947
- chi_v: "number",
18948
- chi_g: json.nullable(
18949
- json.array({
18950
- service: "number",
18951
- gasLimit: json.fromNumber((v) => tryAsServiceGas(v)),
18952
- }),
18953
- ),
18954
- },
18955
- pi: JsonStatisticsData.fromJson,
18956
- theta: json.array(json.array(notYetAccumulatedFromJson)),
18957
- xi: json.array(json.array(fromJson.bytes32())),
18958
- accounts: json.array(JsonService.fromJson),
18959
- },
18960
- ({
18961
- alpha,
18962
- varphi,
18963
- beta,
18964
- gamma,
18965
- psi,
18966
- eta,
18967
- iota,
18968
- kappa,
18969
- lambda,
18970
- rho,
18971
- tau,
18972
- chi,
18973
- pi,
18974
- theta,
18975
- xi,
18976
- accounts,
18977
- }): InMemoryState => {
18978
- return InMemoryState.create({
18979
- authPools: tryAsPerCore(
18980
- alpha.map((perCore) => {
18981
- if (perCore.length > MAX_AUTH_POOL_SIZE) {
18982
- throw new Error(`AuthPools: expected less than ${MAX_AUTH_POOL_SIZE}, got ${perCore.length}`);
18983
- }
18984
- return asKnownSize(perCore);
18985
- }),
18986
- spec,
18987
- ),
18988
- authQueues: tryAsPerCore(
18989
- varphi.map((perCore) => {
18990
- if (perCore.length !== AUTHORIZATION_QUEUE_SIZE) {
18991
- throw new Error(`AuthQueues: expected ${AUTHORIZATION_QUEUE_SIZE}, got: ${perCore.length}`);
18992
- }
18993
- return asKnownSize(perCore);
18994
- }),
18995
- spec,
18996
- ),
18997
- recentBlocks: beta ?? RecentBlocksHistory.empty(),
18998
- nextValidatorData: gamma.gamma_k,
18999
- epochRoot: gamma.gamma_z,
19000
- sealingKeySeries: TicketsOrKeys.toSafroleSealingKeys(gamma.gamma_s, spec),
19001
- ticketsAccumulator: gamma.gamma_a,
19002
- disputesRecords: psi,
19003
- entropy: eta,
19004
- designatedValidatorData: iota,
19005
- currentValidatorData: kappa,
19006
- previousValidatorData: lambda,
19007
- availabilityAssignment: rho,
19008
- timeslot: tau,
19009
- privilegedServices: PrivilegedServices.create({
19010
- manager: chi.chi_m,
19011
- authManager: tryAsPerCore(new Array(spec.coresCount).fill(chi.chi_a), spec),
19012
- validatorsManager: chi.chi_v,
19013
- autoAccumulateServices: chi.chi_g ?? [],
19014
- }),
19015
- statistics: JsonStatisticsData.toStatisticsData(spec, pi),
19016
- accumulationQueue: theta,
19017
- recentlyAccumulated: tryAsPerEpochBlock(
19018
- xi.map((x) => HashSet.from(x)),
19019
- spec,
19020
- ),
19021
- services: new Map(accounts.map((x) => [x.serviceId, x])),
19022
- // NOTE Field not present in pre067, added here for compatibility reasons
19023
- accumulationOutputLog: [],
19024
- });
19025
- },
19026
- );
19027
-
19028
18714
  type index$1_JsonAvailabilityAssignment = JsonAvailabilityAssignment;
19029
18715
  type index$1_JsonCoreStatistics = JsonCoreStatistics;
19030
18716
  declare const index$1_JsonCoreStatistics: typeof JsonCoreStatistics;
@@ -19034,19 +18720,15 @@ type index$1_JsonLookupMeta = JsonLookupMeta;
19034
18720
  type index$1_JsonPreimageItem = JsonPreimageItem;
19035
18721
  declare const index$1_JsonPreimageItem: typeof JsonPreimageItem;
19036
18722
  type index$1_JsonRecentBlockState = JsonRecentBlockState;
19037
- type index$1_JsonRecentBlockStateLegacy = JsonRecentBlockStateLegacy;
19038
18723
  type index$1_JsonRecentBlocks = JsonRecentBlocks;
19039
18724
  type index$1_JsonReportedWorkPackageInfo = JsonReportedWorkPackageInfo;
19040
18725
  type index$1_JsonService = JsonService;
19041
18726
  declare const index$1_JsonService: typeof JsonService;
19042
18727
  type index$1_JsonServiceInfo = JsonServiceInfo;
19043
18728
  declare const index$1_JsonServiceInfo: typeof JsonServiceInfo;
19044
- type index$1_JsonServiceInfoPre067 = JsonServiceInfoPre067;
19045
- declare const index$1_JsonServiceInfoPre067: typeof JsonServiceInfoPre067;
19046
18729
  type index$1_JsonServiceStatistics = JsonServiceStatistics;
19047
18730
  declare const index$1_JsonServiceStatistics: typeof JsonServiceStatistics;
19048
18731
  type index$1_JsonStateDump = JsonStateDump;
19049
- type index$1_JsonStateDumpPre067 = JsonStateDumpPre067;
19050
18732
  type index$1_JsonStatisticsData = JsonStatisticsData;
19051
18733
  declare const index$1_JsonStatisticsData: typeof JsonStatisticsData;
19052
18734
  type index$1_JsonStorageItem = JsonStorageItem;
@@ -19059,21 +18741,17 @@ declare const index$1_TicketsOrKeys: typeof TicketsOrKeys;
19059
18741
  declare const index$1_availabilityAssignmentFromJson: typeof availabilityAssignmentFromJson;
19060
18742
  declare const index$1_disputesRecordsFromJson: typeof disputesRecordsFromJson;
19061
18743
  declare const index$1_fullStateDumpFromJson: typeof fullStateDumpFromJson;
19062
- declare const index$1_fullStateDumpFromJsonPre067: typeof fullStateDumpFromJsonPre067;
19063
- declare const index$1_legacyRecentBlockStateFromJson: typeof legacyRecentBlockStateFromJson;
19064
- declare const index$1_legacyRecentBlocksFromJson: typeof legacyRecentBlocksFromJson;
19065
18744
  declare const index$1_lookupMetaFromJson: typeof lookupMetaFromJson;
19066
18745
  declare const index$1_notYetAccumulatedFromJson: typeof notYetAccumulatedFromJson;
19067
18746
  declare const index$1_recentBlockStateFromJson: typeof recentBlockStateFromJson;
19068
- declare const index$1_recentBlocksFromJson: typeof recentBlocksFromJson;
19069
18747
  declare const index$1_recentBlocksHistoryFromJson: typeof recentBlocksHistoryFromJson;
19070
18748
  declare const index$1_reportedWorkPackageFromJson: typeof reportedWorkPackageFromJson;
19071
18749
  declare const index$1_serviceStatisticsEntryFromJson: typeof serviceStatisticsEntryFromJson;
19072
18750
  declare const index$1_ticketFromJson: typeof ticketFromJson;
19073
18751
  declare const index$1_validatorDataFromJson: typeof validatorDataFromJson;
19074
18752
  declare namespace index$1 {
19075
- export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceInfoPre067 as JsonServiceInfoPre067, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_fullStateDumpFromJsonPre067 as fullStateDumpFromJsonPre067, index$1_legacyRecentBlockStateFromJson as legacyRecentBlockStateFromJson, index$1_legacyRecentBlocksFromJson as legacyRecentBlocksFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksFromJson as recentBlocksFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
19076
- export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlockStateLegacy as JsonRecentBlockStateLegacy, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_JsonStateDumpPre067 as JsonStateDumpPre067, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
18753
+ export { index$1_JsonCoreStatistics as JsonCoreStatistics, index$1_JsonDisputesRecords as JsonDisputesRecords, index$1_JsonPreimageItem as JsonPreimageItem, index$1_JsonService as JsonService, index$1_JsonServiceInfo as JsonServiceInfo, index$1_JsonServiceStatistics as JsonServiceStatistics, index$1_JsonStatisticsData as JsonStatisticsData, index$1_JsonStorageItem as JsonStorageItem, index$1_JsonValidatorStatistics as JsonValidatorStatistics, index$1_TicketsOrKeys as TicketsOrKeys, index$1_availabilityAssignmentFromJson as availabilityAssignmentFromJson, index$1_disputesRecordsFromJson as disputesRecordsFromJson, index$1_fullStateDumpFromJson as fullStateDumpFromJson, index$1_lookupMetaFromJson as lookupMetaFromJson, index$1_notYetAccumulatedFromJson as notYetAccumulatedFromJson, index$1_recentBlockStateFromJson as recentBlockStateFromJson, index$1_recentBlocksHistoryFromJson as recentBlocksHistoryFromJson, index$1_reportedWorkPackageFromJson as reportedWorkPackageFromJson, index$1_serviceStatisticsEntryFromJson as serviceStatisticsEntryFromJson, index$1_ticketFromJson as ticketFromJson, index$1_validatorDataFromJson as validatorDataFromJson };
18754
+ export type { index$1_JsonAvailabilityAssignment as JsonAvailabilityAssignment, index$1_JsonLookupMeta as JsonLookupMeta, index$1_JsonRecentBlockState as JsonRecentBlockState, index$1_JsonRecentBlocks as JsonRecentBlocks, index$1_JsonReportedWorkPackageInfo as JsonReportedWorkPackageInfo, index$1_JsonStateDump as JsonStateDump, index$1_ServiceStatisticsEntry as ServiceStatisticsEntry };
19077
18755
  }
19078
18756
 
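With GP 0.6.5/0.6.6 support dropped, the pre-0.6.7 JSON shapes (`JsonServiceInfoPre067`, `JsonStateDumpPre067`, the legacy recent-blocks codecs) are gone and the remaining codecs are no longer selected via `Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)`. A small sketch of the surviving entry points, assuming the `index$1` namespace is published as `state_json` (hypothetical name):

```
// Sketch only: `state_json` is a hypothetical name for the `index$1` namespace.
import { state_json } from "@typeberry/lib";

// Only the 0.6.7+ codecs remain; no version switch is needed any more.
const serviceInfoCodec = state_json.JsonServiceInfo.fromJson;
const recentBlocksCodec = state_json.recentBlocksHistoryFromJson;
```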
19079
18757
  /** Helper function to create most used hashes in the block */