@typeberry/lib 0.0.1-1c07527 → 0.0.1-4a742e9

This diff compares the contents of two publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
Files changed (3)
  1. package/index.d.ts +271 -454
  2. package/index.js +360 -514
  3. package/package.json +1 -1
package/index.js CHANGED
@@ -68,8 +68,6 @@ var index$s = /*#__PURE__*/Object.freeze({
68
68
 
69
69
  var GpVersion;
70
70
  (function (GpVersion) {
71
- GpVersion["V0_6_5"] = "0.6.5";
72
- GpVersion["V0_6_6"] = "0.6.6";
73
71
  GpVersion["V0_6_7"] = "0.6.7";
74
72
  GpVersion["V0_7_0"] = "0.7.0-preview";
75
73
  GpVersion["V0_7_1"] = "0.7.1-preview";
@@ -80,13 +78,7 @@ var TestSuite;
80
78
  TestSuite["JAMDUNA"] = "jamduna";
81
79
  })(TestSuite || (TestSuite = {}));
82
80
  const DEFAULT_SUITE = TestSuite.W3F_DAVXY;
83
- const ALL_VERSIONS_IN_ORDER = [
84
- GpVersion.V0_6_5,
85
- GpVersion.V0_6_6,
86
- GpVersion.V0_6_7,
87
- GpVersion.V0_7_0,
88
- GpVersion.V0_7_1,
89
- ];
81
+ const ALL_VERSIONS_IN_ORDER = [GpVersion.V0_6_7, GpVersion.V0_7_0, GpVersion.V0_7_1];
90
82
  const env = typeof process === "undefined" ? {} : process.env;
91
83
  const DEFAULT_VERSION = GpVersion.V0_6_7;
92
84
  let CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;
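Note: the net effect of this hunk is that Gray Paper versions 0.6.5 and 0.6.6 can no longer be selected through the GP_VERSION environment variable. A minimal sketch of the selection logic, assuming parseCurrentVersion (whose body is not part of this diff) rejects anything outside ALL_VERSIONS_IN_ORDER:

// Sketch only - parseCurrentVersion's real body is not shown in this diff.
const ALL_VERSIONS_IN_ORDER = ["0.6.7", "0.7.0-preview", "0.7.1-preview"];
const DEFAULT_VERSION = "0.6.7";
const parseCurrentVersion = (v) => (ALL_VERSIONS_IN_ORDER.includes(v) ? v : undefined);

const env = typeof process === "undefined" ? {} : process.env;
// GP_VERSION=0.6.5 or 0.6.6 now falls back to the default instead of selecting a legacy version.
const CURRENT_VERSION = parseCurrentVersion(env.GP_VERSION) ?? DEFAULT_VERSION;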
@@ -1847,6 +1839,73 @@ class Skipper {
1847
1839
  }
1848
1840
  }
1849
1841
 
1842
+ /**
1843
+ * Type descriptor definition.
1844
+ *
1845
+ * The type descriptor can encode & decode given type `T`, but
1846
+ * also have a `name` and a byte-size hint.
1847
+ *
1848
+ * Descriptors can be composed to form more complex typings.
1849
+ */
1850
+ class Descriptor {
1851
+ name;
1852
+ sizeHint;
1853
+ encode;
1854
+ decode;
1855
+ skip;
1856
+ /** A "lightweight" version of the object. */
1857
+ View;
1858
+ /** New descriptor with specialized `View`. */
1859
+ static withView(name, sizeHint, encode, decode, skip, view) {
1860
+ return new Descriptor(name, sizeHint, encode, decode, skip, view);
1861
+ }
1862
+ /** Create a new descriptor without a specialized `View`. */
1863
+ static new(name, sizeHint, encode, decode, skip) {
1864
+ return new Descriptor(name, sizeHint, encode, decode, skip, null);
1865
+ }
1866
+ constructor(
1867
+ /** Descriptive name of the coded data. */
1868
+ name,
1869
+ /** A byte size hint for encoded data. */
1870
+ sizeHint,
1871
+ /** Encoding function. */
1872
+ encode,
1873
+ /** Decoding function. */
1874
+ decode,
1875
+ /** Skipping function. */
1876
+ skip,
1877
+ /** view object. It can be `null` iff T===V. */
1878
+ view) {
1879
+ this.name = name;
1880
+ this.sizeHint = sizeHint;
1881
+ this.encode = encode;
1882
+ this.decode = decode;
1883
+ this.skip = skip;
1884
+ // We cast here to make sure that the field is always set.
1885
+ this.View = view ?? this;
1886
+ }
1887
+ /**
1888
+ * Extract an encoded version of this type from the decoder.
1889
+ *
1890
+ * This function skips the object instead of decoding it,
1891
+ * allowing to retrieve the encoded portion of the object from `Decoder`.
1892
+ */
1893
+ skipEncoded(decoder) {
1894
+ const initBytes = decoder.bytesRead();
1895
+ this.skip(new Skipper(decoder));
1896
+ const endBytes = decoder.bytesRead();
1897
+ return BytesBlob.blobFrom(decoder.source.subarray(initBytes, endBytes));
1898
+ }
1899
+ /** Return a new descriptor that converts data into some other type. */
1900
+ convert(input, output) {
1901
+ return new Descriptor(this.name, this.sizeHint, (e, elem) => this.encode(e, input(elem)), (d) => output(this.decode(d)), this.skip, this.View);
1902
+ }
1903
+ /** Safely cast the descriptor value to a opaque type. */
1904
+ asOpaque() {
1905
+ return this.convert((i) => seeThrough(i), (o) => asOpaqueType(o));
1906
+ }
1907
+ }
1908
+
1850
1909
  /** Validate that given sequence length is within expected range. */
1851
1910
  function validateLength(range, length, context) {
1852
1911
  if (length < range.minLength) {
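Note: the hunk above moves the `Descriptor` class earlier in the bundle (it is removed from its old location further down). For orientation, a minimal sketch of how descriptors compose; the `writeU32`/`readU32`/`skip` methods and the size-hint shape are assumptions for illustration, not taken from this diff:

// Sketch only - encoder/decoder method names and the sizeHint shape are assumed.
const u32 = Descriptor.new(
  "u32",
  { bytes: 4, isExact: true },
  (encoder, value) => encoder.writeU32(value),
  (decoder) => decoder.readU32(),
  (skipper) => skipper.skip(4),
);

// `convert` keeps the wire format but exposes a different in-memory type,
// which is exactly how `asOpaque()` above is implemented.
const timeSlot = u32.convert(
  (slot) => slot.raw,   // input: map the new type back to the underlying u32
  (raw) => ({ raw }),   // output: build the new type from the decoded u32
);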
@@ -2078,72 +2137,6 @@ const TYPICAL_SEQUENCE_LENGTH = 64;
2078
2137
  * TODO [ToDr] [opti] This value should be updated when we run some real-data bechmarks.
2079
2138
  */
2080
2139
  const TYPICAL_DICTIONARY_LENGTH = 32;
2081
- /**
2082
- * Type descriptor definition.
2083
- *
2084
- * The type descriptor can encode & decode given type `T`, but
2085
- * also have a `name` and a byte-size hint.
2086
- *
2087
- * Descriptors can be composed to form more complex typings.
2088
- */
2089
- class Descriptor {
2090
- name;
2091
- sizeHint;
2092
- encode;
2093
- decode;
2094
- skip;
2095
- /** A "lightweight" version of the object. */
2096
- View;
2097
- /** New descriptor with specialized `View`. */
2098
- static withView(name, sizeHint, encode, decode, skip, view) {
2099
- return new Descriptor(name, sizeHint, encode, decode, skip, view);
2100
- }
2101
- /** Create a new descriptor without a specialized `View`. */
2102
- static new(name, sizeHint, encode, decode, skip) {
2103
- return new Descriptor(name, sizeHint, encode, decode, skip, null);
2104
- }
2105
- constructor(
2106
- /** Descriptive name of the coded data. */
2107
- name,
2108
- /** A byte size hint for encoded data. */
2109
- sizeHint,
2110
- /** Encoding function. */
2111
- encode,
2112
- /** Decoding function. */
2113
- decode,
2114
- /** Skipping function. */
2115
- skip,
2116
- /** view object. It can be `null` iff T===V. */
2117
- view) {
2118
- this.name = name;
2119
- this.sizeHint = sizeHint;
2120
- this.encode = encode;
2121
- this.decode = decode;
2122
- this.skip = skip;
2123
- // We cast here to make sure that the field is always set.
2124
- this.View = view ?? this;
2125
- }
2126
- /**
2127
- * Extract an encoded version of this type from the decoder.
2128
- *
2129
- * This function skips the object instead of decoding it,
2130
- * allowing to retrieve the encoded portion of the object from `Decoder`.
2131
- */
2132
- skipEncoded(decoder) {
2133
- const initBytes = decoder.bytesRead();
2134
- this.skip(new Skipper(decoder));
2135
- const endBytes = decoder.bytesRead();
2136
- return BytesBlob.blobFrom(decoder.source.subarray(initBytes, endBytes));
2137
- }
2138
- /** Return a new descriptor that converts data into some other type. */
2139
- convert(input, output) {
2140
- return new Descriptor(this.name, this.sizeHint, (e, elem) => this.encode(e, input(elem)), (d) => output(this.decode(d)), this.skip, this.View);
2141
- }
2142
- /** Safely cast the descriptor value to a opaque type. */
2143
- asOpaque() {
2144
- return this.convert((i) => seeThrough(i), (o) => asOpaqueType(o));
2145
- }
2146
- }
2147
2140
  /**
2148
2141
  * Convert a descriptor for regular array into readonly one.
2149
2142
  *
@@ -2452,6 +2445,7 @@ var index$o = /*#__PURE__*/Object.freeze({
2452
2445
  ObjectView: ObjectView,
2453
2446
  SequenceView: SequenceView,
2454
2447
  TYPICAL_DICTIONARY_LENGTH: TYPICAL_DICTIONARY_LENGTH,
2448
+ ViewField: ViewField,
2455
2449
  addSizeHints: addSizeHints,
2456
2450
  get codec () { return codec$1; },
2457
2451
  decodeVariableLengthExtraBytes: decodeVariableLengthExtraBytes,
@@ -5752,6 +5746,31 @@ const codecPerEpochBlock = (val) => codecWithContext((context) => {
5752
5746
  return codecKnownSizeArray(val, { fixedLength: context.epochLength });
5753
5747
  });
5754
5748
 
5749
+ /**
5750
+ * Mapping between work package hash and root hash of it's exports.
5751
+ *
5752
+ * Used to construct a dictionary.
5753
+ */
5754
+ class WorkPackageInfo extends WithDebug {
5755
+ workPackageHash;
5756
+ segmentTreeRoot;
5757
+ static Codec = codec$1.Class(WorkPackageInfo, {
5758
+ workPackageHash: codec$1.bytes(HASH_SIZE).asOpaque(),
5759
+ segmentTreeRoot: codec$1.bytes(HASH_SIZE).asOpaque(),
5760
+ });
5761
+ constructor(
5762
+ /** Hash of the described work package. */
5763
+ workPackageHash,
5764
+ /** Exports root hash. */
5765
+ segmentTreeRoot) {
5766
+ super();
5767
+ this.workPackageHash = workPackageHash;
5768
+ this.segmentTreeRoot = segmentTreeRoot;
5769
+ }
5770
+ static create({ workPackageHash, segmentTreeRoot }) {
5771
+ return new WorkPackageInfo(workPackageHash, segmentTreeRoot);
5772
+ }
5773
+ }
5755
5774
  /**
5756
5775
  * `X`: Refinement Context - state of the chain at the point
5757
5776
  * that the report's corresponding work-package was evaluated.
@@ -5801,7 +5820,8 @@ class RefineContext extends WithDebug {
5801
5820
 
5802
5821
  var refineContext = /*#__PURE__*/Object.freeze({
5803
5822
  __proto__: null,
5804
- RefineContext: RefineContext
5823
+ RefineContext: RefineContext,
5824
+ WorkPackageInfo: WorkPackageInfo
5805
5825
  });
5806
5826
 
5807
5827
  /** `W_E`: The basic size of erasure-coded pieces in octets. See equation H.6. */
@@ -6070,17 +6090,13 @@ var WorkExecResultKind;
6070
6090
  /** `☇`: unexpected program termination. */
6071
6091
  WorkExecResultKind[WorkExecResultKind["panic"] = 2] = "panic";
6072
6092
  /** `⊚`: the number of exports made was invalidly reported. */
6073
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
6074
- WorkExecResultKind[WorkExecResultKind["incorrectNumberOfExports"] = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 3 : -1] = "incorrectNumberOfExports";
6093
+ WorkExecResultKind[WorkExecResultKind["incorrectNumberOfExports"] = 3] = "incorrectNumberOfExports";
6075
6094
  /** `⊖`: the size of the digest (refinement output) would cross the acceptable limit. */
6076
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
6077
- WorkExecResultKind[WorkExecResultKind["digestTooBig"] = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 4 : -1] = "digestTooBig";
6095
+ WorkExecResultKind[WorkExecResultKind["digestTooBig"] = 4] = "digestTooBig";
6078
6096
  /** `BAD`: service code was not available for lookup in state. */
6079
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
6080
- WorkExecResultKind[WorkExecResultKind["badCode"] = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 5 : 3] = "badCode";
6097
+ WorkExecResultKind[WorkExecResultKind["badCode"] = 5] = "badCode";
6081
6098
  /** `BIG`: the code was too big (beyond the maximum allowed size `W_C`) */
6082
- // biome-ignore lint/style/useLiteralEnumMembers: Compatibility
6083
- WorkExecResultKind[WorkExecResultKind["codeOversize"] = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? 6 : 4] = "codeOversize";
6099
+ WorkExecResultKind[WorkExecResultKind["codeOversize"] = 6] = "codeOversize";
6084
6100
  })(WorkExecResultKind || (WorkExecResultKind = {}));
6085
6101
  /** The execution result of some work-package. */
6086
6102
  class WorkExecResult extends WithDebug {
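Note: the removed branches record the pre-0.6.7 compatibility values, so the change above can be summarised as a fixed numeric mapping (variants numbered below 2 are outside this hunk):

// Fixed values after this change (previously dependent on Compatibility checks):
WorkExecResultKind.panic;                     // 2 (unchanged)
WorkExecResultKind.incorrectNumberOfExports;  // 3 (was -1 on pre-0.6.7)
WorkExecResultKind.digestTooBig;              // 4 (was -1 on pre-0.6.7)
WorkExecResultKind.badCode;                   // 5 (was 3 on pre-0.6.7)
WorkExecResultKind.codeOversize;              // 6 (was 4 on pre-0.6.7)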
@@ -6272,31 +6288,6 @@ class WorkPackageSpec extends WithDebug {
6272
6288
  this.exportsCount = exportsCount;
6273
6289
  }
6274
6290
  }
6275
- /**
6276
- * Mapping between work package hash and root hash of it's exports.
6277
- *
6278
- * Used to construct a dictionary.
6279
- */
6280
- class WorkPackageInfo extends WithDebug {
6281
- workPackageHash;
6282
- segmentTreeRoot;
6283
- static Codec = codec$1.Class(WorkPackageInfo, {
6284
- workPackageHash: codec$1.bytes(HASH_SIZE).asOpaque(),
6285
- segmentTreeRoot: codec$1.bytes(HASH_SIZE).asOpaque(),
6286
- });
6287
- constructor(
6288
- /** Hash of the described work package. */
6289
- workPackageHash,
6290
- /** Exports root hash. */
6291
- segmentTreeRoot) {
6292
- super();
6293
- this.workPackageHash = workPackageHash;
6294
- this.segmentTreeRoot = segmentTreeRoot;
6295
- }
6296
- static create({ workPackageHash, segmentTreeRoot }) {
6297
- return new WorkPackageInfo(workPackageHash, segmentTreeRoot);
6298
- }
6299
- }
6300
6291
  /**
6301
6292
  * A report of execution of some work package.
6302
6293
  *
@@ -6363,14 +6354,12 @@ const WorkReportCodec = codec$1.Class(WorkReportNoCodec, {
6363
6354
  const WorkReportCodecPre070 = codec$1.Class(WorkReportNoCodec, {
6364
6355
  workPackageSpec: WorkPackageSpec.Codec,
6365
6356
  context: RefineContext.Codec,
6366
- coreIndex: Compatibility.isGreaterOrEqual(GpVersion.V0_6_5) && !Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5)
6367
- ? codec$1.varU32.convert((o) => tryAsU32(o), (i) => {
6368
- if (!isU16(i)) {
6369
- throw new Error(`Core index exceeds U16: ${i}`);
6370
- }
6371
- return tryAsCoreIndex(i);
6372
- })
6373
- : codec$1.u16.asOpaque(),
6357
+ coreIndex: codec$1.varU32.convert((o) => tryAsU32(o), (i) => {
6358
+ if (!isU16(i)) {
6359
+ throw new Error(`Core index exceeds U16: ${i}`);
6360
+ }
6361
+ return tryAsCoreIndex(i);
6362
+ }),
6374
6363
  authorizerHash: codec$1.bytes(HASH_SIZE).asOpaque(),
6375
6364
  authorizationOutput: codec$1.blob,
6376
6365
  segmentRootLookup: readonlyArray(codec$1.sequenceVarLen(WorkPackageInfo.Codec)),
@@ -6385,7 +6374,6 @@ class WorkReport extends WorkReportNoCodec {
6385
6374
 
6386
6375
  var workReport = /*#__PURE__*/Object.freeze({
6387
6376
  __proto__: null,
6388
- WorkPackageInfo: WorkPackageInfo,
6389
6377
  WorkPackageSpec: WorkPackageSpec,
6390
6378
  WorkReport: WorkReport,
6391
6379
  WorkReportNoCodec: WorkReportNoCodec
@@ -7801,244 +7789,68 @@ class AutoAccumulate {
7801
7789
  static create({ service, gasLimit }) {
7802
7790
  return new AutoAccumulate(service, gasLimit);
7803
7791
  }
7804
- constructor(
7805
- /** Service id that auto-accumulates. */
7806
- service,
7807
- /** Gas limit for auto-accumulation. */
7808
- gasLimit) {
7809
- this.service = service;
7810
- this.gasLimit = gasLimit;
7811
- }
7812
- }
7813
- /**
7814
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/11da0111da01?v=0.6.7
7815
- */
7816
- class PrivilegedServices {
7817
- manager;
7818
- authManager;
7819
- validatorsManager;
7820
- autoAccumulateServices;
7821
- static Codec = codec$1.Class(PrivilegedServices, {
7822
- manager: codec$1.u32.asOpaque(),
7823
- authManager: Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
7824
- ? codecPerCore(codec$1.u32.asOpaque())
7825
- : codecWithContext((ctx) => codec$1.u32.asOpaque().convert(
7826
- // NOTE: [MaSo] In a compatibility mode we are always updating all entries
7827
- // (all the entries are the same)
7828
- // so it doesn't matter which one we take here.
7829
- (perCore) => perCore[0], (serviceId) => {
7830
- const array = new Array(ctx.coresCount).fill(serviceId);
7831
- return tryAsPerCore(array, ctx);
7832
- })),
7833
- validatorsManager: codec$1.u32.asOpaque(),
7834
- autoAccumulateServices: readonlyArray(codec$1.sequenceVarLen(AutoAccumulate.Codec)),
7835
- });
7836
- static create({ manager, authManager, validatorsManager, autoAccumulateServices }) {
7837
- return new PrivilegedServices(manager, authManager, validatorsManager, autoAccumulateServices);
7838
- }
7839
- constructor(
7840
- /**
7841
- * `chi_m`: The first, χm, is the index of the manager service which is
7842
- * the service able to effect an alteration of χ from block to block,
7843
- * as well as bestow services with storage deposit credits.
7844
- * https://graypaper.fluffylabs.dev/#/7e6ff6a/11a40111a801?v=0.6.7
7845
- */
7846
- manager,
7847
- /** `chi_a`: Manages authorization queue one for each core. */
7848
- authManager,
7849
- /** `chi_v`: Managers validator keys. */
7850
- validatorsManager,
7851
- /** `chi_g`: Dictionary of services that auto-accumulate every block with their gas limit. */
7852
- autoAccumulateServices) {
7853
- this.manager = manager;
7854
- this.authManager = authManager;
7855
- this.validatorsManager = validatorsManager;
7856
- this.autoAccumulateServices = autoAccumulateServices;
7857
- }
7858
- }
7859
-
7860
- const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
7861
- /**
7862
- * Merkle Mountain Range.
7863
- *
7864
- * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
7865
- */
7866
- class MerkleMountainRange {
7867
- hasher;
7868
- mountains;
7869
- /** Construct an empty MMR. */
7870
- static empty(hasher) {
7871
- return new MerkleMountainRange(hasher);
7872
- }
7873
- /** Construct a new MMR from existing peaks. */
7874
- static fromPeaks(hasher, mmr) {
7875
- return new MerkleMountainRange(hasher, mmr.peaks
7876
- .reduce((acc, peak, index) => {
7877
- if (peak !== null) {
7878
- acc.push(Mountain.fromPeak(peak, 2 ** index));
7879
- }
7880
- return acc;
7881
- }, [])
7882
- .reverse());
7883
- }
7884
- constructor(hasher,
7885
- /** Store non-empty merkle tries (mountains) ordered by descending size. */
7886
- mountains = []) {
7887
- this.hasher = hasher;
7888
- this.mountains = mountains;
7889
- }
7890
- /**
7891
- * Append a new hash to the MMR structure.
7892
- *
7893
- * https://graypaper.fluffylabs.dev/#/5f542d7/3b11003b1100?v=0.6.2
7894
- */
7895
- append(hash) {
7896
- let newMountain = Mountain.fromPeak(hash, 1);
7897
- for (;;) {
7898
- const last = this.mountains.pop();
7899
- if (last === undefined) {
7900
- this.mountains.push(newMountain);
7901
- return;
7902
- }
7903
- if (last.size !== newMountain.size) {
7904
- this.mountains.push(last);
7905
- this.mountains.push(newMountain);
7906
- return;
7907
- }
7908
- newMountain = last.mergeWith(this.hasher, newMountain);
7909
- }
7910
- }
7911
- /**
7912
- * Root of the entire structure.
7913
- *
7914
- * https://graypaper.fluffylabs.dev/#/5f542d7/3b20013b2001?v=0.6.2
7915
- */
7916
- getSuperPeakHash() {
7917
- if (this.mountains.length === 0) {
7918
- return Bytes.zero(HASH_SIZE).asOpaque();
7919
- }
7920
- const revMountains = this.mountains.slice().reverse();
7921
- const length = revMountains.length;
7922
- let lastHash = revMountains[0].peak;
7923
- for (let i = 1; i < length; i++) {
7924
- const mountain = revMountains[i];
7925
- lastHash = this.hasher.hashConcatPrepend(SUPER_PEAK_STRING, lastHash, mountain.peak);
7926
- }
7927
- return lastHash;
7928
- }
7929
- /** Get current peaks. */
7930
- getPeaks() {
7931
- const peaks = [];
7932
- const mountains = this.mountains;
7933
- // always 2**index
7934
- let currentSize = 1;
7935
- let currentIdx = mountains.length - 1;
7936
- while (currentIdx >= 0) {
7937
- const currentItem = mountains[currentIdx];
7938
- if (currentItem.size >= currentSize && currentItem.size < 2 * currentSize) {
7939
- peaks.push(currentItem.peak);
7940
- currentIdx -= 1;
7941
- }
7942
- else {
7943
- peaks.push(null);
7944
- }
7945
- // move to the next index.
7946
- currentSize = currentSize << 1;
7947
- }
7948
- return { peaks };
7949
- }
7950
- }
7951
- /** An internal helper structure to represent a merkle trie for MMR. */
7952
- class Mountain {
7953
- peak;
7954
- size;
7955
- constructor(peak, size) {
7956
- this.peak = peak;
7957
- this.size = size;
7958
- }
7959
- static fromPeak(peak, size) {
7960
- return new Mountain(peak, size);
7961
- }
7962
- static fromChildren(hasher, children) {
7963
- const [left, right] = children;
7964
- const peak = hasher.hashConcat(left.peak, right.peak);
7965
- const size = left.size + right.size;
7966
- return new Mountain(peak, size);
7967
- }
7968
- /** Merge with another montain of the same size. */
7969
- mergeWith(hasher, other) {
7970
- return Mountain.fromChildren(hasher, [this, other]);
7971
- }
7972
- toString() {
7973
- return `${this.size} @ ${this.peak}`;
7974
- }
7975
- }
7976
-
7977
- var index$f = /*#__PURE__*/Object.freeze({
7978
- __proto__: null,
7979
- MerkleMountainRange: MerkleMountainRange
7980
- });
7981
-
7982
- /**
7983
- * `H = 8`: The size of recent history, in blocks.
7984
- *
7985
- * https://graypaper.fluffylabs.dev/#/579bd12/416300416500
7986
- */
7987
- const MAX_RECENT_HISTORY = 8;
7988
- class LegacyBlockState extends WithDebug {
7989
- headerHash;
7990
- mmr;
7991
- postStateRoot;
7992
- reported;
7993
- static Codec = codec$1.Class(LegacyBlockState, {
7994
- headerHash: codec$1.bytes(HASH_SIZE).asOpaque(),
7995
- mmr: codec$1.object({
7996
- peaks: readonlyArray(codec$1.sequenceVarLen(codec$1.optional(codec$1.bytes(HASH_SIZE)))),
7997
- }),
7998
- postStateRoot: codec$1.bytes(HASH_SIZE).asOpaque(),
7999
- reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
8000
- });
8001
- static create({ headerHash, mmr, postStateRoot, reported }) {
8002
- return new LegacyBlockState(headerHash, mmr, postStateRoot, reported);
8003
- }
8004
- constructor(
8005
- /** Header hash. */
8006
- headerHash,
8007
- /** Merkle mountain range peaks. */
8008
- mmr,
8009
- /** Posterior state root filled in with a 1-block delay. */
8010
- postStateRoot,
8011
- /** Reported work packages (no more than number of cores). */
8012
- reported) {
8013
- super();
8014
- this.headerHash = headerHash;
8015
- this.mmr = mmr;
8016
- this.postStateRoot = postStateRoot;
8017
- this.reported = reported;
8018
- }
7792
+ constructor(
7793
+ /** Service id that auto-accumulates. */
7794
+ service,
7795
+ /** Gas limit for auto-accumulation. */
7796
+ gasLimit) {
7797
+ this.service = service;
7798
+ this.gasLimit = gasLimit;
7799
+ }
8019
7800
  }
8020
- class LegacyRecentBlocks extends WithDebug {
8021
- blocks;
8022
- static Codec = codec$1.Class(LegacyRecentBlocks, {
8023
- blocks: codecKnownSizeArray(LegacyBlockState.Codec, {
8024
- minLength: 0,
8025
- maxLength: MAX_RECENT_HISTORY,
8026
- typicalLength: MAX_RECENT_HISTORY,
8027
- }),
7801
+ /**
7802
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/11da0111da01?v=0.6.7
7803
+ */
7804
+ class PrivilegedServices {
7805
+ manager;
7806
+ authManager;
7807
+ validatorsManager;
7808
+ autoAccumulateServices;
7809
+ static Codec = codec$1.Class(PrivilegedServices, {
7810
+ manager: codec$1.u32.asOpaque(),
7811
+ authManager: Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
7812
+ ? codecPerCore(codec$1.u32.asOpaque())
7813
+ : codecWithContext((ctx) => codec$1.u32.asOpaque().convert(
7814
+ // NOTE: [MaSo] In a compatibility mode we are always updating all entries
7815
+ // (all the entries are the same)
7816
+ // so it doesn't matter which one we take here.
7817
+ (perCore) => perCore[0], (serviceId) => {
7818
+ const array = new Array(ctx.coresCount).fill(serviceId);
7819
+ return tryAsPerCore(array, ctx);
7820
+ })),
7821
+ validatorsManager: codec$1.u32.asOpaque(),
7822
+ autoAccumulateServices: readonlyArray(codec$1.sequenceVarLen(AutoAccumulate.Codec)),
8028
7823
  });
8029
- static create(a) {
8030
- return new LegacyRecentBlocks(a.blocks);
7824
+ static create({ manager, authManager, validatorsManager, autoAccumulateServices }) {
7825
+ return new PrivilegedServices(manager, authManager, validatorsManager, autoAccumulateServices);
8031
7826
  }
8032
7827
  constructor(
8033
7828
  /**
8034
- * Most recent blocks.
8035
- * https://graypaper.fluffylabs.dev/#/85129da/0fb6010fb601?v=0.6.3
7829
+ * `chi_m`: The first, χm, is the index of the manager service which is
7830
+ * the service able to effect an alteration of χ from block to block,
7831
+ * as well as bestow services with storage deposit credits.
7832
+ * https://graypaper.fluffylabs.dev/#/7e6ff6a/11a40111a801?v=0.6.7
8036
7833
  */
8037
- blocks) {
8038
- super();
8039
- this.blocks = blocks;
7834
+ manager,
7835
+ /** `chi_a`: Manages authorization queue one for each core. */
7836
+ authManager,
7837
+ /** `chi_v`: Managers validator keys. */
7838
+ validatorsManager,
7839
+ /** `chi_g`: Dictionary of services that auto-accumulate every block with their gas limit. */
7840
+ autoAccumulateServices) {
7841
+ this.manager = manager;
7842
+ this.authManager = authManager;
7843
+ this.validatorsManager = validatorsManager;
7844
+ this.autoAccumulateServices = autoAccumulateServices;
8040
7845
  }
8041
7846
  }
7847
+
7848
+ /**
7849
+ * `H = 8`: The size of recent history, in blocks.
7850
+ *
7851
+ * https://graypaper.fluffylabs.dev/#/579bd12/416300416500
7852
+ */
7853
+ const MAX_RECENT_HISTORY = 8;
8042
7854
  /** Recent history of a single block. */
8043
7855
  class BlockState extends WithDebug {
8044
7856
  headerHash;
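Note: in the hunk above `PrivilegedServices` keeps its per-core `authManager` (with the pre-0.6.7 codec shim). A construction sketch; the service ids and the `chainSpec` object (anything exposing `coresCount`) are placeholders:

// Sketch only - ids and chainSpec are placeholders, not values from this diff.
const privileged = PrivilegedServices.create({
  manager: tryAsServiceId(1),
  // one auth-manager entry per core on GP >= 0.6.7
  authManager: tryAsPerCore(new Array(chainSpec.coresCount).fill(tryAsServiceId(2)), chainSpec),
  validatorsManager: tryAsServiceId(3),
  autoAccumulateServices: [],
});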
@@ -8103,66 +7915,42 @@ class RecentBlocks extends WithDebug {
8103
7915
  }
8104
7916
  }
8105
7917
  /**
8106
- * Unified recent history of blocks that handles both legacy and current formats.
7918
+ * Recent history of blocks.
8107
7919
  *
8108
- * https://graypaper.fluffylabs.dev/#/85129da/38cb0138cb01?v=0.6.3
8109
7920
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/0fc9010fc901?v=0.6.7
8110
7921
  */
8111
7922
  class RecentBlocksHistory extends WithDebug {
8112
7923
  current;
8113
- legacy;
8114
- static Codec = Descriptor.new("RecentBlocksHistory", Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ? RecentBlocks.Codec.sizeHint : LegacyRecentBlocks.Codec.sizeHint, (encoder, value) => Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
8115
- ? RecentBlocks.Codec.encode(encoder, value.asCurrent())
8116
- : LegacyRecentBlocks.Codec.encode(encoder, value.asLegacy()), (decoder) => {
8117
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
8118
- const recentBlocks = RecentBlocks.Codec.decode(decoder);
8119
- return RecentBlocksHistory.create(recentBlocks);
8120
- }
8121
- const legacyBlocks = LegacyRecentBlocks.Codec.decode(decoder);
8122
- return RecentBlocksHistory.legacyCreate(legacyBlocks);
8123
- }, (_sizer) => {
8124
- return Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
8125
- ? RecentBlocks.Codec.sizeHint
8126
- : LegacyRecentBlocks.Codec.sizeHint;
7924
+ static Codec = Descriptor.new("RecentBlocksHistory", RecentBlocks.Codec.sizeHint, (encoder, value) => RecentBlocks.Codec.encode(encoder, value.asCurrent()), (decoder) => {
7925
+ const recentBlocks = RecentBlocks.Codec.decode(decoder);
7926
+ return RecentBlocksHistory.create(recentBlocks);
7927
+ }, (skip) => {
7928
+ return RecentBlocks.Codec.skip(skip);
8127
7929
  });
8128
7930
  static create(recentBlocks) {
8129
- return new RecentBlocksHistory(recentBlocks, null);
8130
- }
8131
- static legacyCreate(legacyRecentBlocks) {
8132
- return new RecentBlocksHistory(null, legacyRecentBlocks);
7931
+ return new RecentBlocksHistory(recentBlocks);
8133
7932
  }
8134
7933
  static empty() {
8135
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)) {
8136
- return RecentBlocksHistory.create(RecentBlocks.create({
8137
- blocks: asKnownSize([]),
8138
- accumulationLog: { peaks: [] },
8139
- }));
8140
- }
8141
- return RecentBlocksHistory.legacyCreate(LegacyRecentBlocks.create({ blocks: asKnownSize([]) }));
7934
+ return RecentBlocksHistory.create(RecentBlocks.create({
7935
+ blocks: asKnownSize([]),
7936
+ accumulationLog: { peaks: [] },
7937
+ }));
8142
7938
  }
8143
7939
  /**
8144
7940
  * Returns the block's BEEFY super peak.
8145
- *
8146
- * NOTE: The `hasher` parameter exists solely for backward compatibility with legacy block format.
8147
7941
  */
8148
- static accumulationResult(block, { hasher, }) {
8149
- return Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
8150
- ? block.accumulationResult
8151
- : MerkleMountainRange.fromPeaks(hasher, block.mmr).getSuperPeakHash();
7942
+ static accumulationResult(block) {
7943
+ return block.accumulationResult;
8152
7944
  }
8153
- constructor(current, legacy) {
7945
+ constructor(current) {
8154
7946
  super();
8155
7947
  this.current = current;
8156
- this.legacy = legacy;
8157
7948
  }
8158
7949
  /** History of recent blocks with maximum size of `MAX_RECENT_HISTORY` */
8159
7950
  get blocks() {
8160
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) && this.current !== null) {
7951
+ if (this.current !== null) {
8161
7952
  return this.current.blocks;
8162
7953
  }
8163
- if (this.legacy !== null) {
8164
- return this.legacy.blocks;
8165
- }
8166
7954
  throw new Error("RecentBlocksHistory is in invalid state");
8167
7955
  }
8168
7956
  asCurrent() {
@@ -8171,24 +7959,13 @@ class RecentBlocksHistory extends WithDebug {
8171
7959
  }
8172
7960
  return this.current;
8173
7961
  }
8174
- asLegacy() {
8175
- if (this.legacy === null) {
8176
- throw new Error("Cannot access legacy RecentBlocks format");
8177
- }
8178
- return this.legacy;
8179
- }
8180
7962
  updateBlocks(blocks) {
8181
- if (Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) && this.current !== null) {
7963
+ if (this.current !== null) {
8182
7964
  return RecentBlocksHistory.create(RecentBlocks.create({
8183
7965
  ...this.current,
8184
7966
  blocks: asOpaqueType(blocks),
8185
7967
  }));
8186
7968
  }
8187
- if (this.legacy !== null) {
8188
- return RecentBlocksHistory.legacyCreate(LegacyRecentBlocks.create({
8189
- blocks: asOpaqueType(blocks),
8190
- }));
8191
- }
8192
7969
  throw new Error("RecentBlocksHistory is in invalid state. Cannot be updated!");
8193
7970
  }
8194
7971
  }
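Note: with the legacy branch gone, `RecentBlocksHistory` only round-trips the post-0.6.7 `RecentBlocks` layout. A usage sketch (`someBlockState` is a placeholder for a decoded block-state entry):

// Sketch: the legacy code paths removed above no longer exist.
const history = RecentBlocksHistory.empty();            // always the >= 0.6.7 RecentBlocks shape
const updated = history.updateBlocks(asKnownSize([]));
// accumulationResult no longer needs a hasher - it just reads the field off the block state.
const superPeak = RecentBlocksHistory.accumulationResult(someBlockState);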
@@ -8365,31 +8142,18 @@ class ServiceAccountInfo extends WithDebug {
8365
8142
  created;
8366
8143
  lastAccumulation;
8367
8144
  parentService;
8368
- static Codec = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
8369
- ? codec$1.Class(ServiceAccountInfo, {
8370
- codeHash: codec$1.bytes(HASH_SIZE).asOpaque(),
8371
- balance: codec$1.u64,
8372
- accumulateMinGas: codec$1.u64.convert((x) => x, tryAsServiceGas),
8373
- onTransferMinGas: codec$1.u64.convert((x) => x, tryAsServiceGas),
8374
- storageUtilisationBytes: codec$1.u64,
8375
- gratisStorage: codec$1.u64,
8376
- storageUtilisationCount: codec$1.u32,
8377
- created: codec$1.u32.convert((x) => x, tryAsTimeSlot),
8378
- lastAccumulation: codec$1.u32.convert((x) => x, tryAsTimeSlot),
8379
- parentService: codec$1.u32.convert((x) => x, tryAsServiceId),
8380
- })
8381
- : codec$1.Class(ServiceAccountInfo, {
8382
- codeHash: codec$1.bytes(HASH_SIZE).asOpaque(),
8383
- balance: codec$1.u64,
8384
- accumulateMinGas: codec$1.u64.convert((x) => x, tryAsServiceGas),
8385
- onTransferMinGas: codec$1.u64.convert((x) => x, tryAsServiceGas),
8386
- storageUtilisationBytes: codec$1.u64,
8387
- storageUtilisationCount: codec$1.u32,
8388
- gratisStorage: ignoreValueWithDefault(tryAsU64(0)),
8389
- created: ignoreValueWithDefault(tryAsTimeSlot(0)),
8390
- lastAccumulation: ignoreValueWithDefault(tryAsTimeSlot(0)),
8391
- parentService: ignoreValueWithDefault(tryAsServiceId(0)),
8392
- });
8145
+ static Codec = codec$1.Class(ServiceAccountInfo, {
8146
+ codeHash: codec$1.bytes(HASH_SIZE).asOpaque(),
8147
+ balance: codec$1.u64,
8148
+ accumulateMinGas: codec$1.u64.convert((x) => x, tryAsServiceGas),
8149
+ onTransferMinGas: codec$1.u64.convert((x) => x, tryAsServiceGas),
8150
+ storageUtilisationBytes: codec$1.u64,
8151
+ gratisStorage: codec$1.u64,
8152
+ storageUtilisationCount: codec$1.u32,
8153
+ created: codec$1.u32.convert((x) => x, tryAsTimeSlot),
8154
+ lastAccumulation: codec$1.u32.convert((x) => x, tryAsTimeSlot),
8155
+ parentService: codec$1.u32.convert((x) => x, tryAsServiceId),
8156
+ });
8393
8157
  static create(a) {
8394
8158
  return new ServiceAccountInfo(a.codeHash, a.balance, a.accumulateMinGas, a.onTransferMinGas, a.storageUtilisationBytes, a.gratisStorage, a.storageUtilisationCount, a.created, a.lastAccumulation, a.parentService);
8395
8159
  }
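Note: the pre-0.6.7 codec above defaulted `gratisStorage`, `created`, `lastAccumulation` and `parentService` to zero; after this change the single codec always encodes the full field set. A sketch with placeholder values, assuming the `tryAs*` helpers are importable from the lib:

// Sketch only - all values are placeholders.
const info = ServiceAccountInfo.create({
  codeHash: Bytes.zero(HASH_SIZE).asOpaque(),
  balance: tryAsU64(0),
  accumulateMinGas: tryAsServiceGas(0),
  onTransferMinGas: tryAsServiceGas(0),
  storageUtilisationBytes: tryAsU64(0),
  gratisStorage: tryAsU64(0),           // no longer defaulted away
  storageUtilisationCount: tryAsU32(0),
  created: tryAsTimeSlot(0),            // no longer defaulted away
  lastAccumulation: tryAsTimeSlot(0),   // no longer defaulted away
  parentService: tryAsServiceId(0),     // no longer defaulted away
});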
@@ -8662,9 +8426,7 @@ class UpdateStorage {
8662
8426
  */
8663
8427
  const ENTROPY_ENTRIES = 4;
8664
8428
 
8665
- const codecServiceId = Compatibility.isSuite(TestSuite.W3F_DAVXY) ||
8666
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
8667
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
8429
+ const codecServiceId = Compatibility.isSuite(TestSuite.W3F_DAVXY) || Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_7)
8668
8430
  ? codec$1.u32.asOpaque()
8669
8431
  : codec$1.varU32.convert((s) => tryAsU32(s), (i) => tryAsServiceId(i));
8670
8432
  /**
@@ -9332,7 +9094,7 @@ const serviceDataCodec = codec$1.dictionary(codec$1.u32.asOpaque(), serviceEntri
9332
9094
  sortKeys: (a, b) => a - b,
9333
9095
  });
9334
9096
 
9335
- var index$e = /*#__PURE__*/Object.freeze({
9097
+ var index$f = /*#__PURE__*/Object.freeze({
9336
9098
  __proto__: null,
9337
9099
  AccumulationOutput: AccumulationOutput,
9338
9100
  AutoAccumulate: AutoAccumulate,
@@ -9346,8 +9108,6 @@ var index$e = /*#__PURE__*/Object.freeze({
9346
9108
  ENTROPY_ENTRIES: ENTROPY_ENTRIES,
9347
9109
  InMemoryService: InMemoryService,
9348
9110
  InMemoryState: InMemoryState,
9349
- LegacyBlockState: LegacyBlockState,
9350
- LegacyRecentBlocks: LegacyRecentBlocks,
9351
9111
  LookupHistoryItem: LookupHistoryItem,
9352
9112
  MAX_RECENT_HISTORY: MAX_RECENT_HISTORY,
9353
9113
  PreimageItem: PreimageItem,
@@ -9549,7 +9309,7 @@ function legacyServiceNested(serviceId, hash) {
9549
9309
  /** Serialization for particular state entries. */
9550
9310
  var serialize;
9551
9311
  (function (serialize) {
9552
- /** C(1): https://graypaper.fluffylabs.dev/#/85129da/38a20138a201?v=0.6.3 */
9312
+ /** C(1): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b15013b1501?v=0.6.7 */
9553
9313
  serialize.authPools = {
9554
9314
  key: stateKeys.index(StateKeyIdx.Alpha),
9555
9315
  Codec: codecPerCore(codecKnownSizeArray(codec$1.bytes(HASH_SIZE).asOpaque(), {
@@ -9559,7 +9319,7 @@ var serialize;
9559
9319
  })),
9560
9320
  extract: (s) => s.authPools,
9561
9321
  };
9562
- /** C(2): https://graypaper.fluffylabs.dev/#/85129da/38be0138be01?v=0.6.3 */
9322
+ /** C(2): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b31013b3101?v=0.6.7 */
9563
9323
  serialize.authQueues = {
9564
9324
  key: stateKeys.index(StateKeyIdx.Phi),
9565
9325
  Codec: codecPerCore(codecFixedSizeArray(codec$1.bytes(HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE)),
@@ -9567,7 +9327,6 @@ var serialize;
9567
9327
  };
9568
9328
  /**
9569
9329
  * C(3): Recent blocks with compatibility
9570
- * https://graypaper.fluffylabs.dev/#/85129da/38cb0138cb01?v=0.6.3
9571
9330
  * https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e013b3e01?v=0.6.7
9572
9331
  */
9573
9332
  serialize.recentBlocks = {
@@ -9575,7 +9334,7 @@ var serialize;
9575
9334
  Codec: RecentBlocksHistory.Codec,
9576
9335
  extract: (s) => s.recentBlocks,
9577
9336
  };
9578
- /** C(4): https://graypaper.fluffylabs.dev/#/85129da/38e60138e601?v=0.6.3 */
9337
+ /** C(4): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b63013b6301?v=0.6.7 */
9579
9338
  serialize.safrole = {
9580
9339
  key: stateKeys.index(StateKeyIdx.Gamma),
9581
9340
  Codec: SafroleData.Codec,
@@ -9586,55 +9345,55 @@ var serialize;
9586
9345
  ticketsAccumulator: s.ticketsAccumulator,
9587
9346
  }),
9588
9347
  };
9589
- /** C(5): https://graypaper.fluffylabs.dev/#/85129da/383d02383d02?v=0.6.3 */
9348
+ /** C(5): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bba013bba01?v=0.6.7 */
9590
9349
  serialize.disputesRecords = {
9591
9350
  key: stateKeys.index(StateKeyIdx.Psi),
9592
9351
  Codec: DisputesRecords.Codec,
9593
9352
  extract: (s) => s.disputesRecords,
9594
9353
  };
9595
- /** C(6): https://graypaper.fluffylabs.dev/#/85129da/387602387602?v=0.6.3 */
9354
+ /** C(6): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bf3013bf301?v=0.6.7 */
9596
9355
  serialize.entropy = {
9597
9356
  key: stateKeys.index(StateKeyIdx.Eta),
9598
9357
  Codec: codecFixedSizeArray(codec$1.bytes(HASH_SIZE).asOpaque(), ENTROPY_ENTRIES),
9599
9358
  extract: (s) => s.entropy,
9600
9359
  };
9601
- /** C(7): https://graypaper.fluffylabs.dev/#/85129da/388302388302?v=0.6.3 */
9360
+ /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
9602
9361
  serialize.designatedValidators = {
9603
9362
  key: stateKeys.index(StateKeyIdx.Iota),
9604
9363
  Codec: codecPerValidator(ValidatorData.Codec),
9605
9364
  extract: (s) => s.designatedValidatorData,
9606
9365
  };
9607
- /** C(8): https://graypaper.fluffylabs.dev/#/85129da/389002389002?v=0.6.3 */
9366
+ /** C(8): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b0d023b0d02?v=0.6.7 */
9608
9367
  serialize.currentValidators = {
9609
9368
  key: stateKeys.index(StateKeyIdx.Kappa),
9610
9369
  Codec: codecPerValidator(ValidatorData.Codec),
9611
9370
  extract: (s) => s.currentValidatorData,
9612
9371
  };
9613
- /** C(9): https://graypaper.fluffylabs.dev/#/85129da/389d02389d02?v=0.6.3 */
9372
+ /** C(9): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b1a023b1a02?v=0.6.7 */
9614
9373
  serialize.previousValidators = {
9615
9374
  key: stateKeys.index(StateKeyIdx.Lambda),
9616
9375
  Codec: codecPerValidator(ValidatorData.Codec),
9617
9376
  extract: (s) => s.previousValidatorData,
9618
9377
  };
9619
- /** C(10): https://graypaper.fluffylabs.dev/#/85129da/38aa0238aa02?v=0.6.3 */
9378
+ /** C(10): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b27023b2702?v=0.6.7 */
9620
9379
  serialize.availabilityAssignment = {
9621
9380
  key: stateKeys.index(StateKeyIdx.Rho),
9622
9381
  Codec: codecPerCore(codec$1.optional(AvailabilityAssignment.Codec)),
9623
9382
  extract: (s) => s.availabilityAssignment,
9624
9383
  };
9625
- /** C(11): https://graypaper.fluffylabs.dev/#/85129da/38c10238c102?v=0.6.3 */
9384
+ /** C(11): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e023b3e02?v=0.6.7 */
9626
9385
  serialize.timeslot = {
9627
9386
  key: stateKeys.index(StateKeyIdx.Tau),
9628
9387
  Codec: codec$1.u32.asOpaque(),
9629
9388
  extract: (s) => s.timeslot,
9630
9389
  };
9631
- /** C(12): https://graypaper.fluffylabs.dev/#/85129da/38cf0238cf02?v=0.6.3 */
9390
+ /** C(12): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b4c023b4c02?v=0.6.7 */
9632
9391
  serialize.privilegedServices = {
9633
9392
  key: stateKeys.index(StateKeyIdx.Chi),
9634
9393
  Codec: PrivilegedServices.Codec,
9635
9394
  extract: (s) => s.privilegedServices,
9636
9395
  };
9637
- /** C(13): https://graypaper.fluffylabs.dev/#/85129da/38e10238e102?v=0.6.3 */
9396
+ /** C(13): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b5e023b5e02?v=0.6.7 */
9638
9397
  serialize.statistics = {
9639
9398
  key: stateKeys.index(StateKeyIdx.Pi),
9640
9399
  Codec: StatisticsData.Codec,
@@ -9646,7 +9405,7 @@ var serialize;
9646
9405
  Codec: codecPerEpochBlock(readonlyArray(codec$1.sequenceVarLen(NotYetAccumulatedReport.Codec))),
9647
9406
  extract: (s) => s.accumulationQueue,
9648
9407
  };
9649
- /** C(15): https://graypaper.fluffylabs.dev/#/85129da/381903381903?v=0.6.3 */
9408
+ /** C(15): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b96023b9602?v=0.6.7 */
9650
9409
  serialize.recentlyAccumulated = {
9651
9410
  key: stateKeys.index(StateKeyIdx.Xi),
9652
9411
  Codec: codecPerEpochBlock(codec$1.sequenceVarLen(codec$1.bytes(HASH_SIZE).asOpaque()).convert((x) => Array.from(x), (x) => HashSet.from(x))),
@@ -10555,7 +10314,7 @@ function trieStringify(root, nodes) {
10555
10314
  return `\nLeaf('${leaf.getKey().toString()}',${value})`;
10556
10315
  }
10557
10316
 
10558
- var index$d = /*#__PURE__*/Object.freeze({
10317
+ var index$e = /*#__PURE__*/Object.freeze({
10559
10318
  __proto__: null,
10560
10319
  BranchNode: BranchNode,
10561
10320
  InMemoryTrie: InMemoryTrie,
@@ -10775,7 +10534,7 @@ function loadState(spec, entries) {
10775
10534
  * hashmap of `key -> value` entries.
10776
10535
  */
10777
10536
 
10778
- var index$c = /*#__PURE__*/Object.freeze({
10537
+ var index$d = /*#__PURE__*/Object.freeze({
10779
10538
  __proto__: null,
10780
10539
  SerializedService: SerializedService,
10781
10540
  SerializedState: SerializedState,
@@ -11061,7 +10820,7 @@ var LookupKind;
11061
10820
  LookupKind[LookupKind["DbKey"] = 1] = "DbKey";
11062
10821
  })(LookupKind || (LookupKind = {}));
11063
10822
 
11064
- var index$b = /*#__PURE__*/Object.freeze({
10823
+ var index$c = /*#__PURE__*/Object.freeze({
11065
10824
  __proto__: null,
11066
10825
  InMemoryBlocks: InMemoryBlocks,
11067
10826
  InMemoryStates: InMemoryStates,
@@ -11720,7 +11479,7 @@ function chunksToShards(spec, chunks) {
11720
11479
  return tryAsPerValidator(result, spec);
11721
11480
  }
11722
11481
 
11723
- var index$a = /*#__PURE__*/Object.freeze({
11482
+ var index$b = /*#__PURE__*/Object.freeze({
11724
11483
  __proto__: null,
11725
11484
  N_CHUNKS_REDUNDANCY: N_CHUNKS_REDUNDANCY,
11726
11485
  N_CHUNKS_REQUIRED: N_CHUNKS_REQUIRED,
@@ -12292,7 +12051,7 @@ function preimageLenAsU32(length) {
12292
12051
  return length >= 2n ** 32n ? null : tryAsU32(Number(length));
12293
12052
  }
12294
12053
 
12295
- var index$9 = /*#__PURE__*/Object.freeze({
12054
+ var index$a = /*#__PURE__*/Object.freeze({
12296
12055
  __proto__: null,
12297
12056
  AccumulationStateUpdate: AccumulationStateUpdate,
12298
12057
  CURRENT_SERVICE_ID: CURRENT_SERVICE_ID,
@@ -12603,13 +12362,135 @@ class Logger {
12603
12362
  }
12604
12363
  }
12605
12364
 
12606
- var index$8 = /*#__PURE__*/Object.freeze({
12365
+ var index$9 = /*#__PURE__*/Object.freeze({
12607
12366
  __proto__: null,
12608
12367
  get Level () { return Level; },
12609
12368
  Logger: Logger,
12610
12369
  parseLoggerOptions: parseLoggerOptions
12611
12370
  });
12612
12371
 
12372
+ const SUPER_PEAK_STRING = BytesBlob.blobFromString("peak");
12373
+ /**
12374
+ * Merkle Mountain Range.
12375
+ *
12376
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3aa0023aa002?v=0.6.2
12377
+ */
12378
+ class MerkleMountainRange {
12379
+ hasher;
12380
+ mountains;
12381
+ /** Construct an empty MMR. */
12382
+ static empty(hasher) {
12383
+ return new MerkleMountainRange(hasher);
12384
+ }
12385
+ /** Construct a new MMR from existing peaks. */
12386
+ static fromPeaks(hasher, mmr) {
12387
+ return new MerkleMountainRange(hasher, mmr.peaks
12388
+ .reduce((acc, peak, index) => {
12389
+ if (peak !== null) {
12390
+ acc.push(Mountain.fromPeak(peak, 2 ** index));
12391
+ }
12392
+ return acc;
12393
+ }, [])
12394
+ .reverse());
12395
+ }
12396
+ constructor(hasher,
12397
+ /** Store non-empty merkle tries (mountains) ordered by descending size. */
12398
+ mountains = []) {
12399
+ this.hasher = hasher;
12400
+ this.mountains = mountains;
12401
+ }
12402
+ /**
12403
+ * Append a new hash to the MMR structure.
12404
+ *
12405
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3b11003b1100?v=0.6.2
12406
+ */
12407
+ append(hash) {
12408
+ let newMountain = Mountain.fromPeak(hash, 1);
12409
+ for (;;) {
12410
+ const last = this.mountains.pop();
12411
+ if (last === undefined) {
12412
+ this.mountains.push(newMountain);
12413
+ return;
12414
+ }
12415
+ if (last.size !== newMountain.size) {
12416
+ this.mountains.push(last);
12417
+ this.mountains.push(newMountain);
12418
+ return;
12419
+ }
12420
+ newMountain = last.mergeWith(this.hasher, newMountain);
12421
+ }
12422
+ }
12423
+ /**
12424
+ * Root of the entire structure.
12425
+ *
12426
+ * https://graypaper.fluffylabs.dev/#/5f542d7/3b20013b2001?v=0.6.2
12427
+ */
12428
+ getSuperPeakHash() {
12429
+ if (this.mountains.length === 0) {
12430
+ return Bytes.zero(HASH_SIZE).asOpaque();
12431
+ }
12432
+ const revMountains = this.mountains.slice().reverse();
12433
+ const length = revMountains.length;
12434
+ let lastHash = revMountains[0].peak;
12435
+ for (let i = 1; i < length; i++) {
12436
+ const mountain = revMountains[i];
12437
+ lastHash = this.hasher.hashConcatPrepend(SUPER_PEAK_STRING, lastHash, mountain.peak);
12438
+ }
12439
+ return lastHash;
12440
+ }
12441
+ /** Get current peaks. */
12442
+ getPeaks() {
12443
+ const peaks = [];
12444
+ const mountains = this.mountains;
12445
+ // always 2**index
12446
+ let currentSize = 1;
12447
+ let currentIdx = mountains.length - 1;
12448
+ while (currentIdx >= 0) {
12449
+ const currentItem = mountains[currentIdx];
12450
+ if (currentItem.size >= currentSize && currentItem.size < 2 * currentSize) {
12451
+ peaks.push(currentItem.peak);
12452
+ currentIdx -= 1;
12453
+ }
12454
+ else {
12455
+ peaks.push(null);
12456
+ }
12457
+ // move to the next index.
12458
+ currentSize = currentSize << 1;
12459
+ }
12460
+ return { peaks };
12461
+ }
12462
+ }
12463
+ /** An internal helper structure to represent a merkle trie for MMR. */
12464
+ class Mountain {
12465
+ peak;
12466
+ size;
12467
+ constructor(peak, size) {
12468
+ this.peak = peak;
12469
+ this.size = size;
12470
+ }
12471
+ static fromPeak(peak, size) {
12472
+ return new Mountain(peak, size);
12473
+ }
12474
+ static fromChildren(hasher, children) {
12475
+ const [left, right] = children;
12476
+ const peak = hasher.hashConcat(left.peak, right.peak);
12477
+ const size = left.size + right.size;
12478
+ return new Mountain(peak, size);
12479
+ }
12480
+ /** Merge with another montain of the same size. */
12481
+ mergeWith(hasher, other) {
12482
+ return Mountain.fromChildren(hasher, [this, other]);
12483
+ }
12484
+ toString() {
12485
+ return `${this.size} @ ${this.peak}`;
12486
+ }
12487
+ }
12488
+
12489
+ var index$8 = /*#__PURE__*/Object.freeze({
12490
+ __proto__: null,
12491
+ MerkleMountainRange: MerkleMountainRange
12492
+ });
12493
+
12613
12494
  /**
12614
12495
  * Upper bound of instruction distance - it is equal to max value of GP's skip function + 1
12615
12496
  */
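Note: the hunk above relocates `MerkleMountainRange` into its own frozen namespace (re-exported as `mmr` at the bottom of this diff). A short usage sketch; `blake2b`, `hashA` and `hashB` are stand-ins, the hasher only needs `hashConcat` and `hashConcatPrepend`:

// Sketch only - hasher and hash values are placeholders.
const mmr = MerkleMountainRange.empty(blake2b);
mmr.append(hashA);
mmr.append(hashB);                        // two size-1 mountains merge into one of size 2
const superPeak = mmr.getSuperPeakHash(); // zero hash while the structure is empty
const { peaks } = mmr.getPeaks();         // sparse array: peaks[i] covers 2**i leaves, or null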
@@ -13445,9 +13326,7 @@ const instructionsWithoutArgs = [
13445
13326
  [Instruction.TRAP, 1],
13446
13327
  [Instruction.FALLTHROUGH, 1],
13447
13328
  ];
13448
- const instructionsWithOneImmediate = [
13449
- [Instruction.ECALLI, Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ? 0 : 1],
13450
- ];
13329
+ const instructionsWithOneImmediate = [[Instruction.ECALLI, 1]];
13451
13330
  const instructionsWithOneRegisterAndOneExtendedWidthImmediate = [[Instruction.LOAD_IMM_64, 1]];
13452
13331
  const instructionsWithTwoImmediates = [
13453
13332
  [Instruction.STORE_IMM_U8, 1],
@@ -15923,7 +15802,6 @@ class ProgramDecoder {
15923
15802
 
15924
15803
  class Interpreter {
15925
15804
  useSbrkGas;
15926
- ignoreInstructionGas;
15927
15805
  registers = new Registers();
15928
15806
  code = new Uint8Array();
15929
15807
  mask = Mask.empty();
@@ -15950,9 +15828,8 @@ class Interpreter {
15950
15828
  argsDecodingResults = createResults();
15951
15829
  basicBlocks;
15952
15830
  jumpTable = JumpTable.empty();
15953
- constructor({ useSbrkGas = false, ignoreInstructionGas = false } = {}) {
15831
+ constructor({ useSbrkGas = false } = {}) {
15954
15832
  this.useSbrkGas = useSbrkGas;
15955
- this.ignoreInstructionGas = ignoreInstructionGas;
15956
15833
  this.argsDecoder = new ArgsDecoder();
15957
15834
  this.basicBlocks = new BasicBlocks();
15958
15835
  const mathOps = new MathOps(this.registers);
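Note: with `ignoreInstructionGas` removed, gas is charged for every instruction unconditionally (see the `step` hunk below). Construction reduces to:

// Sketch: the only remaining option; passing ignoreInstructionGas now has no effect.
const vm = new Interpreter({ useSbrkGas: false });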
@@ -16032,7 +15909,7 @@ class Interpreter {
16032
15909
  const currentInstruction = this.code[this.pc] ?? Instruction.TRAP;
16033
15910
  const isValidInstruction = Instruction[currentInstruction] !== undefined;
16034
15911
  const gasCost = instructionGasMap[currentInstruction] ?? instructionGasMap[Instruction.TRAP];
16035
- const underflow = this.ignoreInstructionGas ? false : this.gas.sub(gasCost);
15912
+ const underflow = this.gas.sub(gasCost);
16036
15913
  if (underflow) {
16037
15914
  this.status = Status.OOG;
16038
15915
  return this.status;
@@ -16096,11 +15973,6 @@ class Interpreter {
16096
15973
  }
16097
15974
  }
16098
15975
  if (this.instructionResult.status !== null) {
16099
- // All abnormal terminations should be interpreted as TRAP and we should subtract the gas. In case of FAULT we have to do it manually at the very end.
16100
- if (this.instructionResult.status === Result.FAULT || this.instructionResult.status === Result.FAULT_ACCESS) {
16101
- // TODO [ToDr] underflow?
16102
- this.gas.sub(instructionGasMap[Instruction.TRAP]);
16103
- }
16104
15976
  switch (this.instructionResult.status) {
16105
15977
  case Result.FAULT:
16106
15978
  this.status = Status.FAULT;
@@ -16544,7 +16416,7 @@ class HostCalls {
16544
16416
  }
16545
16417
  this.hostCalls.traceHostCall("Invoking", index, hostCall, regs, gasBefore);
16546
16418
  const result = await hostCall.execute(gas, regs, memory);
16547
- this.hostCalls.traceHostCall(result === undefined ? "Result" : `Status(${result})`, index, hostCall, regs, gas.get());
16419
+ this.hostCalls.traceHostCall(result === undefined ? "Result" : `Status(${PvmExecution[result]})`, index, hostCall, regs, gas.get());
16548
16420
  if (result === PvmExecution.Halt) {
16549
16421
  status = Status.HALT;
16550
16422
  return this.getReturnValue(status, pvmInstance);
@@ -16569,13 +16441,9 @@ class InterpreterInstanceManager {
16569
16441
  instances = [];
16570
16442
  waitingQueue = [];
16571
16443
  constructor(noOfPvmInstances) {
16572
- const shouldCountGas = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7) ||
16573
- Compatibility.isSuite(TestSuite.JAMDUNA, GpVersion.V0_6_5) ||
16574
- Compatibility.isSuite(TestSuite.W3F_DAVXY, GpVersion.V0_6_6);
16575
16444
  for (let i = 0; i < noOfPvmInstances; i++) {
16576
16445
  this.instances.push(new Interpreter({
16577
16446
  useSbrkGas: false,
16578
- ignoreInstructionGas: !shouldCountGas,
16579
16447
  }));
16580
16448
  }
16581
16449
  }
@@ -16598,13 +16466,13 @@ class InterpreterInstanceManager {
16598
16466
  }
16599
16467
 
16600
16468
  const logger = Logger.new(undefined, "host-calls-pvm");
16601
- // TODO [ToDr] Rename to just `HostCalls`
16602
16469
  /** Container for all available host calls. */
16603
16470
  class HostCallsManager {
16604
16471
  hostCalls = new Map();
16605
- missing = new Missing();
16606
- constructor(...hostCallHandlers) {
16607
- for (const handler of hostCallHandlers) {
16472
+ missing;
16473
+ constructor({ missing, handlers = [], }) {
16474
+ this.missing = missing;
16475
+ for (const handler of handlers) {
16608
16476
  check(this.hostCalls.get(handler.index) === undefined, `Overwriting host call handler at index ${handler.index}`);
16609
16477
  this.hostCalls.set(handler.index, handler);
16610
16478
  }
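Note: `HostCallsManager` switches from a variadic constructor to an options object, and the `Missing` fallback must now be supplied explicitly (the class is re-introduced later in this diff, just before `ServiceExecutorError`). A migration sketch; the handler instances are placeholders:

// Before: new HostCallsManager(gasHandler, lookupHandler, ...)
// After (sketch - handler instances are placeholders):
const hostCalls = new HostCallsManager({
  missing: new Missing(),
  handlers: [gasHandler, lookupHandler],   // optional, defaults to []
});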
@@ -16627,16 +16495,6 @@ class HostCallsManager {
16627
16495
  logger.trace(`[${currentServiceId}] ${context} ${name}${requested}. Gas: ${gas}. Regs: ${registerValues}.`);
16628
16496
  }
16629
16497
  }
16630
- class Missing {
16631
- index = tryAsHostCallIndex(2 ** 32 - 1);
16632
- gasCost = tryAsSmallGas(10);
16633
- currentServiceId = CURRENT_SERVICE_ID;
16634
- tracedRegisters = traceRegisters(7);
16635
- execute(_gas, regs, _memory) {
16636
- regs.set(7, HostCallResult.WHAT);
16637
- return Promise.resolve(undefined);
16638
- }
16639
- }
16640
16498
 
16641
16499
  var index$4 = /*#__PURE__*/Object.freeze({
16642
16500
  __proto__: null,
@@ -16938,7 +16796,7 @@ const recentBlockStateFromJson = json.object({
16938
16796
  reported: HashDictionary.fromEntries(reported.map((x) => [x.workPackageHash, x])),
16939
16797
  });
16940
16798
  });
16941
- const recentBlocksFromJson = json.object({
16799
+ const recentBlocksHistoryFromJson = json.object({
16942
16800
  history: json.array(recentBlockStateFromJson),
16943
16801
  mmr: {
16944
16802
  peaks: json.array(json.nullable(fromJson.bytes32())),
@@ -16949,29 +16807,6 @@ const recentBlocksFromJson = json.object({
16949
16807
  accumulationLog: mmr,
16950
16808
  }));
16951
16809
  });
16952
- const legacyRecentBlockStateFromJson = json.object({
16953
- header_hash: fromJson.bytes32(),
16954
- mmr: {
16955
- peaks: json.array(json.nullable(fromJson.bytes32())),
16956
- },
16957
- state_root: fromJson.bytes32(),
16958
- reported: json.array(reportedWorkPackageFromJson),
16959
- }, ({ header_hash, mmr, state_root, reported }) => {
16960
- return {
16961
- headerHash: header_hash,
16962
- mmr,
16963
- postStateRoot: state_root,
16964
- reported: HashDictionary.fromEntries(reported.map((x) => [x.workPackageHash, x])),
16965
- };
16966
- });
16967
- const legacyRecentBlocksFromJson = json.object(json.array(legacyRecentBlockStateFromJson), (blocks) => {
16968
- return RecentBlocksHistory.legacyCreate(LegacyRecentBlocks.create({
16969
- blocks,
16970
- }));
16971
- });
16972
- const recentBlocksHistoryFromJson = Compatibility.isGreaterOrEqual(GpVersion.V0_6_7)
16973
- ? recentBlocksFromJson
16974
- : legacyRecentBlocksFromJson;
16975
16810
 
16976
16811
  const ticketFromJson = json.object({
16977
16812
  id: fromJson.bytes32(),
@@ -17288,6 +17123,17 @@ var index$1 = /*#__PURE__*/Object.freeze({
17288
17123
  validatorDataFromJson: validatorDataFromJson
17289
17124
  });
17290
17125
 
17126
+ class Missing {
17127
+ index = tryAsHostCallIndex(2 ** 32 - 1);
17128
+ gasCost = tryAsSmallGas(10);
17129
+ currentServiceId = CURRENT_SERVICE_ID;
17130
+ tracedRegisters = traceRegisters(7);
17131
+ execute(_gas, regs, _memory) {
17132
+ regs.set(7, HostCallResult.WHAT);
17133
+ return Promise.resolve(undefined);
17134
+ }
17135
+ }
17136
+
17291
17137
  var ServiceExecutorError;
17292
17138
  (function (ServiceExecutorError) {
17293
17139
  ServiceExecutorError[ServiceExecutorError["NoLookup"] = 0] = "NoLookup";
@@ -17395,7 +17241,7 @@ class WorkPackageExecutor {
17395
17241
  class PvmExecutor {
17396
17242
  serviceCode;
17397
17243
  pvm;
17398
- hostCalls = new HostCallsManager();
17244
+ hostCalls = new HostCallsManager({ missing: new Missing() });
17399
17245
  pvmInstanceManager = new InterpreterInstanceManager(4);
17400
17246
  constructor(serviceCode) {
17401
17247
  this.serviceCode = serviceCode;
@@ -17544,13 +17390,13 @@ exports.collections = index$l;
17544
17390
  exports.config = index$k;
17545
17391
  exports.config_node = index$g;
17546
17392
  exports.crypto = index$m;
17547
- exports.database = index$b;
17548
- exports.erasure_coding = index$a;
17393
+ exports.database = index$c;
17394
+ exports.erasure_coding = index$b;
17549
17395
  exports.hash = index$n;
17550
- exports.jam_host_calls = index$9;
17396
+ exports.jam_host_calls = index$a;
17551
17397
  exports.json_parser = index$i;
17552
- exports.logger = index$8;
17553
- exports.mmr = index$f;
17398
+ exports.logger = index$9;
17399
+ exports.mmr = index$8;
17554
17400
  exports.numbers = index$p;
17555
17401
  exports.ordering = index$s;
17556
17402
  exports.pvm = index$3;
@@ -17559,9 +17405,9 @@ exports.pvm_interpreter = index$7;
17559
17405
  exports.pvm_program = index$5;
17560
17406
  exports.pvm_spi_decoder = index$6;
17561
17407
  exports.shuffling = index$2;
17562
- exports.state = index$e;
17408
+ exports.state = index$f;
17563
17409
  exports.state_json = index$1;
17564
- exports.state_merkleization = index$c;
17410
+ exports.state_merkleization = index$d;
17565
17411
  exports.transition = index;
17566
- exports.trie = index$d;
17412
+ exports.trie = index$e;
17567
17413
  exports.utils = index$r;