@typeberry/convert 0.5.1-aff8b4f → 0.5.1

package/index.js CHANGED
@@ -3871,17 +3871,6 @@ module.exports = __nccwpck_require__.p + "ccf8ada94096a8f232f5.js?reed_solomon_w
3871
3871
  /******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
3872
3872
  /******/ })();
3873
3873
  /******/
3874
- /******/ /* webpack/runtime/make namespace object */
3875
- /******/ (() => {
3876
- /******/ // define __esModule on exports
3877
- /******/ __nccwpck_require__.r = (exports) => {
3878
- /******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
3879
- /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
3880
- /******/ }
3881
- /******/ Object.defineProperty(exports, '__esModule', { value: true });
3882
- /******/ };
3883
- /******/ })();
3884
- /******/
3885
3874
  /******/ /* webpack/runtime/publicPath */
3886
3875
  /******/ (() => {
3887
3876
  /******/ var scriptUrl;
@@ -3924,44 +3913,6 @@ module.exports = __nccwpck_require__.p + "ccf8ada94096a8f232f5.js?reed_solomon_w
3924
3913
  /************************************************************************/
3925
3914
  var __webpack_exports__ = {};
3926
3915
 
3927
- // NAMESPACE OBJECT: ./packages/core/codec/descriptors.ts
3928
- var descriptors_namespaceObject = {};
3929
- __nccwpck_require__.r(descriptors_namespaceObject);
3930
- __nccwpck_require__.d(descriptors_namespaceObject, {
3931
- Class: () => (Class),
3932
- TYPICAL_DICTIONARY_LENGTH: () => (TYPICAL_DICTIONARY_LENGTH),
3933
- bitVecFixLen: () => (bitVecFixLen),
3934
- bitVecVarLen: () => (bitVecVarLen),
3935
- blob: () => (blob),
3936
- bool: () => (bool),
3937
- bytes: () => (bytes),
3938
- custom: () => (custom),
3939
- dictionary: () => (dictionary),
3940
- forEachDescriptor: () => (forEachDescriptor),
3941
- i16: () => (i16),
3942
- i24: () => (i24),
3943
- i32: () => (i32),
3944
- i64: () => (i64),
3945
- i8: () => (i8),
3946
- nothing: () => (nothing),
3947
- object: () => (object),
3948
- optional: () => (optional),
3949
- pair: () => (pair),
3950
- readonlyArray: () => (readonlyArray),
3951
- select: () => (descriptors_select),
3952
- sequenceFixLen: () => (sequenceFixLen),
3953
- sequenceVarLen: () => (sequenceVarLen),
3954
- string: () => (string),
3955
- u16: () => (u16),
3956
- u24: () => (u24),
3957
- u32: () => (u32),
3958
- u64: () => (u64),
3959
- u8: () => (u8),
3960
- union: () => (union),
3961
- varU32: () => (varU32),
3962
- varU64: () => (varU64)
3963
- });
3964
-
3965
3916
  ;// CONCATENATED MODULE: ./packages/core/logger/options.ts
3966
3917
  var Level;
3967
3918
  (function (Level) {
@@ -6676,48 +6627,6 @@ const pair = (a, b) => {
6676
6627
  };
6677
6628
  /** Custom encoding / decoding logic. */
6678
6629
  const custom = ({ name, sizeHint = { bytes: 0, isExact: false }, }, encode, decode, skip) => Descriptor.new(name, sizeHint, encode, decode, skip);
6679
- /** Tagged union type encoding. */
6680
- const union = (name, variants) => {
6681
- const keys = Object.keys(variants).map(Number);
6682
- const variantMap = Object.fromEntries(keys.map((key, idx) => [key, idx]));
6683
- const indexToKey = Object.fromEntries(keys.map((key, idx) => [idx, key]));
6684
- // Calculate size hint as the minimum variant size + index size
6685
- const minVariantSize = Math.max(...keys.map((key) => variants[key].sizeHint.bytes));
6686
- const sizeHint = {
6687
- bytes: 1 + minVariantSize, // varU32 index + smallest variant
6688
- isExact: false,
6689
- };
6690
- const encode = (e, x) => {
6691
- const idx = variantMap[x.kind];
6692
- if (idx === undefined) {
6693
- throw new Error(`Unknown variant type: ${x.kind} for ${name}`);
6694
- }
6695
- e.varU32(numbers_tryAsU32(idx));
6696
- const codec = variants[x.kind];
6697
- // I'm sorry but I can't figure out a better typing here :)
6698
- codec.encode(e, x);
6699
- };
6700
- const decode = (d) => {
6701
- const idx = d.varU32();
6702
- const kind = indexToKey[idx];
6703
- if (kind === undefined) {
6704
- throw new Error(`Unknown variant index: ${idx} for ${name}`);
6705
- }
6706
- const codec = variants[kind];
6707
- const value = codec.decode(d);
6708
- return { kind, ...value };
6709
- };
6710
- const skip = (s) => {
6711
- const idx = s.decoder.varU32();
6712
- const kind = indexToKey[idx];
6713
- if (kind === undefined) {
6714
- throw new Error(`Unknown variant index: ${idx} for ${name}`);
6715
- }
6716
- const codec = variants[kind];
6717
- codec.skip(s);
6718
- };
6719
- return Descriptor.new(name, sizeHint, encode, decode, skip);
6720
- };
6721
6630
  /** Choose a descriptor depending on the encoding/decoding context. */
6722
6631
  const descriptors_select = ({ name, sizeHint, }, chooser) => {
6723
6632
  const Self = chooser(null);
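The block removed above is the generic tagged-union combinator `union`: it maps each declared variant key to a dense index, encodes a value as a `varU32` index followed by the matching variant's payload, and on decode reads the index back, dispatches to that variant's codec, and throws on unknown indices. The two call sites that used it (`WorkExecResult` and `SafroleSealingKeysData`, further down in this diff) now inline the same tag-then-payload layout via `custom`; since both enums are already dense and 0-based, writing the `kind` directly preserves the old wire format. A minimal standalone sketch of the layout, with simplified `Enc`/`Dec` interfaces that are illustrative rather than typeberry's actual types:

    interface Enc { varU32(v: number): void; }
    interface Dec { varU32(): number; }
    interface Variant<T> { encode(e: Enc, value: T): void; decode(d: Dec): T; }

    // Write the variant tag first, then the variant's own payload.
    function encodeUnion<T extends { kind: number }>(e: Enc, variants: Record<number, Variant<T>>, value: T): void {
      const variant = variants[value.kind];
      if (variant === undefined) throw new Error(`Unknown variant: ${value.kind}`);
      e.varU32(value.kind);
      variant.encode(e, value);
    }

    // Read the tag back and dispatch to the matching variant's decoder.
    function decodeUnion<T>(d: Dec, variants: Record<number, Variant<T>>): T {
      const idx = d.varU32();
      const variant = variants[idx];
      if (variant === undefined) throw new Error(`Unknown variant index: ${idx}`);
      return variant.decode(d);
    }
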
@@ -6869,11 +6778,6 @@ function sequenceViewFixLen(type, { fixedLength }) {
6869
6778
 
6870
6779
 
6871
6780
 
6872
- // additional re-export of descriptors namespace under `codec`
6873
- // note we export descriptors in top level as well,
6874
- // because writing `codec.codec.u32` when using the library looks weird
6875
-
6876
- const codec_codec = descriptors_namespaceObject;
6877
6781
 
6878
6782
  ;// CONCATENATED MODULE: ./node_modules/@typeberry/native/chunk-CPmnHcRE.js
6879
6783
  //#region rolldown:runtime
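Together with the earlier hunks, this is one structural change: the previous build grouped the codec descriptor helpers into a webpack namespace object (`descriptors_namespaceObject`), re-exported it as `codec_codec`, and accessed every descriptor through that alias; the new build drops the namespace object (and with it the now-unused `__nccwpck_require__.r` ES-module-marking runtime helper removed in the first hunk) and references each descriptor directly. The many hunks that follow are the mechanical call-site rewrites, for example:

    // 0.5.1-aff8b4f
    validatorIndex: codec_codec.u16.asOpaque(),
    // 0.5.1
    validatorIndex: u16.asOpaque(),
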
@@ -9213,7 +9117,7 @@ function codecWithContext(chooser) {
9213
9117
  const defaultContext = chain_spec_fullChainSpec;
9214
9118
  const { name, sizeHint } = chooser(defaultContext);
9215
9119
  const cache = new Map();
9216
- return codec_codec.select({
9120
+ return descriptors_select({
9217
9121
  name,
9218
9122
  sizeHint: { bytes: sizeHint.bytes, isExact: false },
9219
9123
  }, (context) => {
@@ -9240,9 +9144,9 @@ function codecWithContext(chooser) {
9240
9144
  /** Codec for a known-size array with length validation. */
9241
9145
  const codecKnownSizeArray = (val, options, _id) => {
9242
9146
  if ("fixedLength" in options) {
9243
- return codec_codec.readonlyArray(codec_codec.sequenceFixLen(val, options.fixedLength)).convert(seeThrough, sized_array_asKnownSize);
9147
+ return readonlyArray(sequenceFixLen(val, options.fixedLength)).convert(seeThrough, sized_array_asKnownSize);
9244
9148
  }
9245
- return codec_codec.readonlyArray(codec_codec.sequenceVarLen(val, options)).convert(seeThrough, sized_array_asKnownSize);
9149
+ return readonlyArray(sequenceVarLen(val, options)).convert(seeThrough, sized_array_asKnownSize);
9246
9150
  };
9247
9151
  /** Codec for a fixed-size array with length validation. */
9248
9152
  const codecFixedSizeArray = (val, len) => {
@@ -9251,7 +9155,7 @@ const codecFixedSizeArray = (val, len) => {
9251
9155
  throw new Error(`[${val.name}] Invalid size of fixed-size array. Got ${actual}, expected: ${len}`);
9252
9156
  }
9253
9157
  };
9254
- return codec_codec.sequenceFixLen(val, len).convert((i) => {
9158
+ return sequenceFixLen(val, len).convert((i) => {
9255
9159
  checkLength(i.length);
9256
9160
  return i;
9257
9161
  }, (o) => {
@@ -9260,7 +9164,7 @@ const codecFixedSizeArray = (val, len) => {
9260
9164
  });
9261
9165
  };
9262
9166
  /** Codec for a hash-dictionary. */
9263
- const codecHashDictionary = (value, extractKey, { typicalLength = codec_codec.TYPICAL_DICTIONARY_LENGTH, compare = (a, b) => extractKey(a).compare(extractKey(b)), } = {}) => {
9167
+ const codecHashDictionary = (value, extractKey, { typicalLength = TYPICAL_DICTIONARY_LENGTH, compare = (a, b) => extractKey(a).compare(extractKey(b)), } = {}) => {
9264
9168
  return Descriptor.new(`HashDictionary<${value.name}>[?]`, {
9265
9169
  bytes: typicalLength * value.sizeHint.bytes,
9266
9170
  isExact: false,
@@ -9312,13 +9216,13 @@ class AvailabilityAssurance extends WithDebug {
9312
9216
  bitfield;
9313
9217
  validatorIndex;
9314
9218
  signature;
9315
- static Codec = codec_codec.Class(AvailabilityAssurance, {
9316
- anchor: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9219
+ static Codec = Class(AvailabilityAssurance, {
9220
+ anchor: bytes(hash_HASH_SIZE).asOpaque(),
9317
9221
  bitfield: codecWithContext((context) => {
9318
- return codec_codec.bitVecFixLen(context.coresCount);
9222
+ return bitVecFixLen(context.coresCount);
9319
9223
  }),
9320
- validatorIndex: codec_codec.u16.asOpaque(),
9321
- signature: codec_codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
9224
+ validatorIndex: u16.asOpaque(),
9225
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
9322
9226
  });
9323
9227
  static create({ anchor, bitfield, validatorIndex, signature }) {
9324
9228
  return new AvailabilityAssurance(anchor, bitfield, validatorIndex, signature);
@@ -9410,11 +9314,11 @@ class Fault extends WithDebug {
9410
9314
  wasConsideredValid;
9411
9315
  key;
9412
9316
  signature;
9413
- static Codec = codec_codec.Class(Fault, {
9414
- workReportHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9415
- wasConsideredValid: codec_codec.bool,
9416
- key: codec_codec.bytes(ED25519_KEY_BYTES).asOpaque(),
9417
- signature: codec_codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
9317
+ static Codec = Class(Fault, {
9318
+ workReportHash: bytes(hash_HASH_SIZE).asOpaque(),
9319
+ wasConsideredValid: bool,
9320
+ key: bytes(ED25519_KEY_BYTES).asOpaque(),
9321
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
9418
9322
  });
9419
9323
  static create({ workReportHash, wasConsideredValid, key, signature }) {
9420
9324
  return new Fault(workReportHash, wasConsideredValid, key, signature);
@@ -9442,10 +9346,10 @@ class Culprit extends WithDebug {
9442
9346
  workReportHash;
9443
9347
  key;
9444
9348
  signature;
9445
- static Codec = codec_codec.Class(Culprit, {
9446
- workReportHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9447
- key: codec_codec.bytes(ED25519_KEY_BYTES).asOpaque(),
9448
- signature: codec_codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
9349
+ static Codec = Class(Culprit, {
9350
+ workReportHash: bytes(hash_HASH_SIZE).asOpaque(),
9351
+ key: bytes(ED25519_KEY_BYTES).asOpaque(),
9352
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
9449
9353
  });
9450
9354
  static create({ workReportHash, key, signature }) {
9451
9355
  return new Culprit(workReportHash, key, signature);
@@ -9470,10 +9374,10 @@ class Judgement extends WithDebug {
9470
9374
  isWorkReportValid;
9471
9375
  index;
9472
9376
  signature;
9473
- static Codec = codec_codec.Class(Judgement, {
9474
- isWorkReportValid: codec_codec.bool,
9475
- index: codec_codec.u16.asOpaque(),
9476
- signature: codec_codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
9377
+ static Codec = Class(Judgement, {
9378
+ isWorkReportValid: bool,
9379
+ index: u16.asOpaque(),
9380
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
9477
9381
  });
9478
9382
  static create({ isWorkReportValid, index, signature }) {
9479
9383
  return new Judgement(isWorkReportValid, index, signature);
@@ -9502,12 +9406,11 @@ class Verdict extends WithDebug {
9502
9406
  workReportHash;
9503
9407
  votesEpoch;
9504
9408
  votes;
9505
- static Codec = codec_codec.Class(Verdict, {
9506
- workReportHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9507
- votesEpoch: codec_codec.u32.asOpaque(),
9409
+ static Codec = Class(Verdict, {
9410
+ workReportHash: bytes(hash_HASH_SIZE).asOpaque(),
9411
+ votesEpoch: u32.asOpaque(),
9508
9412
  votes: codecWithContext((context) => {
9509
- return codec_codec
9510
- .readonlyArray(codec_codec.sequenceFixLen(Judgement.Codec, context.validatorsSuperMajority))
9413
+ return readonlyArray(sequenceFixLen(Judgement.Codec, context.validatorsSuperMajority))
9511
9414
  .convert(seeThrough, sized_array_asKnownSize);
9512
9415
  }),
9513
9416
  });
@@ -9549,10 +9452,10 @@ class DisputesExtrinsic extends WithDebug {
9549
9452
  verdicts;
9550
9453
  culprits;
9551
9454
  faults;
9552
- static Codec = codec_codec.Class(DisputesExtrinsic, {
9553
- verdicts: codec_codec.sequenceVarLen(Verdict.Codec),
9554
- culprits: codec_codec.sequenceVarLen(Culprit.Codec),
9555
- faults: codec_codec.sequenceVarLen(Fault.Codec),
9455
+ static Codec = Class(DisputesExtrinsic, {
9456
+ verdicts: sequenceVarLen(Verdict.Codec),
9457
+ culprits: sequenceVarLen(Culprit.Codec),
9458
+ faults: sequenceVarLen(Fault.Codec),
9556
9459
  });
9557
9460
  static create({ verdicts, culprits, faults }) {
9558
9461
  return new DisputesExtrinsic(verdicts, culprits, faults);
@@ -9598,9 +9501,9 @@ class DisputesExtrinsic extends WithDebug {
9598
9501
  class WorkPackageInfo extends WithDebug {
9599
9502
  workPackageHash;
9600
9503
  segmentTreeRoot;
9601
- static Codec = codec_codec.Class(WorkPackageInfo, {
9602
- workPackageHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9603
- segmentTreeRoot: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9504
+ static Codec = Class(WorkPackageInfo, {
9505
+ workPackageHash: bytes(hash_HASH_SIZE).asOpaque(),
9506
+ segmentTreeRoot: bytes(hash_HASH_SIZE).asOpaque(),
9604
9507
  });
9605
9508
  constructor(
9606
9509
  /** Hash of the described work package. */
@@ -9628,13 +9531,13 @@ class RefineContext extends WithDebug {
9628
9531
  lookupAnchor;
9629
9532
  lookupAnchorSlot;
9630
9533
  prerequisites;
9631
- static Codec = codec_codec.Class(RefineContext, {
9632
- anchor: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9633
- stateRoot: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9634
- beefyRoot: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9635
- lookupAnchor: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9636
- lookupAnchorSlot: codec_codec.u32.asOpaque(),
9637
- prerequisites: codec_codec.sequenceVarLen(codec_codec.bytes(hash_HASH_SIZE).asOpaque()),
9534
+ static Codec = Class(RefineContext, {
9535
+ anchor: bytes(hash_HASH_SIZE).asOpaque(),
9536
+ stateRoot: bytes(hash_HASH_SIZE).asOpaque(),
9537
+ beefyRoot: bytes(hash_HASH_SIZE).asOpaque(),
9538
+ lookupAnchor: bytes(hash_HASH_SIZE).asOpaque(),
9539
+ lookupAnchorSlot: u32.asOpaque(),
9540
+ prerequisites: sequenceVarLen(bytes(hash_HASH_SIZE).asOpaque()),
9638
9541
  });
9639
9542
  static create({ anchor, stateRoot, beefyRoot, lookupAnchor, lookupAnchorSlot, prerequisites, }) {
9640
9543
  return new RefineContext(anchor, stateRoot, beefyRoot, lookupAnchor, lookupAnchorSlot, prerequisites);
@@ -9691,9 +9594,9 @@ const tryAsSegmentIndex = (v) => asOpaqueType(tryAsU16(v));
9691
9594
  class ImportSpec extends WithDebug {
9692
9595
  treeRoot;
9693
9596
  index;
9694
- static Codec = codec_codec.Class(ImportSpec, {
9695
- treeRoot: codec_codec.bytes(hash_HASH_SIZE),
9696
- index: codec_codec.u16.asOpaque(),
9597
+ static Codec = Class(ImportSpec, {
9598
+ treeRoot: bytes(hash_HASH_SIZE),
9599
+ index: u16.asOpaque(),
9697
9600
  });
9698
9601
  static create({ treeRoot, index }) {
9699
9602
  return new ImportSpec(treeRoot, index);
@@ -9715,9 +9618,9 @@ class ImportSpec extends WithDebug {
9715
9618
  class WorkItemExtrinsicSpec extends WithDebug {
9716
9619
  hash;
9717
9620
  len;
9718
- static Codec = codec_codec.Class(WorkItemExtrinsicSpec, {
9719
- hash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9720
- len: codec_codec.u32,
9621
+ static Codec = Class(WorkItemExtrinsicSpec, {
9622
+ hash: bytes(hash_HASH_SIZE).asOpaque(),
9623
+ len: u32,
9721
9624
  });
9722
9625
  static create({ hash, len }) {
9723
9626
  return new WorkItemExtrinsicSpec(hash, len);
@@ -9777,19 +9680,19 @@ class work_item_WorkItem extends WithDebug {
9777
9680
  importSegments;
9778
9681
  extrinsic;
9779
9682
  exportCount;
9780
- static Codec = codec_codec.Class(work_item_WorkItem, {
9781
- service: codec_codec.u32.asOpaque(),
9782
- codeHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9783
- refineGasLimit: codec_codec.u64.asOpaque(),
9784
- accumulateGasLimit: codec_codec.u64.asOpaque(),
9785
- exportCount: codec_codec.u16,
9786
- payload: codec_codec.blob,
9683
+ static Codec = Class(work_item_WorkItem, {
9684
+ service: u32.asOpaque(),
9685
+ codeHash: bytes(hash_HASH_SIZE).asOpaque(),
9686
+ refineGasLimit: u64.asOpaque(),
9687
+ accumulateGasLimit: u64.asOpaque(),
9688
+ exportCount: u16,
9689
+ payload: blob,
9787
9690
  importSegments: codecKnownSizeArray(ImportSpec.Codec, {
9788
9691
  minLength: 0,
9789
9692
  maxLength: MAX_NUMBER_OF_SEGMENTS,
9790
9693
  typicalLength: MAX_NUMBER_OF_SEGMENTS,
9791
9694
  }),
9792
- extrinsic: codec_codec.sequenceVarLen(WorkItemExtrinsicSpec.Codec),
9695
+ extrinsic: sequenceVarLen(WorkItemExtrinsicSpec.Codec),
9793
9696
  });
9794
9697
  static create({ service, codeHash, payload, refineGasLimit, accumulateGasLimit, importSegments, extrinsic, exportCount, }) {
9795
9698
  return new work_item_WorkItem(service, codeHash, payload, refineGasLimit, accumulateGasLimit, importSegments, extrinsic, exportCount);
@@ -9862,13 +9765,13 @@ class work_package_WorkPackage extends WithDebug {
9862
9765
  parametrization;
9863
9766
  context;
9864
9767
  items;
9865
- static Codec = codec_codec.Class(work_package_WorkPackage, {
9866
- authCodeHost: codec_codec.u32.asOpaque(),
9867
- authCodeHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
9768
+ static Codec = Class(work_package_WorkPackage, {
9769
+ authCodeHost: u32.asOpaque(),
9770
+ authCodeHash: bytes(hash_HASH_SIZE).asOpaque(),
9868
9771
  context: RefineContext.Codec,
9869
- authorization: codec_codec.blob,
9870
- parametrization: codec_codec.blob,
9871
- items: codec_codec.sequenceVarLen(work_item_WorkItem.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
9772
+ authorization: blob,
9773
+ parametrization: blob,
9774
+ items: sequenceVarLen(work_item_WorkItem.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
9872
9775
  });
9873
9776
  static create({ authorization, authCodeHost, authCodeHash, parametrization, context, items, }) {
9874
9777
  return new work_package_WorkPackage(authorization, authCodeHost, authCodeHash, parametrization, context, items);
@@ -9928,22 +9831,30 @@ var WorkExecResultKind;
9928
9831
  class WorkExecResult extends WithDebug {
9929
9832
  kind;
9930
9833
  okBlob;
9931
- static Codec = codec_codec
9932
- .union("WorkExecResult", {
9933
- [WorkExecResultKind.ok]: codec_codec.object({ okBlob: codec_codec.blob }),
9934
- [WorkExecResultKind.outOfGas]: codec_codec.object({}),
9935
- [WorkExecResultKind.panic]: codec_codec.object({}),
9936
- [WorkExecResultKind.incorrectNumberOfExports]: codec_codec.object({}),
9937
- [WorkExecResultKind.digestTooBig]: codec_codec.object({}),
9938
- [WorkExecResultKind.badCode]: codec_codec.object({}),
9939
- [WorkExecResultKind.codeOversize]: codec_codec.object({}),
9940
- })
9941
- .convert((x) => {
9942
- if (x.kind === WorkExecResultKind.ok) {
9943
- return { kind: WorkExecResultKind.ok, okBlob: x.okBlob ?? bytes_BytesBlob.empty() };
9944
- }
9945
- return { kind: x.kind };
9946
- }, (x) => new WorkExecResult(x.kind, x.kind === WorkExecResultKind.ok ? x.okBlob : null));
9834
+ static Codec = custom({
9835
+ name: "WorkExecResult",
9836
+ sizeHint: { bytes: 1, isExact: false },
9837
+ }, (e, x) => {
9838
+ e.varU32(numbers_tryAsU32(x.kind));
9839
+ if (x.kind === WorkExecResultKind.ok && x.okBlob !== null) {
9840
+ e.bytesBlob(x.okBlob);
9841
+ }
9842
+ }, (d) => {
9843
+ const kind = d.varU32();
9844
+ if (kind === WorkExecResultKind.ok) {
9845
+ const blob = d.bytesBlob();
9846
+ return new WorkExecResult(kind, blob);
9847
+ }
9848
+ if (kind > WorkExecResultKind.codeOversize) {
9849
+ throw new Error(`Invalid WorkExecResultKind: ${kind}`);
9850
+ }
9851
+ return new WorkExecResult(kind);
9852
+ }, (s) => {
9853
+ const kind = s.decoder.varU32();
9854
+ if (kind === WorkExecResultKind.ok) {
9855
+ s.bytesBlob();
9856
+ }
9857
+ });
9947
9858
  constructor(
9948
9859
  /** The execution result tag. */
9949
9860
  kind,
@@ -9967,12 +9878,12 @@ class WorkRefineLoad extends WithDebug {
9967
9878
  extrinsicCount;
9968
9879
  extrinsicSize;
9969
9880
  exportedSegments;
9970
- static Codec = codec_codec.Class(WorkRefineLoad, {
9971
- gasUsed: codec_codec.varU64.asOpaque(),
9972
- importedSegments: codec_codec.varU32,
9973
- extrinsicCount: codec_codec.varU32,
9974
- extrinsicSize: codec_codec.varU32,
9975
- exportedSegments: codec_codec.varU32,
9881
+ static Codec = Class(WorkRefineLoad, {
9882
+ gasUsed: varU64.asOpaque(),
9883
+ importedSegments: varU32,
9884
+ extrinsicCount: varU32,
9885
+ extrinsicSize: varU32,
9886
+ exportedSegments: varU32,
9976
9887
  });
9977
9888
  static create({ gasUsed, importedSegments, extrinsicCount, extrinsicSize, exportedSegments, }) {
9978
9889
  return new WorkRefineLoad(gasUsed, importedSegments, extrinsicCount, extrinsicSize, exportedSegments);
@@ -10008,11 +9919,11 @@ class WorkResult {
10008
9919
  gas;
10009
9920
  result;
10010
9921
  load;
10011
- static Codec = codec_codec.Class(WorkResult, {
10012
- serviceId: codec_codec.u32.asOpaque(),
10013
- codeHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10014
- payloadHash: codec_codec.bytes(hash_HASH_SIZE),
10015
- gas: codec_codec.u64.asOpaque(),
9922
+ static Codec = Class(WorkResult, {
9923
+ serviceId: u32.asOpaque(),
9924
+ codeHash: bytes(hash_HASH_SIZE).asOpaque(),
9925
+ payloadHash: bytes(hash_HASH_SIZE),
9926
+ gas: u64.asOpaque(),
10016
9927
  result: WorkExecResult.Codec,
10017
9928
  load: WorkRefineLoad.Codec,
10018
9929
  });
@@ -10079,12 +9990,12 @@ class WorkPackageSpec extends WithDebug {
10079
9990
  erasureRoot;
10080
9991
  exportsRoot;
10081
9992
  exportsCount;
10082
- static Codec = codec_codec.Class(WorkPackageSpec, {
10083
- hash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10084
- length: codec_codec.u32,
10085
- erasureRoot: codec_codec.bytes(hash_HASH_SIZE),
10086
- exportsRoot: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10087
- exportsCount: codec_codec.u16,
9993
+ static Codec = Class(WorkPackageSpec, {
9994
+ hash: bytes(hash_HASH_SIZE).asOpaque(),
9995
+ length: u32,
9996
+ erasureRoot: bytes(hash_HASH_SIZE),
9997
+ exportsRoot: bytes(hash_HASH_SIZE).asOpaque(),
9998
+ exportsCount: u16,
10088
9999
  });
10089
10000
  static create({ hash, length, erasureRoot, exportsRoot, exportsCount }) {
10090
10001
  return new WorkPackageSpec(hash, length, erasureRoot, exportsRoot, exportsCount);
@@ -10122,20 +10033,20 @@ class WorkReport extends WithDebug {
10122
10033
  segmentRootLookup;
10123
10034
  results;
10124
10035
  authorizationGasUsed;
10125
- static Codec = codec_codec.Class(WorkReport, {
10036
+ static Codec = Class(WorkReport, {
10126
10037
  workPackageSpec: WorkPackageSpec.Codec,
10127
10038
  context: RefineContext.Codec,
10128
- coreIndex: codec_codec.varU32.convert((o) => numbers_tryAsU32(o), (i) => {
10039
+ coreIndex: varU32.convert((o) => numbers_tryAsU32(o), (i) => {
10129
10040
  if (!isU16(i)) {
10130
10041
  throw new Error(`Core index exceeds U16: ${i}`);
10131
10042
  }
10132
10043
  return tryAsCoreIndex(i);
10133
10044
  }),
10134
- authorizerHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10135
- authorizationGasUsed: codec_codec.varU64.asOpaque(),
10136
- authorizationOutput: codec_codec.blob,
10137
- segmentRootLookup: codec_codec.readonlyArray(codec_codec.sequenceVarLen(WorkPackageInfo.Codec)),
10138
- results: codec_codec.sequenceVarLen(WorkResult.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
10045
+ authorizerHash: bytes(hash_HASH_SIZE).asOpaque(),
10046
+ authorizationGasUsed: varU64.asOpaque(),
10047
+ authorizationOutput: blob,
10048
+ segmentRootLookup: readonlyArray(sequenceVarLen(WorkPackageInfo.Codec)),
10049
+ results: sequenceVarLen(WorkResult.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
10139
10050
  });
10140
10051
  static create({ workPackageSpec, context, coreIndex, authorizerHash, authorizationOutput, segmentRootLookup, results, authorizationGasUsed, }) {
10141
10052
  return new WorkReport(workPackageSpec, context, coreIndex, authorizerHash, authorizationOutput, segmentRootLookup, results, authorizationGasUsed);
@@ -10183,9 +10094,9 @@ const REQUIRED_CREDENTIALS_RANGE = [2, 3];
10183
10094
  class Credential extends WithDebug {
10184
10095
  validatorIndex;
10185
10096
  signature;
10186
- static Codec = codec_codec.Class(Credential, {
10187
- validatorIndex: codec_codec.u16.asOpaque(),
10188
- signature: codec_codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
10097
+ static Codec = Class(Credential, {
10098
+ validatorIndex: u16.asOpaque(),
10099
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
10189
10100
  });
10190
10101
  static create({ validatorIndex, signature }) {
10191
10102
  return new Credential(validatorIndex, signature);
@@ -10209,9 +10120,9 @@ class ReportGuarantee extends WithDebug {
10209
10120
  report;
10210
10121
  slot;
10211
10122
  credentials;
10212
- static Codec = codec_codec.Class(ReportGuarantee, {
10123
+ static Codec = Class(ReportGuarantee, {
10213
10124
  report: WorkReport.Codec,
10214
- slot: codec_codec.u32.asOpaque(),
10125
+ slot: u32.asOpaque(),
10215
10126
  credentials: codecKnownSizeArray(Credential.Codec, {
10216
10127
  minLength: REQUIRED_CREDENTIALS_RANGE[0],
10217
10128
  maxLength: REQUIRED_CREDENTIALS_RANGE[1],
@@ -10261,10 +10172,10 @@ function tryAsTicketAttempt(x) {
10261
10172
  class SignedTicket extends WithDebug {
10262
10173
  attempt;
10263
10174
  signature;
10264
- static Codec = codec_codec.Class(SignedTicket, {
10175
+ static Codec = Class(SignedTicket, {
10265
10176
  // TODO [ToDr] we should verify that attempt is either 0|1|2.
10266
- attempt: codec_codec.u8.asOpaque(),
10267
- signature: codec_codec.bytes(BANDERSNATCH_PROOF_BYTES).asOpaque(),
10177
+ attempt: u8.asOpaque(),
10178
+ signature: bytes(BANDERSNATCH_PROOF_BYTES).asOpaque(),
10268
10179
  });
10269
10180
  static create({ attempt, signature }) {
10270
10181
  return new SignedTicket(attempt, signature);
@@ -10283,10 +10194,10 @@ class SignedTicket extends WithDebug {
10283
10194
  class Ticket extends WithDebug {
10284
10195
  id;
10285
10196
  attempt;
10286
- static Codec = codec_codec.Class(Ticket, {
10287
- id: codec_codec.bytes(hash_HASH_SIZE),
10197
+ static Codec = Class(Ticket, {
10198
+ id: bytes(hash_HASH_SIZE),
10288
10199
  // TODO [ToDr] we should verify that attempt is either 0|1|2.
10289
- attempt: codec_codec.u8.asOpaque(),
10200
+ attempt: u8.asOpaque(),
10290
10201
  });
10291
10202
  static create({ id, attempt }) {
10292
10203
  return new Ticket(id, attempt);
@@ -10338,9 +10249,9 @@ const ticketsExtrinsicCodec = codecWithContext((context) => {
10338
10249
  class ValidatorKeys extends WithDebug {
10339
10250
  bandersnatch;
10340
10251
  ed25519;
10341
- static Codec = codec_codec.Class(ValidatorKeys, {
10342
- bandersnatch: codec_codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
10343
- ed25519: codec_codec.bytes(ED25519_KEY_BYTES).asOpaque(),
10252
+ static Codec = Class(ValidatorKeys, {
10253
+ bandersnatch: bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
10254
+ ed25519: bytes(ED25519_KEY_BYTES).asOpaque(),
10344
10255
  });
10345
10256
  static create({ bandersnatch, ed25519 }) {
10346
10257
  return new ValidatorKeys(bandersnatch, ed25519);
@@ -10357,7 +10268,7 @@ class ValidatorKeys extends WithDebug {
10357
10268
  }
10358
10269
  class TicketsMarker extends WithDebug {
10359
10270
  tickets;
10360
- static Codec = codec_codec.Class(TicketsMarker, {
10271
+ static Codec = Class(TicketsMarker, {
10361
10272
  tickets: codecPerEpochBlock(Ticket.Codec),
10362
10273
  });
10363
10274
  static create({ tickets }) {
@@ -10379,9 +10290,9 @@ class EpochMarker extends WithDebug {
10379
10290
  entropy;
10380
10291
  ticketsEntropy;
10381
10292
  validators;
10382
- static Codec = codec_codec.Class(EpochMarker, {
10383
- entropy: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10384
- ticketsEntropy: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10293
+ static Codec = Class(EpochMarker, {
10294
+ entropy: bytes(hash_HASH_SIZE).asOpaque(),
10295
+ ticketsEntropy: bytes(hash_HASH_SIZE).asOpaque(),
10385
10296
  validators: codecPerValidator(ValidatorKeys.Codec),
10386
10297
  });
10387
10298
  static create({ entropy, ticketsEntropy, validators }) {
@@ -10418,17 +10329,17 @@ const encodeUnsealedHeader = (view) => {
10418
10329
  * https://graypaper.fluffylabs.dev/#/ab2cdbd/0c66000c7200?v=0.7.2
10419
10330
  */
10420
10331
  class header_Header extends WithDebug {
10421
- static Codec = codec_codec.Class(header_Header, {
10422
- parentHeaderHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10423
- priorStateRoot: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10424
- extrinsicHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10425
- timeSlotIndex: codec_codec.u32.asOpaque(),
10426
- epochMarker: codec_codec.optional(EpochMarker.Codec),
10427
- ticketsMarker: codec_codec.optional(TicketsMarker.Codec),
10428
- bandersnatchBlockAuthorIndex: codec_codec.u16.asOpaque(),
10429
- entropySource: codec_codec.bytes(bandersnatch_BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque(),
10430
- offendersMarker: codec_codec.sequenceVarLen(codec_codec.bytes(ED25519_KEY_BYTES).asOpaque()),
10431
- seal: codec_codec.bytes(bandersnatch_BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque(),
10332
+ static Codec = Class(header_Header, {
10333
+ parentHeaderHash: bytes(hash_HASH_SIZE).asOpaque(),
10334
+ priorStateRoot: bytes(hash_HASH_SIZE).asOpaque(),
10335
+ extrinsicHash: bytes(hash_HASH_SIZE).asOpaque(),
10336
+ timeSlotIndex: u32.asOpaque(),
10337
+ epochMarker: optional(EpochMarker.Codec),
10338
+ ticketsMarker: optional(TicketsMarker.Codec),
10339
+ bandersnatchBlockAuthorIndex: u16.asOpaque(),
10340
+ entropySource: bytes(bandersnatch_BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque(),
10341
+ offendersMarker: sequenceVarLen(bytes(ED25519_KEY_BYTES).asOpaque()),
10342
+ seal: bytes(bandersnatch_BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque(),
10432
10343
  });
10433
10344
  static create(h) {
10434
10345
  return Object.assign(header_Header.empty(), h);
@@ -10483,8 +10394,8 @@ class header_Header extends WithDebug {
10483
10394
  * `DescriptorRecord` or `CodecRecord` for some reason.
10484
10395
  */
10485
10396
  class HeaderViewWithHash extends WithHash {
10486
- static Codec = codec_codec.Class(HeaderViewWithHash, {
10487
- hash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
10397
+ static Codec = Class(HeaderViewWithHash, {
10398
+ hash: bytes(hash_HASH_SIZE).asOpaque(),
10488
10399
  data: header_Header.Codec.View,
10489
10400
  });
10490
10401
  static create({ hash, data }) {
@@ -10505,9 +10416,9 @@ const headerViewWithHashCodec = HeaderViewWithHash.Codec;
10505
10416
  class Preimage extends WithDebug {
10506
10417
  requester;
10507
10418
  blob;
10508
- static Codec = codec_codec.Class(Preimage, {
10509
- requester: codec_codec.u32.asOpaque(),
10510
- blob: codec_codec.blob,
10419
+ static Codec = Class(Preimage, {
10420
+ requester: u32.asOpaque(),
10421
+ blob: blob,
10511
10422
  });
10512
10423
  static create({ requester, blob }) {
10513
10424
  return new Preimage(requester, blob);
@@ -10522,7 +10433,7 @@ class Preimage extends WithDebug {
10522
10433
  this.blob = blob;
10523
10434
  }
10524
10435
  }
10525
- const preimagesExtrinsicCodec = codec_codec.sequenceVarLen(Preimage.Codec);
10436
+ const preimagesExtrinsicCodec = sequenceVarLen(Preimage.Codec);
10526
10437
 
10527
10438
  ;// CONCATENATED MODULE: ./packages/jam/block/block.ts
10528
10439
 
@@ -10548,7 +10459,7 @@ class Extrinsic extends WithDebug {
10548
10459
  guarantees;
10549
10460
  assurances;
10550
10461
  disputes;
10551
- static Codec = codec_codec.Class(Extrinsic, {
10462
+ static Codec = Class(Extrinsic, {
10552
10463
  tickets: ticketsExtrinsicCodec,
10553
10464
  preimages: preimagesExtrinsicCodec,
10554
10465
  guarantees: guaranteesExtrinsicCodec,
@@ -10601,7 +10512,7 @@ class Extrinsic extends WithDebug {
10601
10512
  class Block extends WithDebug {
10602
10513
  header;
10603
10514
  extrinsic;
10604
- static Codec = codec_codec.Class(Block, {
10515
+ static Codec = Class(Block, {
10605
10516
  header: header_Header.Codec,
10606
10517
  extrinsic: Extrinsic.Codec,
10607
10518
  });
@@ -11582,10 +11493,10 @@ class Version extends WithDebug {
11582
11493
  major;
11583
11494
  minor;
11584
11495
  patch;
11585
- static Codec = codec_codec.Class(Version, {
11586
- major: codec_codec.u8,
11587
- minor: codec_codec.u8,
11588
- patch: codec_codec.u8,
11496
+ static Codec = Class(Version, {
11497
+ major: u8,
11498
+ minor: u8,
11499
+ patch: u8,
11589
11500
  });
11590
11501
  static tryFromString(str) {
11591
11502
  const parse = (v) => tryAsU8(Number(v));
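`Version` is a plain struct of three `u8` fields, so (assuming the `Class` codec writes fields in declaration order, as the surrounding struct codecs read them) a version encodes to exactly three bytes. A hypothetical worked example, not taken from the package:

    // Version.tryFromString("0.7.1") -> { major: 0, minor: 7, patch: 1 }
    const encoded = Uint8Array.of(0, 7, 1); // major, minor, patch: 3 bytes total
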
@@ -11637,12 +11548,12 @@ class PeerInfo extends WithDebug {
11637
11548
  jamVersion;
11638
11549
  appVersion;
11639
11550
  name;
11640
- static Codec = codec_codec.Class(PeerInfo, {
11641
- fuzzVersion: codec_codec.u8,
11642
- features: codec_codec.u32,
11551
+ static Codec = Class(PeerInfo, {
11552
+ fuzzVersion: u8,
11553
+ features: u32,
11643
11554
  jamVersion: Version.Codec,
11644
11555
  appVersion: Version.Codec,
11645
- name: codec_codec.string,
11556
+ name: string,
11646
11557
  });
11647
11558
  static create({ fuzzVersion, features, appVersion, jamVersion, name }) {
11648
11559
  return new PeerInfo(fuzzVersion, features, jamVersion, appVersion, name);
@@ -11665,9 +11576,9 @@ class PeerInfo extends WithDebug {
11665
11576
  class AncestryItem extends WithDebug {
11666
11577
  slot;
11667
11578
  headerHash;
11668
- static Codec = codec_codec.Class(AncestryItem, {
11669
- slot: codec_codec.u32.asOpaque(),
11670
- headerHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
11579
+ static Codec = Class(AncestryItem, {
11580
+ slot: u32.asOpaque(),
11581
+ headerHash: bytes(hash_HASH_SIZE).asOpaque(),
11671
11582
  });
11672
11583
  static create({ slot, headerHash }) {
11673
11584
  return new AncestryItem(slot, headerHash);
@@ -11687,9 +11598,9 @@ class AncestryItem extends WithDebug {
11687
11598
  class KeyValue extends WithDebug {
11688
11599
  key;
11689
11600
  value;
11690
- static Codec = codec_codec.Class(KeyValue, {
11691
- key: codec_codec.bytes(TRUNCATED_HASH_SIZE),
11692
- value: codec_codec.blob,
11601
+ static Codec = Class(KeyValue, {
11602
+ key: bytes(TRUNCATED_HASH_SIZE),
11603
+ value: blob,
11693
11604
  });
11694
11605
  static create({ key, value }) {
11695
11606
  return new KeyValue(key, value);
@@ -11701,12 +11612,12 @@ class KeyValue extends WithDebug {
11701
11612
  }
11702
11613
  }
11703
11614
  /** State ::= SEQUENCE OF KeyValue */
11704
- const stateCodec = codec_codec.sequenceVarLen(KeyValue.Codec);
11615
+ const stateCodec = sequenceVarLen(KeyValue.Codec);
11705
11616
  /**
11706
11617
  * Ancestry ::= SEQUENCE (SIZE(0..24)) OF AncestryItem
11707
11618
  * Empty when `feature-ancestry` is not supported by both parties
11708
11619
  */
11709
- const ancestryCodec = codec_codec.sequenceVarLen(AncestryItem.Codec, {
11620
+ const ancestryCodec = sequenceVarLen(AncestryItem.Codec, {
11710
11621
  minLength: 0,
11711
11622
  maxLength: 24,
11712
11623
  });
@@ -11721,7 +11632,7 @@ class Initialize extends WithDebug {
11721
11632
  header;
11722
11633
  keyvals;
11723
11634
  ancestry;
11724
- static Codec = codec_codec.Class(Initialize, {
11635
+ static Codec = Class(Initialize, {
11725
11636
  header: header_Header.Codec,
11726
11637
  keyvals: stateCodec,
11727
11638
  ancestry: ancestryCodec,
@@ -11737,14 +11648,14 @@ class Initialize extends WithDebug {
11737
11648
  }
11738
11649
  }
11739
11650
  /** GetState ::= HeaderHash */
11740
- const getStateCodec = codec_codec.bytes(hash_HASH_SIZE).asOpaque();
11651
+ const getStateCodec = bytes(hash_HASH_SIZE).asOpaque();
11741
11652
  /** StateRoot ::= StateRootHash */
11742
- const stateRootCodec = codec_codec.bytes(hash_HASH_SIZE).asOpaque();
11653
+ const stateRootCodec = bytes(hash_HASH_SIZE).asOpaque();
11743
11654
  /** Error ::= UTF8String */
11744
11655
  class ErrorMessage extends WithDebug {
11745
11656
  message;
11746
- static Codec = codec_codec.Class(ErrorMessage, {
11747
- message: codec_codec.string,
11657
+ static Codec = Class(ErrorMessage, {
11658
+ message: string,
11748
11659
  });
11749
11660
  static create({ message }) {
11750
11661
  return new ErrorMessage(message);
@@ -11776,7 +11687,7 @@ var types_MessageType;
11776
11687
  * error [255] Error
11777
11688
  * }
11778
11689
  */
11779
- const types_messageCodec = codec_codec.custom({
11690
+ const types_messageCodec = custom({
11780
11691
  name: "Message",
11781
11692
  sizeHint: { bytes: 1, isExact: false },
11782
11693
  }, (e, msg) => {
@@ -12142,9 +12053,9 @@ function getRegisters(argsLength) {
12142
12053
  class AccumulationOutput {
12143
12054
  serviceId;
12144
12055
  output;
12145
- static Codec = codec_codec.Class(AccumulationOutput, {
12146
- serviceId: codec_codec.u32.asOpaque(),
12147
- output: codec_codec.bytes(hash_HASH_SIZE),
12056
+ static Codec = Class(AccumulationOutput, {
12057
+ serviceId: u32.asOpaque(),
12058
+ output: bytes(hash_HASH_SIZE),
12148
12059
  });
12149
12060
  static create(a) {
12150
12061
  return new AccumulationOutput(a.serviceId, a.output);
@@ -12235,9 +12146,9 @@ const MAX_REPORT_DEPENDENCIES = 8;
12235
12146
  class NotYetAccumulatedReport extends WithDebug {
12236
12147
  report;
12237
12148
  dependencies;
12238
- static Codec = codec_codec.Class(NotYetAccumulatedReport, {
12149
+ static Codec = Class(NotYetAccumulatedReport, {
12239
12150
  report: WorkReport.Codec,
12240
- dependencies: codecKnownSizeArray(codec_codec.bytes(hash_HASH_SIZE).asOpaque(), {
12151
+ dependencies: codecKnownSizeArray(bytes(hash_HASH_SIZE).asOpaque(), {
12241
12152
  typicalLength: MAX_REPORT_DEPENDENCIES / 2,
12242
12153
  maxLength: MAX_REPORT_DEPENDENCIES,
12243
12154
  minLength: 0,
@@ -12264,7 +12175,7 @@ class NotYetAccumulatedReport extends WithDebug {
12264
12175
  this.dependencies = dependencies;
12265
12176
  }
12266
12177
  }
12267
- const accumulationQueueCodec = codecPerEpochBlock(codec_codec.readonlyArray(codec_codec.sequenceVarLen(NotYetAccumulatedReport.Codec)));
12178
+ const accumulationQueueCodec = codecPerEpochBlock(readonlyArray(sequenceVarLen(NotYetAccumulatedReport.Codec)));
12268
12179
 
12269
12180
  ;// CONCATENATED MODULE: ./packages/jam/state/common.ts
12270
12181
 
@@ -12297,9 +12208,9 @@ const codecPerCore = (val) => codecWithContext((context) => {
12297
12208
  class AvailabilityAssignment extends WithDebug {
12298
12209
  workReport;
12299
12210
  timeout;
12300
- static Codec = codec_codec.Class(AvailabilityAssignment, {
12211
+ static Codec = Class(AvailabilityAssignment, {
12301
12212
  workReport: WorkReport.Codec,
12302
- timeout: codec_codec.u32.asOpaque(),
12213
+ timeout: u32.asOpaque(),
12303
12214
  });
12304
12215
  static create({ workReport, timeout }) {
12305
12216
  return new AvailabilityAssignment(workReport, timeout);
@@ -12314,7 +12225,7 @@ class AvailabilityAssignment extends WithDebug {
12314
12225
  this.timeout = timeout;
12315
12226
  }
12316
12227
  }
12317
- const availabilityAssignmentsCodec = codecPerCore(codec_codec.optional(AvailabilityAssignment.Codec));
12228
+ const availabilityAssignmentsCodec = codecPerCore(optional(AvailabilityAssignment.Codec));
12318
12229
 
12319
12230
  ;// CONCATENATED MODULE: ./packages/jam/state/auth.ts
12320
12231
 
@@ -12326,18 +12237,18 @@ const availabilityAssignmentsCodec = codecPerCore(codec_codec.optional(Availabil
12326
12237
  const MAX_AUTH_POOL_SIZE = O;
12327
12238
  /** `Q`: Size of the authorization queue. */
12328
12239
  const AUTHORIZATION_QUEUE_SIZE = Q;
12329
- const authPoolsCodec = codecPerCore(codecKnownSizeArray(codec_codec.bytes(hash_HASH_SIZE).asOpaque(), {
12240
+ const authPoolsCodec = codecPerCore(codecKnownSizeArray(bytes(hash_HASH_SIZE).asOpaque(), {
12330
12241
  minLength: 0,
12331
12242
  maxLength: MAX_AUTH_POOL_SIZE,
12332
12243
  typicalLength: MAX_AUTH_POOL_SIZE,
12333
12244
  }));
12334
- const authQueuesCodec = codecPerCore(codecFixedSizeArray(codec_codec.bytes(hash_HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE));
12245
+ const authQueuesCodec = codecPerCore(codecFixedSizeArray(bytes(hash_HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE));
12335
12246
 
12336
12247
  ;// CONCATENATED MODULE: ./packages/jam/state/disputes.ts
12337
12248
 
12338
12249
 
12339
12250
 
12340
- const sortedSetCodec = () => codec_codec.readonlyArray(codec_codec.sequenceVarLen(codec_codec.bytes(hash_HASH_SIZE))).convert((input) => input.array, (output) => {
12251
+ const sortedSetCodec = () => readonlyArray(sequenceVarLen(bytes(hash_HASH_SIZE))).convert((input) => input.array, (output) => {
12341
12252
  const typed = output.map((x) => x.asOpaque());
12342
12253
  return SortedSet.fromSortedArray(hashComparator, typed);
12343
12254
  });
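`sortedSetCodec` is one more instance of the `.convert` adapter used throughout this file: the first function maps the rich value to its wire shape for encoding (here, the `SortedSet`'s backing array), and the second rebuilds the rich value from the decoded wire shape. A minimal standalone sketch of that adapter idea; the `Codec` interface below is a simplified stand-in, not the library's `Descriptor`:

    interface Codec<W> {
      encode(value: W): Uint8Array;
      decode(bytes: Uint8Array): W;
    }

    // Wrap a codec of the wire type W to obtain a codec of the richer type T.
    function convert<W, T>(inner: Codec<W>, toWire: (value: T) => W, fromWire: (wire: W) => T): Codec<T> {
      return {
        encode: (value) => inner.encode(toWire(value)),
        decode: (bytes) => fromWire(inner.decode(bytes)),
      };
    }
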
@@ -12352,7 +12263,7 @@ class DisputesRecords {
12352
12263
  badSet;
12353
12264
  wonkySet;
12354
12265
  punishSet;
12355
- static Codec = codec_codec.Class(DisputesRecords, {
12266
+ static Codec = Class(DisputesRecords, {
12356
12267
  goodSet: workReportsSortedSetCodec,
12357
12268
  badSet: workReportsSortedSetCodec,
12358
12269
  wonkySet: workReportsSortedSetCodec,
@@ -12425,10 +12336,10 @@ class BlockState extends WithDebug {
12425
12336
  accumulationResult;
12426
12337
  postStateRoot;
12427
12338
  reported;
12428
- static Codec = codec_codec.Class(BlockState, {
12429
- headerHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
12430
- accumulationResult: codec_codec.bytes(hash_HASH_SIZE),
12431
- postStateRoot: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
12339
+ static Codec = Class(BlockState, {
12340
+ headerHash: bytes(hash_HASH_SIZE).asOpaque(),
12341
+ accumulationResult: bytes(hash_HASH_SIZE),
12342
+ postStateRoot: bytes(hash_HASH_SIZE).asOpaque(),
12432
12343
  reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
12433
12344
  });
12434
12345
  static create({ headerHash, accumulationResult, postStateRoot, reported }) {
@@ -12458,14 +12369,14 @@ class BlockState extends WithDebug {
12458
12369
  class RecentBlocks extends WithDebug {
12459
12370
  blocks;
12460
12371
  accumulationLog;
12461
- static Codec = codec_codec.Class(RecentBlocks, {
12372
+ static Codec = Class(RecentBlocks, {
12462
12373
  blocks: codecKnownSizeArray(BlockState.Codec, {
12463
12374
  minLength: 0,
12464
12375
  maxLength: MAX_RECENT_HISTORY,
12465
12376
  typicalLength: MAX_RECENT_HISTORY,
12466
12377
  }),
12467
- accumulationLog: codec_codec.object({
12468
- peaks: codec_codec.readonlyArray(codec_codec.sequenceVarLen(codec_codec.optional(codec_codec.bytes(hash_HASH_SIZE)))),
12378
+ accumulationLog: object({
12379
+ peaks: readonlyArray(sequenceVarLen(optional(bytes(hash_HASH_SIZE)))),
12469
12380
  }),
12470
12381
  });
12471
12382
  static empty() {
@@ -12498,7 +12409,7 @@ class RecentBlocks extends WithDebug {
12498
12409
 
12499
12410
 
12500
12411
 
12501
- const recentlyAccumulatedCodec = codecPerEpochBlock(codec_codec.sequenceVarLen(codec_codec.bytes(hash_HASH_SIZE).asOpaque()).convert((x) => Array.from(x), (x) => HashSet.from(x)));
12412
+ const recentlyAccumulatedCodec = codecPerEpochBlock(sequenceVarLen(bytes(hash_HASH_SIZE).asOpaque()).convert((x) => Array.from(x), (x) => HashSet.from(x)));
12502
12413
 
12503
12414
  ;// CONCATENATED MODULE: ./packages/jam/state/validator-data.ts
12504
12415
 
@@ -12521,11 +12432,11 @@ class ValidatorData extends WithDebug {
12521
12432
  ed25519;
12522
12433
  bls;
12523
12434
  metadata;
12524
- static Codec = codec_codec.Class(ValidatorData, {
12525
- bandersnatch: codec_codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
12526
- ed25519: codec_codec.bytes(ED25519_KEY_BYTES).asOpaque(),
12527
- bls: codec_codec.bytes(BLS_KEY_BYTES).asOpaque(),
12528
- metadata: codec_codec.bytes(VALIDATOR_META_BYTES),
12435
+ static Codec = Class(ValidatorData, {
12436
+ bandersnatch: bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
12437
+ ed25519: bytes(ED25519_KEY_BYTES).asOpaque(),
12438
+ bls: bytes(BLS_KEY_BYTES).asOpaque(),
12439
+ metadata: bytes(VALIDATOR_META_BYTES),
12529
12440
  });
12530
12441
  static create({ ed25519, bandersnatch, bls, metadata }) {
12531
12442
  return new ValidatorData(bandersnatch, ed25519, bls, metadata);
@@ -12558,31 +12469,53 @@ const validatorsDataCodec = codecPerValidator(ValidatorData.Codec);
12558
12469
 
12559
12470
 
12560
12471
 
12472
+
12473
+
12561
12474
  var SafroleSealingKeysKind;
12562
12475
  (function (SafroleSealingKeysKind) {
12563
12476
  SafroleSealingKeysKind[SafroleSealingKeysKind["Tickets"] = 0] = "Tickets";
12564
12477
  SafroleSealingKeysKind[SafroleSealingKeysKind["Keys"] = 1] = "Keys";
12565
12478
  })(SafroleSealingKeysKind || (SafroleSealingKeysKind = {}));
12566
- const codecBandersnatchKey = codec_codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque();
12479
+ const codecBandersnatchKey = bytes(BANDERSNATCH_KEY_BYTES).asOpaque();
12567
12480
  class SafroleSealingKeysData extends WithDebug {
12568
12481
  kind;
12569
12482
  keys;
12570
12483
  tickets;
12571
12484
  static Codec = codecWithContext((context) => {
12572
- const keysCodec = codec_codec
12573
- .sequenceFixLen(codecBandersnatchKey, context.epochLength)
12574
- .convert((keys) => Array.from(keys), (keys) => tryAsPerEpochBlock(keys, context));
12575
- const ticketsCodec = codec_codec.sequenceFixLen(Ticket.Codec, context.epochLength).convert((tickets) => Array.from(tickets), (tickets) => tryAsPerEpochBlock(tickets, context));
12576
- return codec_codec
12577
- .union("SafroleSealingKeys", {
12578
- [SafroleSealingKeysKind.Keys]: codec_codec.object({ keys: keysCodec }),
12579
- [SafroleSealingKeysKind.Tickets]: codec_codec.object({ tickets: ticketsCodec }),
12580
- })
12581
- .convert((x) => x, (x) => {
12485
+ return custom({
12486
+ name: "SafroleSealingKeys",
12487
+ sizeHint: { bytes: 1 + hash_HASH_SIZE * context.epochLength, isExact: false },
12488
+ }, (e, x) => {
12489
+ e.varU32(numbers_tryAsU32(x.kind));
12582
12490
  if (x.kind === SafroleSealingKeysKind.Keys) {
12583
- return SafroleSealingKeysData.keys(x.keys);
12491
+ e.sequenceFixLen(codecBandersnatchKey, x.keys);
12492
+ }
12493
+ else {
12494
+ e.sequenceFixLen(Ticket.Codec, x.tickets);
12495
+ }
12496
+ }, (d) => {
12497
+ const epochLength = context.epochLength;
12498
+ const kind = d.varU32();
12499
+ if (kind === SafroleSealingKeysKind.Keys) {
12500
+ const keys = d.sequenceFixLen(codecBandersnatchKey, epochLength);
12501
+ return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
12502
+ }
12503
+ if (kind === SafroleSealingKeysKind.Tickets) {
12504
+ const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
12505
+ return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
12506
+ }
12507
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
12508
+ }, (s) => {
12509
+ const kind = s.decoder.varU32();
12510
+ if (kind === SafroleSealingKeysKind.Keys) {
12511
+ s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
12512
+ return;
12513
+ }
12514
+ if (kind === SafroleSealingKeysKind.Tickets) {
12515
+ s.sequenceFixLen(Ticket.Codec, context.epochLength);
12516
+ return;
12584
12517
  }
12585
- return SafroleSealingKeysData.tickets(x.tickets);
12518
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
12586
12519
  });
12587
12520
  });
12588
12521
  static keys(keys) {
@@ -12603,11 +12536,11 @@ class safrole_data_SafroleData {
12603
12536
  epochRoot;
12604
12537
  sealingKeySeries;
12605
12538
  ticketsAccumulator;
12606
- static Codec = codec_codec.Class(safrole_data_SafroleData, {
12539
+ static Codec = Class(safrole_data_SafroleData, {
12607
12540
  nextValidatorData: codecPerValidator(ValidatorData.Codec),
12608
- epochRoot: codec_codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
12541
+ epochRoot: bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
12609
12542
  sealingKeySeries: SafroleSealingKeysData.Codec,
12610
- ticketsAccumulator: codec_codec.readonlyArray(codec_codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, sized_array_asKnownSize),
12543
+ ticketsAccumulator: readonlyArray(sequenceVarLen(Ticket.Codec)).convert(seeThrough, sized_array_asKnownSize),
12611
12544
  });
12612
12545
  static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }) {
12613
12546
  return new safrole_data_SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
@@ -12692,17 +12625,17 @@ class ServiceAccountInfo extends WithDebug {
12692
12625
  created;
12693
12626
  lastAccumulation;
12694
12627
  parentService;
12695
- static Codec = codec_codec.Class(ServiceAccountInfo, {
12696
- codeHash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
12697
- balance: codec_codec.u64,
12698
- accumulateMinGas: codec_codec.u64.convert((x) => x, tryAsServiceGas),
12699
- onTransferMinGas: codec_codec.u64.convert((x) => x, tryAsServiceGas),
12700
- storageUtilisationBytes: codec_codec.u64,
12701
- gratisStorage: codec_codec.u64,
12702
- storageUtilisationCount: codec_codec.u32,
12703
- created: codec_codec.u32.convert((x) => x, common_tryAsTimeSlot),
12704
- lastAccumulation: codec_codec.u32.convert((x) => x, common_tryAsTimeSlot),
12705
- parentService: codec_codec.u32.convert((x) => x, tryAsServiceId),
12628
+ static Codec = Class(ServiceAccountInfo, {
12629
+ codeHash: bytes(hash_HASH_SIZE).asOpaque(),
12630
+ balance: u64,
12631
+ accumulateMinGas: u64.convert((x) => x, tryAsServiceGas),
12632
+ onTransferMinGas: u64.convert((x) => x, tryAsServiceGas),
12633
+ storageUtilisationBytes: u64,
12634
+ gratisStorage: u64,
12635
+ storageUtilisationCount: u32,
12636
+ created: u32.convert((x) => x, common_tryAsTimeSlot),
12637
+ lastAccumulation: u32.convert((x) => x, common_tryAsTimeSlot),
12638
+ parentService: u32.convert((x) => x, tryAsServiceId),
12706
12639
  });
12707
12640
  static create(a) {
12708
12641
  return new ServiceAccountInfo(a.codeHash, a.balance, a.accumulateMinGas, a.onTransferMinGas, a.storageUtilisationBytes, a.gratisStorage, a.storageUtilisationCount, a.created, a.lastAccumulation, a.parentService);
@@ -12758,9 +12691,9 @@ class ServiceAccountInfo extends WithDebug {
12758
12691
  class PreimageItem extends WithDebug {
12759
12692
  hash;
12760
12693
  blob;
12761
- static Codec = codec_codec.Class(PreimageItem, {
12762
- hash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
12763
- blob: codec_codec.blob,
12694
+ static Codec = Class(PreimageItem, {
12695
+ hash: bytes(hash_HASH_SIZE).asOpaque(),
12696
+ blob: blob,
12764
12697
  });
12765
12698
  static create({ hash, blob }) {
12766
12699
  return new PreimageItem(hash, blob);
@@ -12774,9 +12707,9 @@ class PreimageItem extends WithDebug {
12774
12707
  class StorageItem extends WithDebug {
12775
12708
  key;
12776
12709
  value;
12777
- static Codec = codec_codec.Class(StorageItem, {
12778
- key: codec_codec.blob.convert((i) => i, (o) => opaque_asOpaqueType(o)),
12779
- value: codec_codec.blob,
12710
+ static Codec = Class(StorageItem, {
12711
+ key: blob.convert((i) => i, (o) => opaque_asOpaqueType(o)),
12712
+ value: blob,
12780
12713
  });
12781
12714
  static create({ key, value }) {
12782
12715
  return new StorageItem(key, value);
@@ -12837,13 +12770,13 @@ class ValidatorStatistics {
12837
12770
  preImagesSize;
12838
12771
  guarantees;
12839
12772
  assurances;
12840
- static Codec = codec_codec.Class(ValidatorStatistics, {
12841
- blocks: codec_codec.u32,
12842
- tickets: codec_codec.u32,
12843
- preImages: codec_codec.u32,
12844
- preImagesSize: codec_codec.u32,
12845
- guarantees: codec_codec.u32,
12846
- assurances: codec_codec.u32,
12773
+ static Codec = Class(ValidatorStatistics, {
12774
+ blocks: u32,
12775
+ tickets: u32,
12776
+ preImages: u32,
12777
+ preImagesSize: u32,
12778
+ guarantees: u32,
12779
+ assurances: u32,
12847
12780
  });
12848
12781
  static create({ blocks, tickets, preImages, preImagesSize, guarantees, assurances, }) {
12849
12782
  return new ValidatorStatistics(blocks, tickets, preImages, preImagesSize, guarantees, assurances);
@@ -12873,9 +12806,9 @@ class ValidatorStatistics {
12873
12806
  return new ValidatorStatistics(zero, zero, zero, zero, zero, zero);
12874
12807
  }
12875
12808
  }
12876
- const codecVarU16 = codec_codec.varU32.convert((i) => numbers_tryAsU32(i), (o) => numbers_tryAsU16(o));
12809
+ const codecVarU16 = varU32.convert((i) => numbers_tryAsU32(i), (o) => numbers_tryAsU16(o));
12877
12810
  /** Encode/decode unsigned gas. */
12878
- const codecVarGas = codec_codec.varU64.convert((g) => numbers_tryAsU64(g), (i) => tryAsServiceGas(i));
12811
+ const codecVarGas = varU64.convert((g) => numbers_tryAsU64(g), (i) => tryAsServiceGas(i));
12879
12812
  /**
12880
12813
  * Single core statistics.
12881
12814
  * Updated per block, based on incoming work reports (`w`).
@@ -12892,14 +12825,14 @@ class CoreStatistics {
12892
12825
  extrinsicCount;
12893
12826
  bundleSize;
12894
12827
  gasUsed;
12895
- static Codec = codec_codec.Class(CoreStatistics, {
12896
- dataAvailabilityLoad: codec_codec.varU32,
12828
+ static Codec = Class(CoreStatistics, {
12829
+ dataAvailabilityLoad: varU32,
12897
12830
  popularity: codecVarU16,
12898
12831
  imports: codecVarU16,
12899
12832
  extrinsicCount: codecVarU16,
12900
- extrinsicSize: codec_codec.varU32,
12833
+ extrinsicSize: varU32,
12901
12834
  exports: codecVarU16,
12902
- bundleSize: codec_codec.varU32,
12835
+ bundleSize: varU32,
12903
12836
  gasUsed: codecVarGas,
12904
12837
  });
12905
12838
  static create(v) {
@@ -12958,31 +12891,31 @@ class ServiceStatistics {
12958
12891
  onTransfersCount;
12959
12892
  onTransfersGasUsed;
12960
12893
  static Codec = Compatibility.selectIfGreaterOrEqual({
12961
- fallback: codec_codec.Class(ServiceStatistics, {
12894
+ fallback: Class(ServiceStatistics, {
12962
12895
  providedCount: codecVarU16,
12963
- providedSize: codec_codec.varU32,
12964
- refinementCount: codec_codec.varU32,
12896
+ providedSize: varU32,
12897
+ refinementCount: varU32,
12965
12898
  refinementGasUsed: codecVarGas,
12966
12899
  imports: codecVarU16,
12967
12900
  extrinsicCount: codecVarU16,
12968
- extrinsicSize: codec_codec.varU32,
12901
+ extrinsicSize: varU32,
12969
12902
  exports: codecVarU16,
12970
- accumulateCount: codec_codec.varU32,
12903
+ accumulateCount: varU32,
12971
12904
  accumulateGasUsed: codecVarGas,
12972
- onTransfersCount: codec_codec.varU32,
12905
+ onTransfersCount: varU32,
12973
12906
  onTransfersGasUsed: codecVarGas,
12974
12907
  }),
12975
12908
  versions: {
12976
- [GpVersion.V0_7_1]: codec_codec.Class(ServiceStatistics, {
12909
+ [GpVersion.V0_7_1]: Class(ServiceStatistics, {
12977
12910
  providedCount: codecVarU16,
12978
- providedSize: codec_codec.varU32,
12979
- refinementCount: codec_codec.varU32,
12911
+ providedSize: varU32,
12912
+ refinementCount: varU32,
12980
12913
  refinementGasUsed: codecVarGas,
12981
12914
  imports: codecVarU16,
12982
12915
  extrinsicCount: codecVarU16,
12983
- extrinsicSize: codec_codec.varU32,
12916
+ extrinsicSize: varU32,
12984
12917
  exports: codecVarU16,
12985
- accumulateCount: codec_codec.varU32,
12918
+ accumulateCount: varU32,
12986
12919
  accumulateGasUsed: codecVarGas,
12987
12920
  onTransfersCount: ignoreValueWithDefault(numbers_tryAsU32(0)),
12988
12921
  onTransfersGasUsed: ignoreValueWithDefault(tryAsServiceGas(0)),
@@ -13043,11 +12976,11 @@ class StatisticsData {
13043
12976
  previous;
13044
12977
  cores;
13045
12978
  services;
13046
- static Codec = codec_codec.Class(StatisticsData, {
12979
+ static Codec = Class(StatisticsData, {
13047
12980
  current: codecPerValidator(ValidatorStatistics.Codec),
13048
12981
  previous: codecPerValidator(ValidatorStatistics.Codec),
13049
12982
  cores: codecPerCore(CoreStatistics.Codec),
13050
- services: codec_codec.dictionary(codec_codec.u32.asOpaque(), ServiceStatistics.Codec, {
12983
+ services: dictionary(u32.asOpaque(), ServiceStatistics.Codec, {
13051
12984
  sortKeys: (a, b) => a - b,
13052
12985
  }),
13053
12986
  });
@@ -13146,14 +13079,14 @@ class PrivilegedServices {
  assigners;
  autoAccumulateServices;
  /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
- static Codec = codec_codec.Class(PrivilegedServices, {
- manager: codec_codec.u32.asOpaque(),
- assigners: codecPerCore(codec_codec.u32.asOpaque()),
- delegator: codec_codec.u32.asOpaque(),
+ static Codec = Class(PrivilegedServices, {
+ manager: u32.asOpaque(),
+ assigners: codecPerCore(u32.asOpaque()),
+ delegator: u32.asOpaque(),
  registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
- ? codec_codec.u32.asOpaque()
+ ? u32.asOpaque()
  : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
- autoAccumulateServices: codec_codec.dictionary(codec_codec.u32.asOpaque(), codec_codec.u64.asOpaque(), {
+ autoAccumulateServices: dictionary(u32.asOpaque(), u64.asOpaque(), {
  sortKeys: (a, b) => a - b,
  }),
  });
@@ -13826,15 +13759,15 @@ class InMemoryState extends WithDebug {
  });
  }
  }
- const serviceEntriesCodec = codec_codec.object({
- storageKeys: codec_codec.sequenceVarLen(codec_codec.blob.convert((i) => i, (o) => opaque_asOpaqueType(o))),
- preimages: codec_codec.sequenceVarLen(codec_codec.bytes(hash_HASH_SIZE).asOpaque()),
- lookupHistory: codec_codec.sequenceVarLen(codec_codec.object({
- hash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
- length: codec_codec.u32,
+ const serviceEntriesCodec = object({
+ storageKeys: sequenceVarLen(blob.convert((i) => i, (o) => opaque_asOpaqueType(o))),
+ preimages: sequenceVarLen(bytes(hash_HASH_SIZE).asOpaque()),
+ lookupHistory: sequenceVarLen(object({
+ hash: bytes(hash_HASH_SIZE).asOpaque(),
+ length: u32,
  })),
  });
- const serviceDataCodec = codec_codec.dictionary(codec_codec.u32.asOpaque(), serviceEntriesCodec, {
+ const serviceDataCodec = dictionary(u32.asOpaque(), serviceEntriesCodec, {
  sortKeys: (a, b) => a - b,
  });
 
@@ -14552,7 +14485,7 @@ var serialize_serialize;
  /** C(6): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bf3013bf301?v=0.6.7 */
  serialize.entropy = {
  key: stateKeys.index(StateKeyIdx.Eta),
- Codec: codecFixedSizeArray(codec_codec.bytes(hash_HASH_SIZE).asOpaque(), ENTROPY_ENTRIES),
+ Codec: codecFixedSizeArray(bytes(hash_HASH_SIZE).asOpaque(), ENTROPY_ENTRIES),
  extract: (s) => s.entropy,
  };
  /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
@@ -14582,7 +14515,7 @@ var serialize_serialize;
  /** C(11): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e023b3e02?v=0.6.7 */
  serialize.timeslot = {
  key: stateKeys.index(StateKeyIdx.Tau),
- Codec: codec_codec.u32.asOpaque(),
+ Codec: u32.asOpaque(),
  extract: (s) => s.timeslot,
  };
  /** C(12): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b4c023b4c02?v=0.6.7 */
@@ -14612,7 +14545,7 @@ var serialize_serialize;
  /** C(16): https://graypaper.fluffylabs.dev/#/38c4e62/3b46033b4603?v=0.7.0 */
  serialize.accumulationOutputLog = {
  key: stateKeys.index(StateKeyIdx.Theta),
- Codec: codec_codec.sequenceVarLen(AccumulationOutput.Codec).convert((i) => i.array, (o) => SortedArray.fromSortedArray(accumulationOutputComparator, o)),
+ Codec: sequenceVarLen(AccumulationOutput.Codec).convert((i) => i.array, (o) => SortedArray.fromSortedArray(accumulationOutputComparator, o)),
  extract: (s) => s.accumulationOutputLog,
  };
  /** C(255, s): https://graypaper.fluffylabs.dev/#/85129da/383103383103?v=0.6.3 */
@@ -14635,7 +14568,7 @@ var serialize_serialize;
  /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
  serialize.serviceLookupHistory = (blake2b, serviceId, hash, len) => ({
  key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
- Codec: codec_codec.readonlyArray(codec_codec.sequenceVarLen(codec_codec.u32)),
+ Codec: readonlyArray(sequenceVarLen(u32)),
  });
  })(serialize_serialize || (serialize_serialize = {}));
  /**
@@ -15762,7 +15695,7 @@ function getSafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsA
 
  const TYPICAL_STATE_ITEMS = 50;
  const TYPICAL_STATE_ITEM_LEN = 50;
- const stateEntriesSequenceCodec = codec_codec.sequenceVarLen(codec_codec.pair(codec_codec.bytes(TRUNCATED_HASH_SIZE), codec_codec.blob));
+ const stateEntriesSequenceCodec = sequenceVarLen(pair(bytes(TRUNCATED_HASH_SIZE), blob));
  /**
  * Full, in-memory state represented as serialized entries dictionary.
  *
@@ -15770,7 +15703,7 @@ const stateEntriesSequenceCodec = codec_codec.sequenceVarLen(codec_codec.pair(co
  */
  class state_entries_StateEntries {
  dictionary;
- static Codec = codec_codec.custom({
+ static Codec = custom({
  name: "StateEntries",
  sizeHint: {
  isExact: false,
@@ -15962,7 +15895,7 @@ function loadState(spec, blake2b, entries) {
 
 
  /** Codec for a map with string keys. */
- const codecMap = (value, extractKey, { typicalLength = codec_codec.TYPICAL_DICTIONARY_LENGTH, compare = (a, b) => {
+ const codecMap = (value, extractKey, { typicalLength = TYPICAL_DICTIONARY_LENGTH, compare = (a, b) => {
  const keyA = extractKey(a);
  const keyB = extractKey(b);
  if (keyA < keyB) {
@@ -16005,19 +15938,17 @@ const codecMap = (value, extractKey, { typicalLength = codec_codec.TYPICAL_DICTI
  s.sequenceFixLen(value, len);
  });
  };
- const lookupHistoryItemCodec = codec_codec.object({
- hash: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
- length: codec_codec.u32,
- slots: codec_codec
- .readonlyArray(codec_codec.sequenceVarLen(codec_codec.u32.asOpaque()))
+ const lookupHistoryItemCodec = object({
+ hash: bytes(hash_HASH_SIZE).asOpaque(),
+ length: u32,
+ slots: readonlyArray(sequenceVarLen(u32.asOpaque()))
  .convert(seeThrough, service_tryAsLookupHistorySlots),
  }, "LookupHistoryItem", ({ hash, length, slots }) => new LookupHistoryItem(hash, length, slots));
- const lookupHistoryEntryCodec = codec_codec.object({
- key: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
- data: codec_codec.sequenceVarLen(lookupHistoryItemCodec),
+ const lookupHistoryEntryCodec = object({
+ key: bytes(hash_HASH_SIZE).asOpaque(),
+ data: sequenceVarLen(lookupHistoryItemCodec),
  });
- const lookupHistoryCodec = codec_codec
- .sequenceVarLen(lookupHistoryEntryCodec)
+ const lookupHistoryCodec = sequenceVarLen(lookupHistoryEntryCodec)
  .convert((dict) => {
  const entries = [];
  for (const [key, data] of dict) {
@@ -16037,9 +15968,9 @@ const lookupHistoryCodec = codec_codec
  return dict;
  });
  class ServiceWithCodec extends InMemoryService {
- static Codec = codec_codec.Class(ServiceWithCodec, {
- serviceId: codec_codec.u32.asOpaque(),
- data: codec_codec.object({
+ static Codec = Class(ServiceWithCodec, {
+ serviceId: u32.asOpaque(),
+ data: object({
  info: ServiceAccountInfo.Codec,
  preimages: codecHashDictionary(PreimageItem.Codec, (x) => x.hash),
  lookupHistory: lookupHistoryCodec,
@@ -16053,7 +15984,7 @@ class ServiceWithCodec extends InMemoryService {
  return new ServiceWithCodec(serviceId, data);
  }
  }
- const inMemoryStateCodec = (spec) => codec_codec.Class(class State extends InMemoryState {
+ const inMemoryStateCodec = (spec) => Class(class State extends InMemoryState {
  static create(data) {
  return InMemoryState.new(spec, data);
  }
@@ -16067,12 +15998,11 @@ const inMemoryStateCodec = (spec) => codec_codec.Class(class State extends InMem
  // gamma_k
  nextValidatorData: codecPerValidator(ValidatorData.Codec),
  // gamma_z
- epochRoot: codec_codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
+ epochRoot: bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
  // gamma_s
  sealingKeySeries: SafroleSealingKeysData.Codec,
  // gamma_a
- ticketsAccumulator: codec_codec
- .readonlyArray(codec_codec.sequenceVarLen(Ticket.Codec))
+ ticketsAccumulator: readonlyArray(sequenceVarLen(Ticket.Codec))
  .convert((x) => x, sized_array_asKnownSize),
  // psi
  disputesRecords: serialize_serialize.disputesRecords.Codec,
@@ -16099,7 +16029,7 @@ const inMemoryStateCodec = (spec) => codec_codec.Class(class State extends InMem
  // theta
  accumulationOutputLog: serialize_serialize.accumulationOutputLog.Codec,
  // delta
- services: codec_codec.dictionary(codec_codec.u32.asOpaque(), ServiceWithCodec.Codec, {
+ services: dictionary(u32.asOpaque(), ServiceWithCodec.Codec, {
  sortKeys: (a, b) => a - b,
  }),
  });
@@ -16124,11 +16054,11 @@ class TestState {
  state_root: fromJson.bytes32(),
  keyvals: json.array(StateKeyVal.fromJson),
  };
- static Codec = codec_codec.object({
- state_root: codec_codec.bytes(hash_HASH_SIZE).asOpaque(),
- keyvals: codec_codec.sequenceVarLen(codec_codec.object({
- key: codec_codec.bytes(TRUNCATED_HASH_SIZE),
- value: codec_codec.blob,
+ static Codec = object({
+ state_root: bytes(hash_HASH_SIZE).asOpaque(),
+ keyvals: sequenceVarLen(object({
+ key: bytes(TRUNCATED_HASH_SIZE),
+ value: blob,
  })),
  });
  state_root;
@@ -16139,7 +16069,7 @@ class StateTransitionGenesis {
  header: headerFromJson,
  state: TestState.fromJson,
  };
- static Codec = codec_codec.object({
+ static Codec = object({
  header: header_Header.Codec,
  state: TestState.Codec,
  });
@@ -16152,7 +16082,7 @@ class StateTransition {
  post_state: TestState.fromJson,
  block: blockFromJson(chain_spec_tinyChainSpec),
  };
- static Codec = codec_codec.object({
+ static Codec = object({
  pre_state: TestState.Codec,
  block: Block.Codec,
  post_state: TestState.Codec,
@@ -20229,7 +20159,7 @@ const SUPPORTED_TYPES = [
  if (option === "as-hash") {
  return looseType({
  value: blake2b.hashBytes(encoder_Encoder.encodeObject(header_Header.Codec, header, spec)),
- encode: codec_codec.bytes(hash_HASH_SIZE),
+ encode: bytes(hash_HASH_SIZE),
  });
  }
  throw new Error(`Invalid processing option: ${option}`);
@@ -20283,7 +20213,7 @@ const SUPPORTED_TYPES = [
  if (option === "as-root-hash") {
  return looseType({
  value: state_entries_StateEntries.serializeInMemory(spec, blake2b, state).getRootHash(blake2b),
- encode: codec_codec.bytes(hash_HASH_SIZE),
+ encode: bytes(hash_HASH_SIZE),
  });
  }
  throw new Error(`Invalid processing option: ${option}`);