@typeberry/lib 0.5.1-1dda9d6 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2379,48 +2379,6 @@ const pair = (a, b) => {
2379
2379
  };
2380
2380
  /** Custom encoding / decoding logic. */
2381
2381
  const custom = ({ name, sizeHint = { bytes: 0, isExact: false }, }, encode, decode, skip) => Descriptor.new(name, sizeHint, encode, decode, skip);
2382
- /** Tagged union type encoding. */
2383
- const union = (name, variants) => {
2384
- const keys = Object.keys(variants).map(Number);
2385
- const variantMap = Object.fromEntries(keys.map((key, idx) => [key, idx]));
2386
- const indexToKey = Object.fromEntries(keys.map((key, idx) => [idx, key]));
2387
- // Calculate size hint as the minimum variant size + index size
2388
- const minVariantSize = Math.max(...keys.map((key) => variants[key].sizeHint.bytes));
2389
- const sizeHint = {
2390
- bytes: 1 + minVariantSize, // varU32 index + smallest variant
2391
- isExact: false,
2392
- };
2393
- const encode = (e, x) => {
2394
- const idx = variantMap[x.kind];
2395
- if (idx === undefined) {
2396
- throw new Error(`Unknown variant type: ${x.kind} for ${name}`);
2397
- }
2398
- e.varU32(tryAsU32(idx));
2399
- const codec = variants[x.kind];
2400
- // I'm sorry but I can't figure out a better typing here :)
2401
- codec.encode(e, x);
2402
- };
2403
- const decode = (d) => {
2404
- const idx = d.varU32();
2405
- const kind = indexToKey[idx];
2406
- if (kind === undefined) {
2407
- throw new Error(`Unknown variant index: ${idx} for ${name}`);
2408
- }
2409
- const codec = variants[kind];
2410
- const value = codec.decode(d);
2411
- return { kind, ...value };
2412
- };
2413
- const skip = (s) => {
2414
- const idx = s.decoder.varU32();
2415
- const kind = indexToKey[idx];
2416
- if (kind === undefined) {
2417
- throw new Error(`Unknown variant index: ${idx} for ${name}`);
2418
- }
2419
- const codec = variants[kind];
2420
- codec.skip(s);
2421
- };
2422
- return Descriptor.new(name, sizeHint, encode, decode, skip);
2423
- };
2424
2382
  /** Choose a descriptor depending on the encoding/decoding context. */
2425
2383
  const select = ({ name, sizeHint, }, chooser) => {
2426
2384
  const Self = chooser(null);
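
The hunk above deletes the `union` descriptor: a tagged-union codec that mapped each variant's declared key to a compact index, wrote that index as a varU32 tag ahead of the variant payload, and dispatched to the matching variant codec on decode and skip. (Note that the removed code computed its size hint with `Math.max` over the variant sizes, so despite the "minimum variant size" comments the hint reflected the largest variant plus one byte for the tag.) Its two users, `WorkExecResult.Codec` and `SafroleSealingKeysData.Codec`, are rewritten later in this diff as `custom` descriptors that apply the same tag-then-payload scheme by hand. A minimal, package-independent sketch of that scheme follows; the `variants` map and the `e`/`d` objects are assumptions standing in for the bundle's own encoder/decoder types, not part of @typeberry/lib's public API:

    // Sketch of the tag-then-payload scheme the removed `union` implemented.
    // `variants` maps a numeric `kind` to an { encode, decode } codec; `e` and `d`
    // are assumed to expose varU32() like the Encoder/Decoder used in this bundle.
    const tagged = (name, variants) => {
      const keys = Object.keys(variants).map(Number);
      const keyToIndex = new Map(keys.map((key, idx) => [key, idx]));
      const indexToKey = new Map(keys.map((key, idx) => [idx, key]));
      return {
        encode(e, x) {
          const idx = keyToIndex.get(x.kind);
          if (idx === undefined) {
            throw new Error(`Unknown variant type: ${x.kind} for ${name}`);
          }
          e.varU32(idx);                  // compact variant index first...
          variants[x.kind].encode(e, x);  // ...then the variant's own payload
        },
        decode(d) {
          const kind = indexToKey.get(d.varU32());
          if (kind === undefined) {
            throw new Error(`Unknown variant index for ${name}`);
          }
          return { kind, ...variants[kind].decode(d) };
        },
      };
    };
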
@@ -2596,59 +2554,24 @@ var descriptors = /*#__PURE__*/Object.freeze({
2596
2554
  u32: u32,
2597
2555
  u64: u64,
2598
2556
  u8: u8,
2599
- union: union,
2600
2557
  varU32: varU32,
2601
2558
  varU64: varU64
2602
2559
  });
2603
2560
 
2604
- const codec = descriptors;
2605
-
2606
2561
  var index$x = /*#__PURE__*/Object.freeze({
2607
2562
  __proto__: null,
2608
- Class: Class,
2609
2563
  Decoder: Decoder,
2610
2564
  Descriptor: Descriptor,
2611
2565
  Encoder: Encoder,
2612
2566
  EndOfDataError: EndOfDataError,
2613
2567
  ObjectView: ObjectView,
2614
2568
  SequenceView: SequenceView,
2615
- TYPICAL_DICTIONARY_LENGTH: TYPICAL_DICTIONARY_LENGTH,
2616
2569
  ViewField: ViewField,
2617
2570
  addSizeHints: addSizeHints,
2618
- bitVecFixLen: bitVecFixLen,
2619
- bitVecVarLen: bitVecVarLen,
2620
- blob: blob,
2621
- bool: bool,
2622
- bytes: bytes,
2623
- codec: codec,
2624
- custom: custom,
2571
+ codec: descriptors,
2625
2572
  decodeVariableLengthExtraBytes: decodeVariableLengthExtraBytes,
2626
- dictionary: dictionary,
2627
- forEachDescriptor: forEachDescriptor,
2628
- i16: i16,
2629
- i24: i24,
2630
- i32: i32,
2631
- i64: i64,
2632
- i8: i8,
2633
- nothing: nothing,
2634
- object: object,
2635
- optional: optional,
2636
- pair: pair,
2637
- readonlyArray: readonlyArray,
2638
- select: select,
2639
- sequenceFixLen: sequenceFixLen,
2640
- sequenceVarLen: sequenceVarLen,
2641
- string: string,
2642
2573
  tryAsExactBytes: tryAsExactBytes,
2643
- u16: u16,
2644
- u24: u24,
2645
- u32: u32,
2646
- u64: u64,
2647
- u8: u8,
2648
- union: union,
2649
- validateLength: validateLength,
2650
- varU32: varU32,
2651
- varU64: varU64
2574
+ validateLength: validateLength
2652
2575
  });
2653
2576
 
2654
2577
  //#region rolldown:runtime
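
Two related cleanups meet in this hunk: the internal `const codec = descriptors` alias is dropped, and the frozen `index$x` namespace now re-exports only the aggregate `codec: descriptors` alongside the Encoder/Decoder/Descriptor/view classes and a few helpers, instead of re-exporting every descriptor (and the now-deleted `union`) individually. Within the bundle this is a spelling change only; the rest of the diff repeats the same pattern at every call site, e.g. the Ticket codec further down (excerpted here, not standalone code):

    // before (0.5.1-1dda9d6)
    static Codec = codec.Class(Ticket, {
      id: codec.bytes(HASH_SIZE),
      attempt: codec.u8.asOpaque(),
    });

    // after (0.5.1): the same descriptors, referenced directly
    static Codec = Class(Ticket, {
      id: bytes(HASH_SIZE),
      attempt: u8.asOpaque(),
    });
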
@@ -5859,7 +5782,7 @@ function codecWithContext(chooser) {
5859
5782
  const defaultContext = fullChainSpec;
5860
5783
  const { name, sizeHint } = chooser(defaultContext);
5861
5784
  const cache = new Map();
5862
- return codec.select({
5785
+ return select({
5863
5786
  name,
5864
5787
  sizeHint: { bytes: sizeHint.bytes, isExact: false },
5865
5788
  }, (context) => {
@@ -5886,9 +5809,9 @@ function codecWithContext(chooser) {
5886
5809
  /** Codec for a known-size array with length validation. */
5887
5810
  const codecKnownSizeArray = (val, options, _id) => {
5888
5811
  if ("fixedLength" in options) {
5889
- return codec.readonlyArray(codec.sequenceFixLen(val, options.fixedLength)).convert(seeThrough, asKnownSize);
5812
+ return readonlyArray(sequenceFixLen(val, options.fixedLength)).convert(seeThrough, asKnownSize);
5890
5813
  }
5891
- return codec.readonlyArray(codec.sequenceVarLen(val, options)).convert(seeThrough, asKnownSize);
5814
+ return readonlyArray(sequenceVarLen(val, options)).convert(seeThrough, asKnownSize);
5892
5815
  };
5893
5816
  /** Codec for a fixed-size array with length validation. */
5894
5817
  const codecFixedSizeArray = (val, len) => {
@@ -5897,7 +5820,7 @@ const codecFixedSizeArray = (val, len) => {
5897
5820
  throw new Error(`[${val.name}] Invalid size of fixed-size array. Got ${actual}, expected: ${len}`);
5898
5821
  }
5899
5822
  };
5900
- return codec.sequenceFixLen(val, len).convert((i) => {
5823
+ return sequenceFixLen(val, len).convert((i) => {
5901
5824
  checkLength(i.length);
5902
5825
  return i;
5903
5826
  }, (o) => {
@@ -5906,7 +5829,7 @@ const codecFixedSizeArray = (val, len) => {
5906
5829
  });
5907
5830
  };
5908
5831
  /** Codec for a hash-dictionary. */
5909
- const codecHashDictionary = (value, extractKey, { typicalLength = codec.TYPICAL_DICTIONARY_LENGTH, compare = (a, b) => extractKey(a).compare(extractKey(b)), } = {}) => {
5832
+ const codecHashDictionary = (value, extractKey, { typicalLength = TYPICAL_DICTIONARY_LENGTH, compare = (a, b) => extractKey(a).compare(extractKey(b)), } = {}) => {
5910
5833
  return Descriptor.new(`HashDictionary<${value.name}>[?]`, {
5911
5834
  bytes: typicalLength * value.sizeHint.bytes,
5912
5835
  isExact: false,
@@ -5960,13 +5883,13 @@ class AvailabilityAssurance extends WithDebug {
5960
5883
  bitfield;
5961
5884
  validatorIndex;
5962
5885
  signature;
5963
- static Codec = codec.Class(AvailabilityAssurance, {
5964
- anchor: codec.bytes(HASH_SIZE).asOpaque(),
5886
+ static Codec = Class(AvailabilityAssurance, {
5887
+ anchor: bytes(HASH_SIZE).asOpaque(),
5965
5888
  bitfield: codecWithContext((context) => {
5966
- return codec.bitVecFixLen(context.coresCount);
5889
+ return bitVecFixLen(context.coresCount);
5967
5890
  }),
5968
- validatorIndex: codec.u16.asOpaque(),
5969
- signature: codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
5891
+ validatorIndex: u16.asOpaque(),
5892
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
5970
5893
  });
5971
5894
  static create({ anchor, bitfield, validatorIndex, signature }) {
5972
5895
  return new AvailabilityAssurance(anchor, bitfield, validatorIndex, signature);
@@ -6051,11 +5974,11 @@ class Fault extends WithDebug {
6051
5974
  wasConsideredValid;
6052
5975
  key;
6053
5976
  signature;
6054
- static Codec = codec.Class(Fault, {
6055
- workReportHash: codec.bytes(HASH_SIZE).asOpaque(),
6056
- wasConsideredValid: codec.bool,
6057
- key: codec.bytes(ED25519_KEY_BYTES).asOpaque(),
6058
- signature: codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
5977
+ static Codec = Class(Fault, {
5978
+ workReportHash: bytes(HASH_SIZE).asOpaque(),
5979
+ wasConsideredValid: bool,
5980
+ key: bytes(ED25519_KEY_BYTES).asOpaque(),
5981
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
6059
5982
  });
6060
5983
  static create({ workReportHash, wasConsideredValid, key, signature }) {
6061
5984
  return new Fault(workReportHash, wasConsideredValid, key, signature);
@@ -6083,10 +6006,10 @@ class Culprit extends WithDebug {
6083
6006
  workReportHash;
6084
6007
  key;
6085
6008
  signature;
6086
- static Codec = codec.Class(Culprit, {
6087
- workReportHash: codec.bytes(HASH_SIZE).asOpaque(),
6088
- key: codec.bytes(ED25519_KEY_BYTES).asOpaque(),
6089
- signature: codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
6009
+ static Codec = Class(Culprit, {
6010
+ workReportHash: bytes(HASH_SIZE).asOpaque(),
6011
+ key: bytes(ED25519_KEY_BYTES).asOpaque(),
6012
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
6090
6013
  });
6091
6014
  static create({ workReportHash, key, signature }) {
6092
6015
  return new Culprit(workReportHash, key, signature);
@@ -6111,10 +6034,10 @@ class Judgement extends WithDebug {
6111
6034
  isWorkReportValid;
6112
6035
  index;
6113
6036
  signature;
6114
- static Codec = codec.Class(Judgement, {
6115
- isWorkReportValid: codec.bool,
6116
- index: codec.u16.asOpaque(),
6117
- signature: codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
6037
+ static Codec = Class(Judgement, {
6038
+ isWorkReportValid: bool,
6039
+ index: u16.asOpaque(),
6040
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
6118
6041
  });
6119
6042
  static create({ isWorkReportValid, index, signature }) {
6120
6043
  return new Judgement(isWorkReportValid, index, signature);
@@ -6143,12 +6066,11 @@ class Verdict extends WithDebug {
6143
6066
  workReportHash;
6144
6067
  votesEpoch;
6145
6068
  votes;
6146
- static Codec = codec.Class(Verdict, {
6147
- workReportHash: codec.bytes(HASH_SIZE).asOpaque(),
6148
- votesEpoch: codec.u32.asOpaque(),
6069
+ static Codec = Class(Verdict, {
6070
+ workReportHash: bytes(HASH_SIZE).asOpaque(),
6071
+ votesEpoch: u32.asOpaque(),
6149
6072
  votes: codecWithContext((context) => {
6150
- return codec
6151
- .readonlyArray(codec.sequenceFixLen(Judgement.Codec, context.validatorsSuperMajority))
6073
+ return readonlyArray(sequenceFixLen(Judgement.Codec, context.validatorsSuperMajority))
6152
6074
  .convert(seeThrough, asKnownSize);
6153
6075
  }),
6154
6076
  });
@@ -6190,10 +6112,10 @@ class DisputesExtrinsic extends WithDebug {
6190
6112
  verdicts;
6191
6113
  culprits;
6192
6114
  faults;
6193
- static Codec = codec.Class(DisputesExtrinsic, {
6194
- verdicts: codec.sequenceVarLen(Verdict.Codec),
6195
- culprits: codec.sequenceVarLen(Culprit.Codec),
6196
- faults: codec.sequenceVarLen(Fault.Codec),
6115
+ static Codec = Class(DisputesExtrinsic, {
6116
+ verdicts: sequenceVarLen(Verdict.Codec),
6117
+ culprits: sequenceVarLen(Culprit.Codec),
6118
+ faults: sequenceVarLen(Fault.Codec),
6197
6119
  });
6198
6120
  static create({ verdicts, culprits, faults }) {
6199
6121
  return new DisputesExtrinsic(verdicts, culprits, faults);
@@ -6244,9 +6166,9 @@ var disputes = /*#__PURE__*/Object.freeze({
6244
6166
  class WorkPackageInfo extends WithDebug {
6245
6167
  workPackageHash;
6246
6168
  segmentTreeRoot;
6247
- static Codec = codec.Class(WorkPackageInfo, {
6248
- workPackageHash: codec.bytes(HASH_SIZE).asOpaque(),
6249
- segmentTreeRoot: codec.bytes(HASH_SIZE).asOpaque(),
6169
+ static Codec = Class(WorkPackageInfo, {
6170
+ workPackageHash: bytes(HASH_SIZE).asOpaque(),
6171
+ segmentTreeRoot: bytes(HASH_SIZE).asOpaque(),
6250
6172
  });
6251
6173
  constructor(
6252
6174
  /** Hash of the described work package. */
@@ -6274,13 +6196,13 @@ class RefineContext extends WithDebug {
6274
6196
  lookupAnchor;
6275
6197
  lookupAnchorSlot;
6276
6198
  prerequisites;
6277
- static Codec = codec.Class(RefineContext, {
6278
- anchor: codec.bytes(HASH_SIZE).asOpaque(),
6279
- stateRoot: codec.bytes(HASH_SIZE).asOpaque(),
6280
- beefyRoot: codec.bytes(HASH_SIZE).asOpaque(),
6281
- lookupAnchor: codec.bytes(HASH_SIZE).asOpaque(),
6282
- lookupAnchorSlot: codec.u32.asOpaque(),
6283
- prerequisites: codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque()),
6199
+ static Codec = Class(RefineContext, {
6200
+ anchor: bytes(HASH_SIZE).asOpaque(),
6201
+ stateRoot: bytes(HASH_SIZE).asOpaque(),
6202
+ beefyRoot: bytes(HASH_SIZE).asOpaque(),
6203
+ lookupAnchor: bytes(HASH_SIZE).asOpaque(),
6204
+ lookupAnchorSlot: u32.asOpaque(),
6205
+ prerequisites: sequenceVarLen(bytes(HASH_SIZE).asOpaque()),
6284
6206
  });
6285
6207
  static create({ anchor, stateRoot, beefyRoot, lookupAnchor, lookupAnchorSlot, prerequisites, }) {
6286
6208
  return new RefineContext(anchor, stateRoot, beefyRoot, lookupAnchor, lookupAnchorSlot, prerequisites);
@@ -6332,9 +6254,9 @@ const tryAsSegmentIndex = (v) => asOpaqueType(tryAsU16(v));
6332
6254
  class ImportSpec extends WithDebug {
6333
6255
  treeRoot;
6334
6256
  index;
6335
- static Codec = codec.Class(ImportSpec, {
6336
- treeRoot: codec.bytes(HASH_SIZE),
6337
- index: codec.u16.asOpaque(),
6257
+ static Codec = Class(ImportSpec, {
6258
+ treeRoot: bytes(HASH_SIZE),
6259
+ index: u16.asOpaque(),
6338
6260
  });
6339
6261
  static create({ treeRoot, index }) {
6340
6262
  return new ImportSpec(treeRoot, index);
@@ -6356,9 +6278,9 @@ class ImportSpec extends WithDebug {
6356
6278
  class WorkItemExtrinsicSpec extends WithDebug {
6357
6279
  hash;
6358
6280
  len;
6359
- static Codec = codec.Class(WorkItemExtrinsicSpec, {
6360
- hash: codec.bytes(HASH_SIZE).asOpaque(),
6361
- len: codec.u32,
6281
+ static Codec = Class(WorkItemExtrinsicSpec, {
6282
+ hash: bytes(HASH_SIZE).asOpaque(),
6283
+ len: u32,
6362
6284
  });
6363
6285
  static create({ hash, len }) {
6364
6286
  return new WorkItemExtrinsicSpec(hash, len);
@@ -6388,7 +6310,7 @@ function workItemExtrinsicsCodec(workItems) {
6388
6310
  if (sum.overflow) {
6389
6311
  throw new Error("Unable to create a decoder, because the length of extrinsics overflows!");
6390
6312
  }
6391
- return codec.custom({
6313
+ return custom({
6392
6314
  name: "WorkItemExtrinsics",
6393
6315
  sizeHint: { bytes: sum.value, isExact: true },
6394
6316
  }, (e, val) => {
@@ -6418,19 +6340,19 @@ class WorkItem extends WithDebug {
6418
6340
  importSegments;
6419
6341
  extrinsic;
6420
6342
  exportCount;
6421
- static Codec = codec.Class(WorkItem, {
6422
- service: codec.u32.asOpaque(),
6423
- codeHash: codec.bytes(HASH_SIZE).asOpaque(),
6424
- refineGasLimit: codec.u64.asOpaque(),
6425
- accumulateGasLimit: codec.u64.asOpaque(),
6426
- exportCount: codec.u16,
6427
- payload: codec.blob,
6343
+ static Codec = Class(WorkItem, {
6344
+ service: u32.asOpaque(),
6345
+ codeHash: bytes(HASH_SIZE).asOpaque(),
6346
+ refineGasLimit: u64.asOpaque(),
6347
+ accumulateGasLimit: u64.asOpaque(),
6348
+ exportCount: u16,
6349
+ payload: blob,
6428
6350
  importSegments: codecKnownSizeArray(ImportSpec.Codec, {
6429
6351
  minLength: 0,
6430
6352
  maxLength: MAX_NUMBER_OF_SEGMENTS,
6431
6353
  typicalLength: MAX_NUMBER_OF_SEGMENTS,
6432
6354
  }),
6433
- extrinsic: codec.sequenceVarLen(WorkItemExtrinsicSpec.Codec),
6355
+ extrinsic: sequenceVarLen(WorkItemExtrinsicSpec.Codec),
6434
6356
  });
6435
6357
  static create({ service, codeHash, payload, refineGasLimit, accumulateGasLimit, importSegments, extrinsic, exportCount, }) {
6436
6358
  return new WorkItem(service, codeHash, payload, refineGasLimit, accumulateGasLimit, importSegments, extrinsic, exportCount);
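
The `importSegments` field above is the clearest use of `codecKnownSizeArray` (whose own hunk appears earlier in this diff): given length bounds it wraps `sequenceVarLen` and converts the result to a known-size array, while a `{ fixedLength }` option selects `sequenceFixLen` instead. A short sketch using identifiers from this bundle; the `fixedLength` value below is illustrative only:

    // Bounded, variable-length on the wire: what WorkItem.Codec uses above.
    const importSegmentsCodec = codecKnownSizeArray(ImportSpec.Codec, {
      minLength: 0,
      maxLength: MAX_NUMBER_OF_SEGMENTS,
      typicalLength: MAX_NUMBER_OF_SEGMENTS,
    });

    // Fixed length on the wire: { fixedLength } switches to sequenceFixLen under the hood.
    const eightImportsCodec = codecKnownSizeArray(ImportSpec.Codec, { fixedLength: 8 });
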
@@ -6503,13 +6425,13 @@ class WorkPackage extends WithDebug {
6503
6425
  parametrization;
6504
6426
  context;
6505
6427
  items;
6506
- static Codec = codec.Class(WorkPackage, {
6507
- authCodeHost: codec.u32.asOpaque(),
6508
- authCodeHash: codec.bytes(HASH_SIZE).asOpaque(),
6428
+ static Codec = Class(WorkPackage, {
6429
+ authCodeHost: u32.asOpaque(),
6430
+ authCodeHash: bytes(HASH_SIZE).asOpaque(),
6509
6431
  context: RefineContext.Codec,
6510
- authorization: codec.blob,
6511
- parametrization: codec.blob,
6512
- items: codec.sequenceVarLen(WorkItem.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
6432
+ authorization: blob,
6433
+ parametrization: blob,
6434
+ items: sequenceVarLen(WorkItem.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
6513
6435
  });
6514
6436
  static create({ authorization, authCodeHost, authCodeHash, parametrization, context, items, }) {
6515
6437
  return new WorkPackage(authorization, authCodeHost, authCodeHash, parametrization, context, items);
@@ -6572,22 +6494,30 @@ var WorkExecResultKind;
6572
6494
  class WorkExecResult extends WithDebug {
6573
6495
  kind;
6574
6496
  okBlob;
6575
- static Codec = codec
6576
- .union("WorkExecResult", {
6577
- [WorkExecResultKind.ok]: codec.object({ okBlob: codec.blob }),
6578
- [WorkExecResultKind.outOfGas]: codec.object({}),
6579
- [WorkExecResultKind.panic]: codec.object({}),
6580
- [WorkExecResultKind.incorrectNumberOfExports]: codec.object({}),
6581
- [WorkExecResultKind.digestTooBig]: codec.object({}),
6582
- [WorkExecResultKind.badCode]: codec.object({}),
6583
- [WorkExecResultKind.codeOversize]: codec.object({}),
6584
- })
6585
- .convert((x) => {
6586
- if (x.kind === WorkExecResultKind.ok) {
6587
- return { kind: WorkExecResultKind.ok, okBlob: x.okBlob ?? BytesBlob.empty() };
6588
- }
6589
- return { kind: x.kind };
6590
- }, (x) => new WorkExecResult(x.kind, x.kind === WorkExecResultKind.ok ? x.okBlob : null));
6497
+ static Codec = custom({
6498
+ name: "WorkExecResult",
6499
+ sizeHint: { bytes: 1, isExact: false },
6500
+ }, (e, x) => {
6501
+ e.varU32(tryAsU32(x.kind));
6502
+ if (x.kind === WorkExecResultKind.ok && x.okBlob !== null) {
6503
+ e.bytesBlob(x.okBlob);
6504
+ }
6505
+ }, (d) => {
6506
+ const kind = d.varU32();
6507
+ if (kind === WorkExecResultKind.ok) {
6508
+ const blob = d.bytesBlob();
6509
+ return new WorkExecResult(kind, blob);
6510
+ }
6511
+ if (kind > WorkExecResultKind.codeOversize) {
6512
+ throw new Error(`Invalid WorkExecResultKind: ${kind}`);
6513
+ }
6514
+ return new WorkExecResult(kind);
6515
+ }, (s) => {
6516
+ const kind = s.decoder.varU32();
6517
+ if (kind === WorkExecResultKind.ok) {
6518
+ s.bytesBlob();
6519
+ }
6520
+ });
6591
6521
  constructor(
6592
6522
  /** The execution result tag. */
6593
6523
  kind,
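
The hunk above swaps the `union`-plus-`convert` definition of `WorkExecResult.Codec` for a hand-written `custom` descriptor. The wire format is effectively unchanged (the old variant index coincides with the numeric kind): a varU32 kind tag followed by a blob only for the `ok` variant. The new decoder additionally rejects tags above `WorkExecResultKind.codeOversize` instead of accepting any numeric value. A standalone mirror of that decode path; `d` stands for any decoder exposing varU32()/bytesBlob() as used in this bundle:

    // Decode path introduced above, written as a plain function for illustration.
    function decodeWorkExecResult(d) {
      const kind = d.varU32();
      if (kind === WorkExecResultKind.ok) {
        return new WorkExecResult(kind, d.bytesBlob()); // the `ok` tag carries the output blob
      }
      if (kind > WorkExecResultKind.codeOversize) {
        throw new Error(`Invalid WorkExecResultKind: ${kind}`); // new check in 0.5.1
      }
      return new WorkExecResult(kind); // every other tag is payload-free
    }
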
@@ -6611,12 +6541,12 @@ class WorkRefineLoad extends WithDebug {
6611
6541
  extrinsicCount;
6612
6542
  extrinsicSize;
6613
6543
  exportedSegments;
6614
- static Codec = codec.Class(WorkRefineLoad, {
6615
- gasUsed: codec.varU64.asOpaque(),
6616
- importedSegments: codec.varU32,
6617
- extrinsicCount: codec.varU32,
6618
- extrinsicSize: codec.varU32,
6619
- exportedSegments: codec.varU32,
6544
+ static Codec = Class(WorkRefineLoad, {
6545
+ gasUsed: varU64.asOpaque(),
6546
+ importedSegments: varU32,
6547
+ extrinsicCount: varU32,
6548
+ extrinsicSize: varU32,
6549
+ exportedSegments: varU32,
6620
6550
  });
6621
6551
  static create({ gasUsed, importedSegments, extrinsicCount, extrinsicSize, exportedSegments, }) {
6622
6552
  return new WorkRefineLoad(gasUsed, importedSegments, extrinsicCount, extrinsicSize, exportedSegments);
@@ -6652,11 +6582,11 @@ class WorkResult {
6652
6582
  gas;
6653
6583
  result;
6654
6584
  load;
6655
- static Codec = codec.Class(WorkResult, {
6656
- serviceId: codec.u32.asOpaque(),
6657
- codeHash: codec.bytes(HASH_SIZE).asOpaque(),
6658
- payloadHash: codec.bytes(HASH_SIZE),
6659
- gas: codec.u64.asOpaque(),
6585
+ static Codec = Class(WorkResult, {
6586
+ serviceId: u32.asOpaque(),
6587
+ codeHash: bytes(HASH_SIZE).asOpaque(),
6588
+ payloadHash: bytes(HASH_SIZE),
6589
+ gas: u64.asOpaque(),
6660
6590
  result: WorkExecResult.Codec,
6661
6591
  load: WorkRefineLoad.Codec,
6662
6592
  });
@@ -6721,12 +6651,12 @@ class WorkPackageSpec extends WithDebug {
6721
6651
  erasureRoot;
6722
6652
  exportsRoot;
6723
6653
  exportsCount;
6724
- static Codec = codec.Class(WorkPackageSpec, {
6725
- hash: codec.bytes(HASH_SIZE).asOpaque(),
6726
- length: codec.u32,
6727
- erasureRoot: codec.bytes(HASH_SIZE),
6728
- exportsRoot: codec.bytes(HASH_SIZE).asOpaque(),
6729
- exportsCount: codec.u16,
6654
+ static Codec = Class(WorkPackageSpec, {
6655
+ hash: bytes(HASH_SIZE).asOpaque(),
6656
+ length: u32,
6657
+ erasureRoot: bytes(HASH_SIZE),
6658
+ exportsRoot: bytes(HASH_SIZE).asOpaque(),
6659
+ exportsCount: u16,
6730
6660
  });
6731
6661
  static create({ hash, length, erasureRoot, exportsRoot, exportsCount }) {
6732
6662
  return new WorkPackageSpec(hash, length, erasureRoot, exportsRoot, exportsCount);
@@ -6764,20 +6694,20 @@ class WorkReport extends WithDebug {
6764
6694
  segmentRootLookup;
6765
6695
  results;
6766
6696
  authorizationGasUsed;
6767
- static Codec = codec.Class(WorkReport, {
6697
+ static Codec = Class(WorkReport, {
6768
6698
  workPackageSpec: WorkPackageSpec.Codec,
6769
6699
  context: RefineContext.Codec,
6770
- coreIndex: codec.varU32.convert((o) => tryAsU32(o), (i) => {
6700
+ coreIndex: varU32.convert((o) => tryAsU32(o), (i) => {
6771
6701
  if (!isU16(i)) {
6772
6702
  throw new Error(`Core index exceeds U16: ${i}`);
6773
6703
  }
6774
6704
  return tryAsCoreIndex(i);
6775
6705
  }),
6776
- authorizerHash: codec.bytes(HASH_SIZE).asOpaque(),
6777
- authorizationGasUsed: codec.varU64.asOpaque(),
6778
- authorizationOutput: codec.blob,
6779
- segmentRootLookup: codec.readonlyArray(codec.sequenceVarLen(WorkPackageInfo.Codec)),
6780
- results: codec.sequenceVarLen(WorkResult.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
6706
+ authorizerHash: bytes(HASH_SIZE).asOpaque(),
6707
+ authorizationGasUsed: varU64.asOpaque(),
6708
+ authorizationOutput: blob,
6709
+ segmentRootLookup: readonlyArray(sequenceVarLen(WorkPackageInfo.Codec)),
6710
+ results: sequenceVarLen(WorkResult.Codec).convert((x) => x, (items) => FixedSizeArray.new(items, tryAsWorkItemsCount(items.length))),
6781
6711
  });
6782
6712
  static create({ workPackageSpec, context, coreIndex, authorizerHash, authorizationOutput, segmentRootLookup, results, authorizationGasUsed, }) {
6783
6713
  return new WorkReport(workPackageSpec, context, coreIndex, authorizerHash, authorizationOutput, segmentRootLookup, results, authorizationGasUsed);
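
Throughout these class codecs the `convert(encodeMap, decodeMap)` pair keeps the wire type and the in-memory type separate: judging by the call sites in this diff (codecVarU16 and the FixedSizeArray conversions further down), the first callback maps the in-memory value to the base descriptor's type for encoding, and the second maps the decoded value back, often with validation, as the `coreIndex` field above shows. A condensed sketch of that field, using the helpers visible in this bundle:

    // coreIndex: stored as a variable-length u32 on the wire, narrowed back to a
    // core index (and bounds-checked against U16) when decoding.
    const coreIndexCodec = varU32.convert(
      (o) => tryAsU32(o),
      (i) => {
        if (!isU16(i)) {
          throw new Error(`Core index exceeds U16: ${i}`);
        }
        return tryAsCoreIndex(i);
      },
    );
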
@@ -6825,9 +6755,9 @@ const REQUIRED_CREDENTIALS_RANGE = [2, 3];
6825
6755
  class Credential extends WithDebug {
6826
6756
  validatorIndex;
6827
6757
  signature;
6828
- static Codec = codec.Class(Credential, {
6829
- validatorIndex: codec.u16.asOpaque(),
6830
- signature: codec.bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
6758
+ static Codec = Class(Credential, {
6759
+ validatorIndex: u16.asOpaque(),
6760
+ signature: bytes(ED25519_SIGNATURE_BYTES).asOpaque(),
6831
6761
  });
6832
6762
  static create({ validatorIndex, signature }) {
6833
6763
  return new Credential(validatorIndex, signature);
@@ -6851,9 +6781,9 @@ class ReportGuarantee extends WithDebug {
6851
6781
  report;
6852
6782
  slot;
6853
6783
  credentials;
6854
- static Codec = codec.Class(ReportGuarantee, {
6784
+ static Codec = Class(ReportGuarantee, {
6855
6785
  report: WorkReport.Codec,
6856
- slot: codec.u32.asOpaque(),
6786
+ slot: u32.asOpaque(),
6857
6787
  credentials: codecKnownSizeArray(Credential.Codec, {
6858
6788
  minLength: REQUIRED_CREDENTIALS_RANGE[0],
6859
6789
  maxLength: REQUIRED_CREDENTIALS_RANGE[1],
@@ -6905,10 +6835,10 @@ function tryAsTicketAttempt(x) {
6905
6835
  class SignedTicket extends WithDebug {
6906
6836
  attempt;
6907
6837
  signature;
6908
- static Codec = codec.Class(SignedTicket, {
6838
+ static Codec = Class(SignedTicket, {
6909
6839
  // TODO [ToDr] we should verify that attempt is either 0|1|2.
6910
- attempt: codec.u8.asOpaque(),
6911
- signature: codec.bytes(BANDERSNATCH_PROOF_BYTES).asOpaque(),
6840
+ attempt: u8.asOpaque(),
6841
+ signature: bytes(BANDERSNATCH_PROOF_BYTES).asOpaque(),
6912
6842
  });
6913
6843
  static create({ attempt, signature }) {
6914
6844
  return new SignedTicket(attempt, signature);
@@ -6927,10 +6857,10 @@ class SignedTicket extends WithDebug {
6927
6857
  class Ticket extends WithDebug {
6928
6858
  id;
6929
6859
  attempt;
6930
- static Codec = codec.Class(Ticket, {
6931
- id: codec.bytes(HASH_SIZE),
6860
+ static Codec = Class(Ticket, {
6861
+ id: bytes(HASH_SIZE),
6932
6862
  // TODO [ToDr] we should verify that attempt is either 0|1|2.
6933
- attempt: codec.u8.asOpaque(),
6863
+ attempt: u8.asOpaque(),
6934
6864
  });
6935
6865
  static create({ id, attempt }) {
6936
6866
  return new Ticket(id, attempt);
@@ -6972,9 +6902,9 @@ var tickets = /*#__PURE__*/Object.freeze({
6972
6902
  class ValidatorKeys extends WithDebug {
6973
6903
  bandersnatch;
6974
6904
  ed25519;
6975
- static Codec = codec.Class(ValidatorKeys, {
6976
- bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
6977
- ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque(),
6905
+ static Codec = Class(ValidatorKeys, {
6906
+ bandersnatch: bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
6907
+ ed25519: bytes(ED25519_KEY_BYTES).asOpaque(),
6978
6908
  });
6979
6909
  static create({ bandersnatch, ed25519 }) {
6980
6910
  return new ValidatorKeys(bandersnatch, ed25519);
@@ -6991,7 +6921,7 @@ class ValidatorKeys extends WithDebug {
6991
6921
  }
6992
6922
  class TicketsMarker extends WithDebug {
6993
6923
  tickets;
6994
- static Codec = codec.Class(TicketsMarker, {
6924
+ static Codec = Class(TicketsMarker, {
6995
6925
  tickets: codecPerEpochBlock(Ticket.Codec),
6996
6926
  });
6997
6927
  static create({ tickets }) {
@@ -7013,9 +6943,9 @@ class EpochMarker extends WithDebug {
7013
6943
  entropy;
7014
6944
  ticketsEntropy;
7015
6945
  validators;
7016
- static Codec = codec.Class(EpochMarker, {
7017
- entropy: codec.bytes(HASH_SIZE).asOpaque(),
7018
- ticketsEntropy: codec.bytes(HASH_SIZE).asOpaque(),
6946
+ static Codec = Class(EpochMarker, {
6947
+ entropy: bytes(HASH_SIZE).asOpaque(),
6948
+ ticketsEntropy: bytes(HASH_SIZE).asOpaque(),
7019
6949
  validators: codecPerValidator(ValidatorKeys.Codec),
7020
6950
  });
7021
6951
  static create({ entropy, ticketsEntropy, validators }) {
@@ -7052,17 +6982,17 @@ const encodeUnsealedHeader = (view) => {
7052
6982
  * https://graypaper.fluffylabs.dev/#/ab2cdbd/0c66000c7200?v=0.7.2
7053
6983
  */
7054
6984
  class Header extends WithDebug {
7055
- static Codec = codec.Class(Header, {
7056
- parentHeaderHash: codec.bytes(HASH_SIZE).asOpaque(),
7057
- priorStateRoot: codec.bytes(HASH_SIZE).asOpaque(),
7058
- extrinsicHash: codec.bytes(HASH_SIZE).asOpaque(),
7059
- timeSlotIndex: codec.u32.asOpaque(),
7060
- epochMarker: codec.optional(EpochMarker.Codec),
7061
- ticketsMarker: codec.optional(TicketsMarker.Codec),
7062
- bandersnatchBlockAuthorIndex: codec.u16.asOpaque(),
7063
- entropySource: codec.bytes(BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque(),
7064
- offendersMarker: codec.sequenceVarLen(codec.bytes(ED25519_KEY_BYTES).asOpaque()),
7065
- seal: codec.bytes(BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque(),
6985
+ static Codec = Class(Header, {
6986
+ parentHeaderHash: bytes(HASH_SIZE).asOpaque(),
6987
+ priorStateRoot: bytes(HASH_SIZE).asOpaque(),
6988
+ extrinsicHash: bytes(HASH_SIZE).asOpaque(),
6989
+ timeSlotIndex: u32.asOpaque(),
6990
+ epochMarker: optional(EpochMarker.Codec),
6991
+ ticketsMarker: optional(TicketsMarker.Codec),
6992
+ bandersnatchBlockAuthorIndex: u16.asOpaque(),
6993
+ entropySource: bytes(BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque(),
6994
+ offendersMarker: sequenceVarLen(bytes(ED25519_KEY_BYTES).asOpaque()),
6995
+ seal: bytes(BANDERSNATCH_VRF_SIGNATURE_BYTES).asOpaque(),
7066
6996
  });
7067
6997
  static create(h) {
7068
6998
  return Object.assign(Header.empty(), h);
@@ -7117,8 +7047,8 @@ class Header extends WithDebug {
7117
7047
  * `DescriptorRecord` or `CodecRecord` for some reason.
7118
7048
  */
7119
7049
  class HeaderViewWithHash extends WithHash {
7120
- static Codec = codec.Class(HeaderViewWithHash, {
7121
- hash: codec.bytes(HASH_SIZE).asOpaque(),
7050
+ static Codec = Class(HeaderViewWithHash, {
7051
+ hash: bytes(HASH_SIZE).asOpaque(),
7122
7052
  data: Header.Codec.View,
7123
7053
  });
7124
7054
  static create({ hash, data }) {
@@ -7136,9 +7066,9 @@ const headerViewWithHashCodec = HeaderViewWithHash.Codec;
7136
7066
  class Preimage extends WithDebug {
7137
7067
  requester;
7138
7068
  blob;
7139
- static Codec = codec.Class(Preimage, {
7140
- requester: codec.u32.asOpaque(),
7141
- blob: codec.blob,
7069
+ static Codec = Class(Preimage, {
7070
+ requester: u32.asOpaque(),
7071
+ blob: blob,
7142
7072
  });
7143
7073
  static create({ requester, blob }) {
7144
7074
  return new Preimage(requester, blob);
@@ -7153,7 +7083,7 @@ class Preimage extends WithDebug {
7153
7083
  this.blob = blob;
7154
7084
  }
7155
7085
  }
7156
- const preimagesExtrinsicCodec = codec.sequenceVarLen(Preimage.Codec);
7086
+ const preimagesExtrinsicCodec = sequenceVarLen(Preimage.Codec);
7157
7087
 
7158
7088
  var preimage = /*#__PURE__*/Object.freeze({
7159
7089
  __proto__: null,
@@ -7174,7 +7104,7 @@ class Extrinsic extends WithDebug {
7174
7104
  guarantees;
7175
7105
  assurances;
7176
7106
  disputes;
7177
- static Codec = codec.Class(Extrinsic, {
7107
+ static Codec = Class(Extrinsic, {
7178
7108
  tickets: ticketsExtrinsicCodec,
7179
7109
  preimages: preimagesExtrinsicCodec,
7180
7110
  guarantees: guaranteesExtrinsicCodec,
@@ -7227,7 +7157,7 @@ class Extrinsic extends WithDebug {
7227
7157
  class Block extends WithDebug {
7228
7158
  header;
7229
7159
  extrinsic;
7230
- static Codec = codec.Class(Block, {
7160
+ static Codec = Class(Block, {
7231
7161
  header: Header.Codec,
7232
7162
  extrinsic: Extrinsic.Codec,
7233
7163
  });
@@ -8725,9 +8655,9 @@ function legacyServiceNested(serviceId, hash) {
8725
8655
  class AccumulationOutput {
8726
8656
  serviceId;
8727
8657
  output;
8728
- static Codec = codec.Class(AccumulationOutput, {
8729
- serviceId: codec.u32.asOpaque(),
8730
- output: codec.bytes(HASH_SIZE),
8658
+ static Codec = Class(AccumulationOutput, {
8659
+ serviceId: u32.asOpaque(),
8660
+ output: bytes(HASH_SIZE),
8731
8661
  });
8732
8662
  static create(a) {
8733
8663
  return new AccumulationOutput(a.serviceId, a.output);
@@ -8803,9 +8733,9 @@ const MAX_REPORT_DEPENDENCIES = 8;
8803
8733
  class NotYetAccumulatedReport extends WithDebug {
8804
8734
  report;
8805
8735
  dependencies;
8806
- static Codec = codec.Class(NotYetAccumulatedReport, {
8736
+ static Codec = Class(NotYetAccumulatedReport, {
8807
8737
  report: WorkReport.Codec,
8808
- dependencies: codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque(), {
8738
+ dependencies: codecKnownSizeArray(bytes(HASH_SIZE).asOpaque(), {
8809
8739
  typicalLength: MAX_REPORT_DEPENDENCIES / 2,
8810
8740
  maxLength: MAX_REPORT_DEPENDENCIES,
8811
8741
  minLength: 0,
@@ -8832,7 +8762,7 @@ class NotYetAccumulatedReport extends WithDebug {
8832
8762
  this.dependencies = dependencies;
8833
8763
  }
8834
8764
  }
8835
- const accumulationQueueCodec = codecPerEpochBlock(codec.readonlyArray(codec.sequenceVarLen(NotYetAccumulatedReport.Codec)));
8765
+ const accumulationQueueCodec = codecPerEpochBlock(readonlyArray(sequenceVarLen(NotYetAccumulatedReport.Codec)));
8836
8766
 
8837
8767
  /** Check if given array has correct length before casting to the opaque type. */
8838
8768
  function tryAsPerCore(array, spec) {
@@ -8857,9 +8787,9 @@ const codecPerCore = (val) => codecWithContext((context) => {
8857
8787
  class AvailabilityAssignment extends WithDebug {
8858
8788
  workReport;
8859
8789
  timeout;
8860
- static Codec = codec.Class(AvailabilityAssignment, {
8790
+ static Codec = Class(AvailabilityAssignment, {
8861
8791
  workReport: WorkReport.Codec,
8862
- timeout: codec.u32.asOpaque(),
8792
+ timeout: u32.asOpaque(),
8863
8793
  });
8864
8794
  static create({ workReport, timeout }) {
8865
8795
  return new AvailabilityAssignment(workReport, timeout);
@@ -8874,20 +8804,20 @@ class AvailabilityAssignment extends WithDebug {
8874
8804
  this.timeout = timeout;
8875
8805
  }
8876
8806
  }
8877
- const availabilityAssignmentsCodec = codecPerCore(codec.optional(AvailabilityAssignment.Codec));
8807
+ const availabilityAssignmentsCodec = codecPerCore(optional(AvailabilityAssignment.Codec));
8878
8808
 
8879
8809
  /** `O`: Maximal authorization pool size. */
8880
8810
  const MAX_AUTH_POOL_SIZE = O;
8881
8811
  /** `Q`: Size of the authorization queue. */
8882
8812
  const AUTHORIZATION_QUEUE_SIZE = Q;
8883
- const authPoolsCodec = codecPerCore(codecKnownSizeArray(codec.bytes(HASH_SIZE).asOpaque(), {
8813
+ const authPoolsCodec = codecPerCore(codecKnownSizeArray(bytes(HASH_SIZE).asOpaque(), {
8884
8814
  minLength: 0,
8885
8815
  maxLength: MAX_AUTH_POOL_SIZE,
8886
8816
  typicalLength: MAX_AUTH_POOL_SIZE,
8887
8817
  }));
8888
- const authQueuesCodec = codecPerCore(codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE));
8818
+ const authQueuesCodec = codecPerCore(codecFixedSizeArray(bytes(HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE));
8889
8819
 
8890
- const sortedSetCodec = () => codec.readonlyArray(codec.sequenceVarLen(codec.bytes(HASH_SIZE))).convert((input) => input.array, (output) => {
8820
+ const sortedSetCodec = () => readonlyArray(sequenceVarLen(bytes(HASH_SIZE))).convert((input) => input.array, (output) => {
8891
8821
  const typed = output.map((x) => x.asOpaque());
8892
8822
  return SortedSet.fromSortedArray(hashComparator, typed);
8893
8823
  });
@@ -8902,7 +8832,7 @@ class DisputesRecords {
8902
8832
  badSet;
8903
8833
  wonkySet;
8904
8834
  punishSet;
8905
- static Codec = codec.Class(DisputesRecords, {
8835
+ static Codec = Class(DisputesRecords, {
8906
8836
  goodSet: workReportsSortedSetCodec,
8907
8837
  badSet: workReportsSortedSetCodec,
8908
8838
  wonkySet: workReportsSortedSetCodec,
@@ -8967,10 +8897,10 @@ class BlockState extends WithDebug {
8967
8897
  accumulationResult;
8968
8898
  postStateRoot;
8969
8899
  reported;
8970
- static Codec = codec.Class(BlockState, {
8971
- headerHash: codec.bytes(HASH_SIZE).asOpaque(),
8972
- accumulationResult: codec.bytes(HASH_SIZE),
8973
- postStateRoot: codec.bytes(HASH_SIZE).asOpaque(),
8900
+ static Codec = Class(BlockState, {
8901
+ headerHash: bytes(HASH_SIZE).asOpaque(),
8902
+ accumulationResult: bytes(HASH_SIZE),
8903
+ postStateRoot: bytes(HASH_SIZE).asOpaque(),
8974
8904
  reported: codecHashDictionary(WorkPackageInfo.Codec, (x) => x.workPackageHash),
8975
8905
  });
8976
8906
  static create({ headerHash, accumulationResult, postStateRoot, reported }) {
@@ -9000,14 +8930,14 @@ class BlockState extends WithDebug {
9000
8930
  class RecentBlocks extends WithDebug {
9001
8931
  blocks;
9002
8932
  accumulationLog;
9003
- static Codec = codec.Class(RecentBlocks, {
8933
+ static Codec = Class(RecentBlocks, {
9004
8934
  blocks: codecKnownSizeArray(BlockState.Codec, {
9005
8935
  minLength: 0,
9006
8936
  maxLength: MAX_RECENT_HISTORY,
9007
8937
  typicalLength: MAX_RECENT_HISTORY,
9008
8938
  }),
9009
- accumulationLog: codec.object({
9010
- peaks: codec.readonlyArray(codec.sequenceVarLen(codec.optional(codec.bytes(HASH_SIZE)))),
8939
+ accumulationLog: object({
8940
+ peaks: readonlyArray(sequenceVarLen(optional(bytes(HASH_SIZE)))),
9011
8941
  }),
9012
8942
  });
9013
8943
  static empty() {
@@ -9035,7 +8965,7 @@ class RecentBlocks extends WithDebug {
9035
8965
  }
9036
8966
  }
9037
8967
 
9038
- const recentlyAccumulatedCodec = codecPerEpochBlock(codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque()).convert((x) => Array.from(x), (x) => HashSet.from(x)));
8968
+ const recentlyAccumulatedCodec = codecPerEpochBlock(sequenceVarLen(bytes(HASH_SIZE).asOpaque()).convert((x) => Array.from(x), (x) => HashSet.from(x)));
9039
8969
 
9040
8970
  /**
9041
8971
  * Fixed size of validator metadata.
@@ -9053,11 +8983,11 @@ class ValidatorData extends WithDebug {
9053
8983
  ed25519;
9054
8984
  bls;
9055
8985
  metadata;
9056
- static Codec = codec.Class(ValidatorData, {
9057
- bandersnatch: codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
9058
- ed25519: codec.bytes(ED25519_KEY_BYTES).asOpaque(),
9059
- bls: codec.bytes(BLS_KEY_BYTES).asOpaque(),
9060
- metadata: codec.bytes(VALIDATOR_META_BYTES),
8986
+ static Codec = Class(ValidatorData, {
8987
+ bandersnatch: bytes(BANDERSNATCH_KEY_BYTES).asOpaque(),
8988
+ ed25519: bytes(ED25519_KEY_BYTES).asOpaque(),
8989
+ bls: bytes(BLS_KEY_BYTES).asOpaque(),
8990
+ metadata: bytes(VALIDATOR_META_BYTES),
9061
8991
  });
9062
8992
  static create({ ed25519, bandersnatch, bls, metadata }) {
9063
8993
  return new ValidatorData(bandersnatch, ed25519, bls, metadata);
@@ -9085,26 +9015,46 @@ var SafroleSealingKeysKind;
9085
9015
  SafroleSealingKeysKind[SafroleSealingKeysKind["Tickets"] = 0] = "Tickets";
9086
9016
  SafroleSealingKeysKind[SafroleSealingKeysKind["Keys"] = 1] = "Keys";
9087
9017
  })(SafroleSealingKeysKind || (SafroleSealingKeysKind = {}));
9088
- const codecBandersnatchKey = codec.bytes(BANDERSNATCH_KEY_BYTES).asOpaque();
9018
+ const codecBandersnatchKey = bytes(BANDERSNATCH_KEY_BYTES).asOpaque();
9089
9019
  class SafroleSealingKeysData extends WithDebug {
9090
9020
  kind;
9091
9021
  keys;
9092
9022
  tickets;
9093
9023
  static Codec = codecWithContext((context) => {
9094
- const keysCodec = codec
9095
- .sequenceFixLen(codecBandersnatchKey, context.epochLength)
9096
- .convert((keys) => Array.from(keys), (keys) => tryAsPerEpochBlock(keys, context));
9097
- const ticketsCodec = codec.sequenceFixLen(Ticket.Codec, context.epochLength).convert((tickets) => Array.from(tickets), (tickets) => tryAsPerEpochBlock(tickets, context));
9098
- return codec
9099
- .union("SafroleSealingKeys", {
9100
- [SafroleSealingKeysKind.Keys]: codec.object({ keys: keysCodec }),
9101
- [SafroleSealingKeysKind.Tickets]: codec.object({ tickets: ticketsCodec }),
9102
- })
9103
- .convert((x) => x, (x) => {
9024
+ return custom({
9025
+ name: "SafroleSealingKeys",
9026
+ sizeHint: { bytes: 1 + HASH_SIZE * context.epochLength, isExact: false },
9027
+ }, (e, x) => {
9028
+ e.varU32(tryAsU32(x.kind));
9104
9029
  if (x.kind === SafroleSealingKeysKind.Keys) {
9105
- return SafroleSealingKeysData.keys(x.keys);
9030
+ e.sequenceFixLen(codecBandersnatchKey, x.keys);
9106
9031
  }
9107
- return SafroleSealingKeysData.tickets(x.tickets);
9032
+ else {
9033
+ e.sequenceFixLen(Ticket.Codec, x.tickets);
9034
+ }
9035
+ }, (d) => {
9036
+ const epochLength = context.epochLength;
9037
+ const kind = d.varU32();
9038
+ if (kind === SafroleSealingKeysKind.Keys) {
9039
+ const keys = d.sequenceFixLen(codecBandersnatchKey, epochLength);
9040
+ return SafroleSealingKeysData.keys(tryAsPerEpochBlock(keys, context));
9041
+ }
9042
+ if (kind === SafroleSealingKeysKind.Tickets) {
9043
+ const tickets = d.sequenceFixLen(Ticket.Codec, epochLength);
9044
+ return SafroleSealingKeysData.tickets(tryAsPerEpochBlock(tickets, context));
9045
+ }
9046
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9047
+ }, (s) => {
9048
+ const kind = s.decoder.varU32();
9049
+ if (kind === SafroleSealingKeysKind.Keys) {
9050
+ s.sequenceFixLen(codecBandersnatchKey, context.epochLength);
9051
+ return;
9052
+ }
9053
+ if (kind === SafroleSealingKeysKind.Tickets) {
9054
+ s.sequenceFixLen(Ticket.Codec, context.epochLength);
9055
+ return;
9056
+ }
9057
+ throw new Error(`Unexpected safrole sealing keys kind: ${kind}`);
9108
9058
  });
9109
9059
  });
9110
9060
  static keys(keys) {
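
Same replacement pattern as `WorkExecResult.Codec`: the `union` of Keys/Tickets becomes a context-aware `custom` descriptor that writes a varU32 tag and then a fixed-length sequence of `context.epochLength` items, either Bandersnatch keys or tickets, wrapping the decoded array with `tryAsPerEpochBlock`; any other tag now raises "Unexpected safrole sealing keys kind". A standalone mirror of the encode path; `e` stands for an encoder exposing varU32()/sequenceFixLen() as used above:

    // Encode path introduced above, written as a plain function for illustration.
    function encodeSealingKeys(e, x) {
      e.varU32(tryAsU32(x.kind)); // SafroleSealingKeysKind.Tickets = 0, Keys = 1
      if (x.kind === SafroleSealingKeysKind.Keys) {
        e.sequenceFixLen(codecBandersnatchKey, x.keys);   // epochLength Bandersnatch keys
      } else {
        e.sequenceFixLen(Ticket.Codec, x.tickets);        // epochLength tickets
      }
    }
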
@@ -9125,11 +9075,11 @@ class SafroleData {
9125
9075
  epochRoot;
9126
9076
  sealingKeySeries;
9127
9077
  ticketsAccumulator;
9128
- static Codec = codec.Class(SafroleData, {
9078
+ static Codec = Class(SafroleData, {
9129
9079
  nextValidatorData: codecPerValidator(ValidatorData.Codec),
9130
- epochRoot: codec.bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
9080
+ epochRoot: bytes(BANDERSNATCH_RING_ROOT_BYTES).asOpaque(),
9131
9081
  sealingKeySeries: SafroleSealingKeysData.Codec,
9132
- ticketsAccumulator: codec.readonlyArray(codec.sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
9082
+ ticketsAccumulator: readonlyArray(sequenceVarLen(Ticket.Codec)).convert(seeThrough, asKnownSize),
9133
9083
  });
9134
9084
  static create({ nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator }) {
9135
9085
  return new SafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsAccumulator);
@@ -9207,17 +9157,17 @@ class ServiceAccountInfo extends WithDebug {
9207
9157
  created;
9208
9158
  lastAccumulation;
9209
9159
  parentService;
9210
- static Codec = codec.Class(ServiceAccountInfo, {
9211
- codeHash: codec.bytes(HASH_SIZE).asOpaque(),
9212
- balance: codec.u64,
9213
- accumulateMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9214
- onTransferMinGas: codec.u64.convert((x) => x, tryAsServiceGas),
9215
- storageUtilisationBytes: codec.u64,
9216
- gratisStorage: codec.u64,
9217
- storageUtilisationCount: codec.u32,
9218
- created: codec.u32.convert((x) => x, tryAsTimeSlot),
9219
- lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
9220
- parentService: codec.u32.convert((x) => x, tryAsServiceId),
9160
+ static Codec = Class(ServiceAccountInfo, {
9161
+ codeHash: bytes(HASH_SIZE).asOpaque(),
9162
+ balance: u64,
9163
+ accumulateMinGas: u64.convert((x) => x, tryAsServiceGas),
9164
+ onTransferMinGas: u64.convert((x) => x, tryAsServiceGas),
9165
+ storageUtilisationBytes: u64,
9166
+ gratisStorage: u64,
9167
+ storageUtilisationCount: u32,
9168
+ created: u32.convert((x) => x, tryAsTimeSlot),
9169
+ lastAccumulation: u32.convert((x) => x, tryAsTimeSlot),
9170
+ parentService: u32.convert((x) => x, tryAsServiceId),
9221
9171
  });
9222
9172
  static create(a) {
9223
9173
  return new ServiceAccountInfo(a.codeHash, a.balance, a.accumulateMinGas, a.onTransferMinGas, a.storageUtilisationBytes, a.gratisStorage, a.storageUtilisationCount, a.created, a.lastAccumulation, a.parentService);
@@ -9273,9 +9223,9 @@ class ServiceAccountInfo extends WithDebug {
9273
9223
  class PreimageItem extends WithDebug {
9274
9224
  hash;
9275
9225
  blob;
9276
- static Codec = codec.Class(PreimageItem, {
9277
- hash: codec.bytes(HASH_SIZE).asOpaque(),
9278
- blob: codec.blob,
9226
+ static Codec = Class(PreimageItem, {
9227
+ hash: bytes(HASH_SIZE).asOpaque(),
9228
+ blob: blob,
9279
9229
  });
9280
9230
  static create({ hash, blob }) {
9281
9231
  return new PreimageItem(hash, blob);
@@ -9289,9 +9239,9 @@ class PreimageItem extends WithDebug {
9289
9239
  class StorageItem extends WithDebug {
9290
9240
  key;
9291
9241
  value;
9292
- static Codec = codec.Class(StorageItem, {
9293
- key: codec.blob.convert((i) => i, (o) => asOpaqueType(o)),
9294
- value: codec.blob,
9242
+ static Codec = Class(StorageItem, {
9243
+ key: blob.convert((i) => i, (o) => asOpaqueType(o)),
9244
+ value: blob,
9295
9245
  });
9296
9246
  static create({ key, value }) {
9297
9247
  return new StorageItem(key, value);
@@ -9345,13 +9295,13 @@ class ValidatorStatistics {
9345
9295
  preImagesSize;
9346
9296
  guarantees;
9347
9297
  assurances;
9348
- static Codec = codec.Class(ValidatorStatistics, {
9349
- blocks: codec.u32,
9350
- tickets: codec.u32,
9351
- preImages: codec.u32,
9352
- preImagesSize: codec.u32,
9353
- guarantees: codec.u32,
9354
- assurances: codec.u32,
9298
+ static Codec = Class(ValidatorStatistics, {
9299
+ blocks: u32,
9300
+ tickets: u32,
9301
+ preImages: u32,
9302
+ preImagesSize: u32,
9303
+ guarantees: u32,
9304
+ assurances: u32,
9355
9305
  });
9356
9306
  static create({ blocks, tickets, preImages, preImagesSize, guarantees, assurances, }) {
9357
9307
  return new ValidatorStatistics(blocks, tickets, preImages, preImagesSize, guarantees, assurances);
@@ -9381,9 +9331,9 @@ class ValidatorStatistics {
9381
9331
  return new ValidatorStatistics(zero, zero, zero, zero, zero, zero);
9382
9332
  }
9383
9333
  }
9384
- const codecVarU16 = codec.varU32.convert((i) => tryAsU32(i), (o) => tryAsU16(o));
9334
+ const codecVarU16 = varU32.convert((i) => tryAsU32(i), (o) => tryAsU16(o));
9385
9335
  /** Encode/decode unsigned gas. */
9386
- const codecVarGas = codec.varU64.convert((g) => tryAsU64(g), (i) => tryAsServiceGas(i));
9336
+ const codecVarGas = varU64.convert((g) => tryAsU64(g), (i) => tryAsServiceGas(i));
9387
9337
  /**
9388
9338
  * Single core statistics.
9389
9339
  * Updated per block, based on incoming work reports (`w`).
@@ -9400,14 +9350,14 @@ class CoreStatistics {
9400
9350
  extrinsicCount;
9401
9351
  bundleSize;
9402
9352
  gasUsed;
9403
- static Codec = codec.Class(CoreStatistics, {
9404
- dataAvailabilityLoad: codec.varU32,
9353
+ static Codec = Class(CoreStatistics, {
9354
+ dataAvailabilityLoad: varU32,
9405
9355
  popularity: codecVarU16,
9406
9356
  imports: codecVarU16,
9407
9357
  extrinsicCount: codecVarU16,
9408
- extrinsicSize: codec.varU32,
9358
+ extrinsicSize: varU32,
9409
9359
  exports: codecVarU16,
9410
- bundleSize: codec.varU32,
9360
+ bundleSize: varU32,
9411
9361
  gasUsed: codecVarGas,
9412
9362
  });
9413
9363
  static create(v) {
@@ -9466,31 +9416,31 @@ class ServiceStatistics {
9466
9416
  onTransfersCount;
9467
9417
  onTransfersGasUsed;
9468
9418
  static Codec = Compatibility.selectIfGreaterOrEqual({
9469
- fallback: codec.Class(ServiceStatistics, {
9419
+ fallback: Class(ServiceStatistics, {
9470
9420
  providedCount: codecVarU16,
9471
- providedSize: codec.varU32,
9472
- refinementCount: codec.varU32,
9421
+ providedSize: varU32,
9422
+ refinementCount: varU32,
9473
9423
  refinementGasUsed: codecVarGas,
9474
9424
  imports: codecVarU16,
9475
9425
  extrinsicCount: codecVarU16,
9476
- extrinsicSize: codec.varU32,
9426
+ extrinsicSize: varU32,
9477
9427
  exports: codecVarU16,
9478
- accumulateCount: codec.varU32,
9428
+ accumulateCount: varU32,
9479
9429
  accumulateGasUsed: codecVarGas,
9480
- onTransfersCount: codec.varU32,
9430
+ onTransfersCount: varU32,
9481
9431
  onTransfersGasUsed: codecVarGas,
9482
9432
  }),
9483
9433
  versions: {
9484
- [GpVersion.V0_7_1]: codec.Class(ServiceStatistics, {
9434
+ [GpVersion.V0_7_1]: Class(ServiceStatistics, {
9485
9435
  providedCount: codecVarU16,
9486
- providedSize: codec.varU32,
9487
- refinementCount: codec.varU32,
9436
+ providedSize: varU32,
9437
+ refinementCount: varU32,
9488
9438
  refinementGasUsed: codecVarGas,
9489
9439
  imports: codecVarU16,
9490
9440
  extrinsicCount: codecVarU16,
9491
- extrinsicSize: codec.varU32,
9441
+ extrinsicSize: varU32,
9492
9442
  exports: codecVarU16,
9493
- accumulateCount: codec.varU32,
9443
+ accumulateCount: varU32,
9494
9444
  accumulateGasUsed: codecVarGas,
9495
9445
  onTransfersCount: ignoreValueWithDefault(tryAsU32(0)),
9496
9446
  onTransfersGasUsed: ignoreValueWithDefault(tryAsServiceGas(0)),
@@ -9551,11 +9501,11 @@ class StatisticsData {
9551
9501
  previous;
9552
9502
  cores;
9553
9503
  services;
9554
- static Codec = codec.Class(StatisticsData, {
9504
+ static Codec = Class(StatisticsData, {
9555
9505
  current: codecPerValidator(ValidatorStatistics.Codec),
9556
9506
  previous: codecPerValidator(ValidatorStatistics.Codec),
9557
9507
  cores: codecPerCore(CoreStatistics.Codec),
9558
- services: codec.dictionary(codec.u32.asOpaque(), ServiceStatistics.Codec, {
9508
+ services: dictionary(u32.asOpaque(), ServiceStatistics.Codec, {
9559
9509
  sortKeys: (a, b) => a - b,
9560
9510
  }),
9561
9511
  });
@@ -9637,14 +9587,14 @@ class PrivilegedServices {
9637
9587
  assigners;
9638
9588
  autoAccumulateServices;
9639
9589
  /** https://graypaper.fluffylabs.dev/#/ab2cdbd/3bbd023bcb02?v=0.7.2 */
9640
- static Codec = codec.Class(PrivilegedServices, {
9641
- manager: codec.u32.asOpaque(),
9642
- assigners: codecPerCore(codec.u32.asOpaque()),
9643
- delegator: codec.u32.asOpaque(),
9590
+ static Codec = Class(PrivilegedServices, {
9591
+ manager: u32.asOpaque(),
9592
+ assigners: codecPerCore(u32.asOpaque()),
9593
+ delegator: u32.asOpaque(),
9644
9594
  registrar: Compatibility.isGreaterOrEqual(GpVersion.V0_7_1)
9645
- ? codec.u32.asOpaque()
9595
+ ? u32.asOpaque()
9646
9596
  : ignoreValueWithDefault(tryAsServiceId(2 ** 32 - 1)),
9647
- autoAccumulateServices: codec.dictionary(codec.u32.asOpaque(), codec.u64.asOpaque(), {
9597
+ autoAccumulateServices: dictionary(u32.asOpaque(), u64.asOpaque(), {
9648
9598
  sortKeys: (a, b) => a - b,
9649
9599
  }),
9650
9600
  });
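
`dictionary(keyDescriptor, valueDescriptor, { sortKeys })` appears twice in this region (auto-accumulate services above, per-service statistics earlier); the `sortKeys` comparator presumably exists to keep the encoded key order deterministic. The shape, lifted from the call above:

    // Map of serviceId -> gas, encoded with numerically sorted keys.
    const autoAccumulateServicesCodec = dictionary(u32.asOpaque(), u64.asOpaque(), {
      sortKeys: (a, b) => a - b,
    });
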
@@ -10289,15 +10239,15 @@ class InMemoryState extends WithDebug {
10289
10239
  });
10290
10240
  }
10291
10241
  }
10292
- const serviceEntriesCodec = codec.object({
10293
- storageKeys: codec.sequenceVarLen(codec.blob.convert((i) => i, (o) => asOpaqueType(o))),
10294
- preimages: codec.sequenceVarLen(codec.bytes(HASH_SIZE).asOpaque()),
10295
- lookupHistory: codec.sequenceVarLen(codec.object({
10296
- hash: codec.bytes(HASH_SIZE).asOpaque(),
10297
- length: codec.u32,
10242
+ const serviceEntriesCodec = object({
10243
+ storageKeys: sequenceVarLen(blob.convert((i) => i, (o) => asOpaqueType(o))),
10244
+ preimages: sequenceVarLen(bytes(HASH_SIZE).asOpaque()),
10245
+ lookupHistory: sequenceVarLen(object({
10246
+ hash: bytes(HASH_SIZE).asOpaque(),
10247
+ length: u32,
10298
10248
  })),
10299
10249
  });
10300
- const serviceDataCodec = codec.dictionary(codec.u32.asOpaque(), serviceEntriesCodec, {
10250
+ const serviceDataCodec = dictionary(u32.asOpaque(), serviceEntriesCodec, {
10301
10251
  sortKeys: (a, b) => a - b,
10302
10252
  });
10303
10253
 
@@ -10400,7 +10350,7 @@ var serialize;
10400
10350
  /** C(6): https://graypaper.fluffylabs.dev/#/7e6ff6a/3bf3013bf301?v=0.6.7 */
10401
10351
  serialize.entropy = {
10402
10352
  key: stateKeys.index(StateKeyIdx.Eta),
10403
- Codec: codecFixedSizeArray(codec.bytes(HASH_SIZE).asOpaque(), ENTROPY_ENTRIES),
10353
+ Codec: codecFixedSizeArray(bytes(HASH_SIZE).asOpaque(), ENTROPY_ENTRIES),
10404
10354
  extract: (s) => s.entropy,
10405
10355
  };
10406
10356
  /** C(7): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b00023b0002?v=0.6.7 */
@@ -10430,7 +10380,7 @@ var serialize;
10430
10380
  /** C(11): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b3e023b3e02?v=0.6.7 */
10431
10381
  serialize.timeslot = {
10432
10382
  key: stateKeys.index(StateKeyIdx.Tau),
10433
- Codec: codec.u32.asOpaque(),
10383
+ Codec: u32.asOpaque(),
10434
10384
  extract: (s) => s.timeslot,
10435
10385
  };
10436
10386
  /** C(12): https://graypaper.fluffylabs.dev/#/7e6ff6a/3b4c023b4c02?v=0.6.7 */
@@ -10460,7 +10410,7 @@ var serialize;
10460
10410
  /** C(16): https://graypaper.fluffylabs.dev/#/38c4e62/3b46033b4603?v=0.7.0 */
10461
10411
  serialize.accumulationOutputLog = {
10462
10412
  key: stateKeys.index(StateKeyIdx.Theta),
10463
- Codec: codec.sequenceVarLen(AccumulationOutput.Codec).convert((i) => i.array, (o) => SortedArray.fromSortedArray(accumulationOutputComparator, o)),
10413
+ Codec: sequenceVarLen(AccumulationOutput.Codec).convert((i) => i.array, (o) => SortedArray.fromSortedArray(accumulationOutputComparator, o)),
10464
10414
  extract: (s) => s.accumulationOutputLog,
10465
10415
  };
10466
10416
  /** C(255, s): https://graypaper.fluffylabs.dev/#/85129da/383103383103?v=0.6.3 */
@@ -10483,7 +10433,7 @@ var serialize;
10483
10433
  /** https://graypaper.fluffylabs.dev/#/85129da/387603387603?v=0.6.3 */
10484
10434
  serialize.serviceLookupHistory = (blake2b, serviceId, hash, len) => ({
10485
10435
  key: stateKeys.serviceLookupHistory(blake2b, serviceId, hash, len),
10486
- Codec: codec.readonlyArray(codec.sequenceVarLen(codec.u32)),
10436
+ Codec: readonlyArray(sequenceVarLen(u32)),
10487
10437
  });
10488
10438
  })(serialize || (serialize = {}));
10489
10439
  /**
@@ -11577,7 +11527,7 @@ function getSafroleData(nextValidatorData, epochRoot, sealingKeySeries, ticketsA
11577
11527
 
11578
11528
  const TYPICAL_STATE_ITEMS = 50;
11579
11529
  const TYPICAL_STATE_ITEM_LEN = 50;
11580
- const stateEntriesSequenceCodec = codec.sequenceVarLen(codec.pair(codec.bytes(TRUNCATED_HASH_SIZE), codec.blob));
11530
+ const stateEntriesSequenceCodec = sequenceVarLen(pair(bytes(TRUNCATED_HASH_SIZE), blob));
11581
11531
  /**
11582
11532
  * Full, in-memory state represented as serialized entries dictionary.
11583
11533
  *
@@ -11585,7 +11535,7 @@ const stateEntriesSequenceCodec = codec.sequenceVarLen(codec.pair(codec.bytes(TR
11585
11535
  */
11586
11536
  class StateEntries {
11587
11537
  dictionary;
11588
- static Codec = codec.custom({
11538
+ static Codec = custom({
11589
11539
  name: "StateEntries",
11590
11540
  sizeHint: {
11591
11541
  isExact: false,
@@ -12405,10 +12355,10 @@ class Version extends WithDebug {
12405
12355
  major;
12406
12356
  minor;
12407
12357
  patch;
12408
- static Codec = codec.Class(Version, {
12409
- major: codec.u8,
12410
- minor: codec.u8,
12411
- patch: codec.u8,
12358
+ static Codec = Class(Version, {
12359
+ major: u8,
12360
+ minor: u8,
12361
+ patch: u8,
12412
12362
  });
12413
12363
  static tryFromString(str) {
12414
12364
  const parse = (v) => tryAsU8(Number(v));
@@ -12460,12 +12410,12 @@ class PeerInfo extends WithDebug {
12460
12410
  jamVersion;
12461
12411
  appVersion;
12462
12412
  name;
12463
- static Codec = codec.Class(PeerInfo, {
12464
- fuzzVersion: codec.u8,
12465
- features: codec.u32,
12413
+ static Codec = Class(PeerInfo, {
12414
+ fuzzVersion: u8,
12415
+ features: u32,
12466
12416
  jamVersion: Version.Codec,
12467
12417
  appVersion: Version.Codec,
12468
- name: codec.string,
12418
+ name: string,
12469
12419
  });
12470
12420
  static create({ fuzzVersion, features, appVersion, jamVersion, name }) {
12471
12421
  return new PeerInfo(fuzzVersion, features, jamVersion, appVersion, name);
@@ -12488,9 +12438,9 @@ class PeerInfo extends WithDebug {
12488
12438
  class AncestryItem extends WithDebug {
12489
12439
  slot;
12490
12440
  headerHash;
12491
- static Codec = codec.Class(AncestryItem, {
12492
- slot: codec.u32.asOpaque(),
12493
- headerHash: codec.bytes(HASH_SIZE).asOpaque(),
12441
+ static Codec = Class(AncestryItem, {
12442
+ slot: u32.asOpaque(),
12443
+ headerHash: bytes(HASH_SIZE).asOpaque(),
12494
12444
  });
12495
12445
  static create({ slot, headerHash }) {
12496
12446
  return new AncestryItem(slot, headerHash);
@@ -12510,9 +12460,9 @@ class AncestryItem extends WithDebug {
12510
12460
  class KeyValue extends WithDebug {
12511
12461
  key;
12512
12462
  value;
12513
- static Codec = codec.Class(KeyValue, {
12514
- key: codec.bytes(TRUNCATED_HASH_SIZE),
12515
- value: codec.blob,
12463
+ static Codec = Class(KeyValue, {
12464
+ key: bytes(TRUNCATED_HASH_SIZE),
12465
+ value: blob,
12516
12466
  });
12517
12467
  static create({ key, value }) {
12518
12468
  return new KeyValue(key, value);
@@ -12524,12 +12474,12 @@ class KeyValue extends WithDebug {
12524
12474
  }
12525
12475
  }
12526
12476
  /** State ::= SEQUENCE OF KeyValue */
12527
- const stateCodec = codec.sequenceVarLen(KeyValue.Codec);
12477
+ const stateCodec = sequenceVarLen(KeyValue.Codec);
12528
12478
  /**
12529
12479
  * Ancestry ::= SEQUENCE (SIZE(0..24)) OF AncestryItem
12530
12480
  * Empty when `feature-ancestry` is not supported by both parties
12531
12481
  */
12532
- const ancestryCodec = codec.sequenceVarLen(AncestryItem.Codec, {
12482
+ const ancestryCodec = sequenceVarLen(AncestryItem.Codec, {
12533
12483
  minLength: 0,
12534
12484
  maxLength: 24,
12535
12485
  });
@@ -12544,7 +12494,7 @@ class Initialize extends WithDebug {
12544
12494
  header;
12545
12495
  keyvals;
12546
12496
  ancestry;
12547
- static Codec = codec.Class(Initialize, {
12497
+ static Codec = Class(Initialize, {
12548
12498
  header: Header.Codec,
12549
12499
  keyvals: stateCodec,
12550
12500
  ancestry: ancestryCodec,
@@ -12560,14 +12510,14 @@ class Initialize extends WithDebug {
12560
12510
  }
12561
12511
  }
12562
12512
  /** GetState ::= HeaderHash */
12563
- const getStateCodec = codec.bytes(HASH_SIZE).asOpaque();
12513
+ const getStateCodec = bytes(HASH_SIZE).asOpaque();
12564
12514
  /** StateRoot ::= StateRootHash */
12565
- const stateRootCodec = codec.bytes(HASH_SIZE).asOpaque();
12515
+ const stateRootCodec = bytes(HASH_SIZE).asOpaque();
12566
12516
  /** Error ::= UTF8String */
12567
12517
  class ErrorMessage extends WithDebug {
12568
12518
  message;
12569
- static Codec = codec.Class(ErrorMessage, {
12570
- message: codec.string,
12519
+ static Codec = Class(ErrorMessage, {
12520
+ message: string,
12571
12521
  });
12572
12522
  static create({ message }) {
12573
12523
  return new ErrorMessage(message);
@@ -12599,7 +12549,7 @@ var MessageType;
12599
12549
  * error [255] Error
12600
12550
  * }
12601
12551
  */
12602
- const messageCodec = codec.custom({
12552
+ const messageCodec = custom({
12603
12553
  name: "Message",
12604
12554
  sizeHint: { bytes: 1, isExact: false },
12605
12555
  }, (e, msg) => {
@@ -18084,7 +18034,7 @@ class Assign {
  // NOTE: Here we know the core index is valid
  const coreIndex = tryAsCoreIndex(Number(maybeCoreIndex));
  const decoder = Decoder.fromBlob(res);
- const authQueue = decoder.sequenceFixLen(codec.bytes(HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE);
+ const authQueue = decoder.sequenceFixLen(bytes(HASH_SIZE).asOpaque(), AUTHORIZATION_QUEUE_SIZE);
  const fixedSizeAuthQueue = FixedSizeArray.new(authQueue, AUTHORIZATION_QUEUE_SIZE);
  const result = this.partialState.updateAuthorizationQueue(coreIndex, fixedSizeAuthQueue, assigners);
  if (result.isOk) {
@@ -18108,9 +18058,9 @@ class Assign {
  }

  const IN_OUT_REG$m = 7;
- const serviceIdAndGasCodec = codec.object({
- serviceId: codec.u32.convert((i) => i, (o) => asOpaqueType(o)),
- gas: codec.u64.convert((i) => tryAsU64(i), (o) => tryAsServiceGas(o)),
+ const serviceIdAndGasCodec = object({
+ serviceId: u32.convert((i) => i, (o) => asOpaqueType(o)),
+ gas: u64.convert((i) => tryAsU64(i), (o) => tryAsServiceGas(o)),
  });
  /**
  * Modify privileged services and services that auto-accumulate every block.
@@ -18168,7 +18118,7 @@ class Bless {
  memIndex = tryAsU64(memIndex + tryAsU64(decoder.bytesRead()));
  }
  // https://graypaper.fluffylabs.dev/#/7e6ff6a/367200367200?v=0.6.7
- const res = safeAllocUint8Array(tryAsExactBytes(codec.u32.sizeHint) * this.chainSpec.coresCount);
+ const res = safeAllocUint8Array(tryAsExactBytes(u32.sizeHint) * this.chainSpec.coresCount);
  const authorizersDecoder = Decoder.fromBlob(res);
  const memoryReadResult = memory.loadInto(res, authorization);
  if (memoryReadResult.isError) {
@@ -18176,7 +18126,7 @@ class Bless {
  return PvmExecution.Panic;
  }
  // `a`
- const authorizers = tryAsPerCore(authorizersDecoder.sequenceFixLen(codec.u32.asOpaque(), this.chainSpec.coresCount), this.chainSpec);
+ const authorizers = tryAsPerCore(authorizersDecoder.sequenceFixLen(u32.asOpaque(), this.chainSpec.coresCount), this.chainSpec);
  const updateResult = this.partialState.updatePrivilegedServices(manager, authorizers, delegator, registrar, autoAccumulate);
  if (updateResult.isOk) {
  logger$7.trace `[${this.currentServiceId}] BLESS(m: ${manager}, a: [${authorizers}], v: ${delegator}, r: ${registrar}, ${lazyInspect(autoAccumulate)}) <- OK`;
@@ -18786,12 +18736,12 @@ class PendingTransfer {
  amount;
  memo;
  gas;
- static Codec = codec.Class(PendingTransfer, {
- source: codec.u32.asOpaque(),
- destination: codec.u32.asOpaque(),
- amount: codec.u64,
- memo: codec.bytes(TRANSFER_MEMO_BYTES),
- gas: codec.u64.asOpaque(),
+ static Codec = Class(PendingTransfer, {
+ source: u32.asOpaque(),
+ destination: u32.asOpaque(),
+ amount: u64,
+ memo: bytes(TRANSFER_MEMO_BYTES),
+ gas: u64.asOpaque(),
  });
  constructor(
  /** `s`: sending service */
@@ -19089,18 +19039,18 @@ class Info {
  *
  * https://graypaper.fluffylabs.dev/#/ab2cdbd/33920033b500?v=0.7.2
  */
- const codecServiceAccountInfoWithThresholdBalance = codec.object({
- codeHash: codec.bytes(HASH_SIZE),
- balance: codec.u64,
- thresholdBalance: codec.u64,
- accumulateMinGas: codec.u64.convert((i) => i, tryAsServiceGas),
- onTransferMinGas: codec.u64.convert((i) => i, tryAsServiceGas),
- storageUtilisationBytes: codec.u64,
- storageUtilisationCount: codec.u32,
- gratisStorage: codec.u64,
- created: codec.u32.convert((x) => x, tryAsTimeSlot),
- lastAccumulation: codec.u32.convert((x) => x, tryAsTimeSlot),
- parentService: codec.u32.convert((x) => x, tryAsServiceId),
+ const codecServiceAccountInfoWithThresholdBalance = object({
+ codeHash: bytes(HASH_SIZE),
+ balance: u64,
+ thresholdBalance: u64,
+ accumulateMinGas: u64.convert((i) => i, tryAsServiceGas),
+ onTransferMinGas: u64.convert((i) => i, tryAsServiceGas),
+ storageUtilisationBytes: u64,
+ storageUtilisationCount: u32,
+ gratisStorage: u64,
+ created: u32.convert((x) => x, tryAsTimeSlot),
+ lastAccumulation: u32.convert((x) => x, tryAsTimeSlot),
+ parentService: u32.convert((x) => x, tryAsServiceId),
  }, "ServiceAccountInfoWithThresholdBalance");

  const decoder = new TextDecoder("utf8");
@@ -19476,9 +19426,9 @@ class HistoricalLookup {

  const IN_OUT_REG_1 = 7;
  const IN_OUT_REG_2 = 8;
- const gasAndRegistersCodec = codec.object({
- gas: codec.i64,
- registers: codec.bytes(NO_OF_REGISTERS$1 * REGISTER_BYTE_SIZE),
+ const gasAndRegistersCodec = object({
+ gas: i64,
+ registers: bytes(NO_OF_REGISTERS$1 * REGISTER_BYTE_SIZE),
  });
  const GAS_REGISTERS_SIZE = tryAsExactBytes(gasAndRegistersCodec.sizeHint);
  /**
@@ -20324,21 +20274,21 @@ const GAS_TO_INVOKE_WORK_REPORT = 10000000n;
  * https://graypaper.fluffylabs.dev/#/ab2cdbd/176b00176b00?v=0.7.2
  */
  class Operand extends WithDebug {
- static Codec = codec.Class(Operand, {
+ static Codec = Class(Operand, {
  // h
- hash: codec.bytes(HASH_SIZE).asOpaque(),
+ hash: bytes(HASH_SIZE).asOpaque(),
  // e
- exportsRoot: codec.bytes(HASH_SIZE).asOpaque(),
+ exportsRoot: bytes(HASH_SIZE).asOpaque(),
  // a
- authorizerHash: codec.bytes(HASH_SIZE).asOpaque(),
+ authorizerHash: bytes(HASH_SIZE).asOpaque(),
  // y
- payloadHash: codec.bytes(HASH_SIZE),
+ payloadHash: bytes(HASH_SIZE),
  // g
- gas: codec.varU64.asOpaque(),
+ gas: varU64.asOpaque(),
  // d
  result: WorkExecResult.Codec,
  // o
- authorizationOutput: codec.blob,
+ authorizationOutput: blob,
  });
  /**
  * https://graypaper.fluffylabs.dev/#/ab2cdbd/18680118eb01?v=0.7.2
@@ -20609,11 +20559,7 @@ function verifyReportsBasic(input) {
  const authOutputSize = reportView.authorizationOutput.view().length;
  let totalOutputsSize = 0;
  for (const item of reportView.results.view()) {
- const workItemView = item.view();
- const result = workItemView.result.materialize();
- if (result.kind === WorkExecResultKind.ok) {
- totalOutputsSize += result.okBlob?.raw.length ?? 0;
- }
+ totalOutputsSize += item.view().result.view().okBlob?.raw.length ?? 0;
  }
  if (authOutputSize + totalOutputsSize > MAX_WORK_REPORT_SIZE_BYTES) {
  return Result$1.error(ReportsError.WorkReportTooBig, () => `Work report at ${reportView.coreIndex.materialize()} too big. Got ${authOutputSize} + ${totalOutputsSize}, max: ${MAX_WORK_REPORT_SIZE_BYTES}`);
@@ -20627,47 +20573,75 @@ var TransferOperandKind;
  TransferOperandKind[TransferOperandKind["OPERAND"] = 0] = "OPERAND";
  TransferOperandKind[TransferOperandKind["TRANSFER"] = 1] = "TRANSFER";
  })(TransferOperandKind || (TransferOperandKind = {}));
- const TRANSFER_OR_OPERAND = codec.union("TransferOrOperand", {
- [TransferOperandKind.OPERAND]: codec.object({ value: Operand.Codec }),
- [TransferOperandKind.TRANSFER]: codec.object({ value: PendingTransfer.Codec }),
+ const TRANSFER_OR_OPERAND = custom({
+ name: "TransferOrOperand",
+ sizeHint: { bytes: 1, isExact: false },
+ }, (e, x) => {
+ e.varU32(tryAsU32(x.kind));
+ if (x.kind === TransferOperandKind.OPERAND) {
+ e.object(Operand.Codec, x.value);
+ }
+ if (x.kind === TransferOperandKind.TRANSFER) {
+ e.object(PendingTransfer.Codec, x.value);
+ }
+ }, (d) => {
+ const kind = d.varU32();
+ if (kind === TransferOperandKind.OPERAND) {
+ return {
+ kind: TransferOperandKind.OPERAND,
+ value: d.object(Operand.Codec),
+ };
+ }
+ if (kind === TransferOperandKind.TRANSFER) {
+ return { kind: TransferOperandKind.TRANSFER, value: d.object(PendingTransfer.Codec) };
+ }
+ throw new Error(`Unable to decode TransferOrOperand. Invalid kind: ${kind}.`);
+ }, (s) => {
+ const kind = s.decoder.varU32();
+ if (kind === TransferOperandKind.OPERAND) {
+ s.object(Operand.Codec);
+ }
+ if (kind === TransferOperandKind.TRANSFER) {
+ s.object(PendingTransfer.Codec);
+ }
  });
- const TRANSFERS_AND_OPERANDS = codec.sequenceVarLen(TRANSFER_OR_OPERAND);
+ const TRANSFERS_AND_OPERANDS = sequenceVarLen(TRANSFER_OR_OPERAND);
  // https://github.com/gavofyork/graypaper/pull/414
  // 0.7.0 encoding is used for prior versions as well.
- const CONSTANTS_CODEC = codec.object({
- B_I: codec.u64,
- B_L: codec.u64,
- B_S: codec.u64,
- C: codec.u16,
- D: codec.u32,
- E: codec.u32,
- G_A: codec.u64,
- G_I: codec.u64,
- G_R: codec.u64,
- G_T: codec.u64,
- H: codec.u16,
- I: codec.u16,
- J: codec.u16,
- K: codec.u16,
- L: codec.u32,
- N: codec.u16,
- O: codec.u16,
- P: codec.u16,
- Q: codec.u16,
- R: codec.u16,
- T: codec.u16,
- U: codec.u16,
- V: codec.u16,
- W_A: codec.u32,
- W_B: codec.u32,
- W_C: codec.u32,
- W_E: codec.u32,
- W_M: codec.u32,
- W_P: codec.u32,
- W_R: codec.u32,
- W_T: codec.u32,
- W_X: codec.u32,
- Y: codec.u32,
+ const CONSTANTS_CODEC = object({
+ B_I: u64,
+ B_L: u64,
+ B_S: u64,
+ C: u16,
+ D: u32,
+ E: u32,
+ G_A: u64,
+ G_I: u64,
+ G_R: u64,
+ G_T: u64,
+ H: u16,
+ I: u16,
+ J: u16,
+ K: u16,
+ L: u32,
+ N: u16,
+ O: u16,
+ P: u16,
+ Q: u16,
+ R: u16,
+ T: u16,
+ U: u16,
+ V: u16,
+ W_A: u32,
+ W_B: u32,
+ W_C: u32,
+ W_E: u32,
+ W_M: u32,
+ W_P: u32,
+ W_R: u32,
+ W_T: u32,
+ W_X: u32,
+ Y: u32,
  });
  const encodedConstantsCache = new Map();
  function getEncodedConstants(chainSpec) {
@@ -20780,7 +20754,7 @@ class FetchExternalities {
  allOperands() {
  if (this.fetchData.context === FetchContext.LegacyAccumulate) {
  const operands = this.fetchData.operands;
- return Encoder.encodeObject(codec.sequenceVarLen(Operand.Codec), operands, this.chainSpec);
+ return Encoder.encodeObject(sequenceVarLen(Operand.Codec), operands, this.chainSpec);
  }
  return null;
  }
@@ -20801,7 +20775,7 @@ class FetchExternalities {
  allTransfers() {
  if (this.fetchData.context === FetchContext.LegacyOnTransfer) {
  const { transfers } = this.fetchData;
- return Encoder.encodeObject(codec.sequenceVarLen(PendingTransfer.Codec), transfers, this.chainSpec);
+ return Encoder.encodeObject(sequenceVarLen(PendingTransfer.Codec), transfers, this.chainSpec);
  }
  return null;
  }
@@ -21032,10 +21006,10 @@ function getWorkPackageHashes(reports) {
  const workPackageHashes = reports.map((report) => report.workPackageSpec.hash);
  return HashSet.from(workPackageHashes);
  }
- const NEXT_ID_CODEC = codec.object({
- serviceId: codec.varU32.asOpaque(),
- entropy: codec.bytes(HASH_SIZE).asOpaque(),
- timeslot: codec.varU32.asOpaque(),
+ const NEXT_ID_CODEC = object({
+ serviceId: varU32.asOpaque(),
+ entropy: bytes(HASH_SIZE).asOpaque(),
+ timeslot: varU32.asOpaque(),
  });
  /**
  * Generate a next service id.
@@ -21433,10 +21407,10 @@ var PvmInvocationError;
  PvmInvocationError[PvmInvocationError["PreimageTooLong"] = 2] = "PreimageTooLong";
  })(PvmInvocationError || (PvmInvocationError = {}));
  const logger$5 = Logger.new(import.meta.filename, "accumulate");
- const ARGS_CODEC$1 = codec.object({
- slot: codec.varU32.asOpaque(),
- serviceId: codec.varU32.asOpaque(),
- argsLength: codec.varU32,
+ const ARGS_CODEC$1 = object({
+ slot: varU32.asOpaque(),
+ serviceId: varU32.asOpaque(),
+ argsLength: varU32,
  });
  class Accumulate {
  chainSpec;
@@ -21846,10 +21820,10 @@ class Accumulate {
  }
  }

- const ARGS_CODEC = codec.object({
- timeslot: codec.varU32.asOpaque(),
- serviceId: codec.varU32.asOpaque(),
- transfersLength: codec.varU32,
+ const ARGS_CODEC = object({
+ timeslot: varU32.asOpaque(),
+ serviceId: varU32.asOpaque(),
+ transfersLength: varU32,
  });
  var DeferredTransfersErrorCode;
  (function (DeferredTransfersErrorCode) {
@@ -24578,7 +24552,7 @@ class TransitionHasher {
  extrinsic(extrinsicView) {
  // https://graypaper.fluffylabs.dev/#/cc517d7/0cfb000cfb00?v=0.6.5
  const guaranteesCount = tryAsU32(extrinsicView.guarantees.view().length);
- const countEncoded = Encoder.encodeObject(codec.varU32, guaranteesCount);
+ const countEncoded = Encoder.encodeObject(varU32, guaranteesCount);
  const guaranteesBlobs = extrinsicView.guarantees
  .view()
  .map((g) => g.view())
@@ -26292,7 +26266,7 @@ var index$3 = /*#__PURE__*/Object.freeze({
  startSameThread: startSameThread
  });

- const importBlockResultCodec = (hashName) => codec.custom({
+ const importBlockResultCodec = (hashName) => custom({
  name: `Result<${hashName}, string>`,
  sizeHint: { bytes: 1, isExact: false },
  }, (e, x) => {
@@ -26330,33 +26304,33 @@ const importBlockResultCodec = (hashName) => codec.custom({
  const protocol = createProtocol("importer", {
  toWorker: {
  getStateEntries: {
- request: codec.bytes(HASH_SIZE).asOpaque(),
- response: codec.optional(StateEntries.Codec),
+ request: bytes(HASH_SIZE).asOpaque(),
+ response: optional(StateEntries.Codec),
  },
  getBestStateRootHash: {
- request: codec.nothing,
- response: codec.bytes(HASH_SIZE).asOpaque(),
+ request: nothing,
+ response: bytes(HASH_SIZE).asOpaque(),
  },
  importBlock: {
  request: Block.Codec.View,
  response: importBlockResultCodec("HeaderHash"),
  },
  finish: {
- request: codec.nothing,
- response: codec.nothing,
+ request: nothing,
+ response: nothing,
  },
  },
  fromWorker: {
  bestHeaderAnnouncement: {
  request: headerViewWithHashCodec,
- response: codec.nothing,
+ response: nothing,
  },
  },
  });
  class ImporterConfig {
  pvm;
- static Codec = codec.Class(ImporterConfig, {
- pvm: codec.u8.convert((i) => tryAsU8(i), (o) => {
+ static Codec = Class(ImporterConfig, {
+ pvm: u8.convert((i) => tryAsU8(i), (o) => {
  if (o === PvmBackend.BuiltIn) {
  return PvmBackend.BuiltIn;
  }
@@ -26374,7 +26348,7 @@ class ImporterConfig {
  }
  }

- const WORKER = new URL("./bootstrap-importer.mjs", import.meta.url);
+ const WORKER = new URL(import.meta.resolve("./bootstrap-importer.mjs"));

  var index$2 = /*#__PURE__*/Object.freeze({
  __proto__: null,
@@ -26821,11 +26795,11 @@ class TestState {
  state_root: fromJson.bytes32(),
  keyvals: json.array(StateKeyVal.fromJson),
  };
- static Codec = codec.object({
- state_root: codec.bytes(HASH_SIZE).asOpaque(),
- keyvals: codec.sequenceVarLen(codec.object({
- key: codec.bytes(TRUNCATED_HASH_SIZE),
- value: codec.blob,
+ static Codec = object({
+ state_root: bytes(HASH_SIZE).asOpaque(),
+ keyvals: sequenceVarLen(object({
+ key: bytes(TRUNCATED_HASH_SIZE),
+ value: blob,
  })),
  });
  state_root;
@@ -26836,7 +26810,7 @@ class StateTransitionGenesis {
  header: headerFromJson,
  state: TestState.fromJson,
  };
- static Codec = codec.object({
+ static Codec = object({
  header: Header.Codec,
  state: TestState.Codec,
  });
@@ -26849,7 +26823,7 @@ class StateTransition {
  post_state: TestState.fromJson,
  block: blockFromJson(tinyChainSpec),
  };
- static Codec = codec.object({
+ static Codec = object({
  pre_state: TestState.Codec,
  block: Block.Codec,
  post_state: TestState.Codec,