@solana/codecs-data-structures 2.0.0-experimental.fc4e943 → 2.0.0-experimental.fcff844

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/dist/index.browser.cjs +385 -474
  2. package/dist/index.browser.cjs.map +1 -1
  3. package/dist/index.browser.js +389 -472
  4. package/dist/index.browser.js.map +1 -1
  5. package/dist/index.development.js +501 -566
  6. package/dist/index.development.js.map +1 -1
  7. package/dist/index.native.js +387 -474
  8. package/dist/index.native.js.map +1 -1
  9. package/dist/index.node.cjs +385 -476
  10. package/dist/index.node.cjs.map +1 -1
  11. package/dist/index.node.js +387 -474
  12. package/dist/index.node.js.map +1 -1
  13. package/dist/index.production.min.js +37 -41
  14. package/dist/types/array.d.ts +54 -10
  15. package/dist/types/array.d.ts.map +1 -1
  16. package/dist/types/bit-array.d.ts +9 -9
  17. package/dist/types/bit-array.d.ts.map +1 -1
  18. package/dist/types/boolean.d.ts +22 -10
  19. package/dist/types/boolean.d.ts.map +1 -1
  20. package/dist/types/bytes.d.ts +18 -9
  21. package/dist/types/bytes.d.ts.map +1 -1
  22. package/dist/types/data-enum.d.ts +20 -20
  23. package/dist/types/data-enum.d.ts.map +1 -1
  24. package/dist/types/index.d.ts +0 -1
  25. package/dist/types/index.d.ts.map +1 -1
  26. package/dist/types/map.d.ts +43 -10
  27. package/dist/types/map.d.ts.map +1 -1
  28. package/dist/types/nullable.d.ts +28 -10
  29. package/dist/types/nullable.d.ts.map +1 -1
  30. package/dist/types/scalar-enum.d.ts +22 -10
  31. package/dist/types/scalar-enum.d.ts.map +1 -1
  32. package/dist/types/set.d.ts +43 -10
  33. package/dist/types/set.d.ts.map +1 -1
  34. package/dist/types/struct.d.ts +28 -18
  35. package/dist/types/struct.d.ts.map +1 -1
  36. package/dist/types/tuple.d.ts +22 -15
  37. package/dist/types/tuple.d.ts.map +1 -1
  38. package/dist/types/unit.d.ts +4 -12
  39. package/dist/types/unit.d.ts.map +1 -1
  40. package/dist/types/utils.d.ts +10 -2
  41. package/dist/types/utils.d.ts.map +1 -1
  42. package/package.json +11 -11
  43. package/dist/types/array-like-codec-size.d.ts +0 -20
  44. package/dist/types/array-like-codec-size.d.ts.map +0 -1
@@ -5,7 +5,12 @@ var codecsNumbers = require('@solana/codecs-numbers');
5
5
 
6
6
  // src/array.ts
7
7
 
8
- // src/utils.ts
8
+ // src/assertions.ts
9
+ function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
10
+ if (expected !== actual) {
11
+ throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
12
+ }
13
+ }
9
14
  function maxCodecSizes(sizes) {
10
15
  return sizes.reduce(
11
16
  (all, size) => all === null || size === null ? null : Math.max(all, size),
@@ -15,106 +20,107 @@ function maxCodecSizes(sizes) {
15
20
  function sumCodecSizes(sizes) {
16
21
  return sizes.reduce((all, size) => all === null || size === null ? null : all + size, 0);
17
22
  }
23
+ function getFixedSize(codec) {
24
+ return codecsCore.isFixedSize(codec) ? codec.fixedSize : null;
25
+ }
26
+ function getMaxSize(codec) {
27
+ return codecsCore.isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
28
+ }
18
29
 
19
- // src/array-like-codec-size.ts
20
- function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
30
+ // src/array.ts
31
+ function getArrayEncoder(item, config = {}) {
32
+ const size = config.size ?? codecsNumbers.getU32Encoder();
33
+ if (size === "remainder") {
34
+ codecsCore.assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
35
+ }
36
+ const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
37
+ const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
38
+ return codecsCore.createEncoder({
39
+ ...fixedSize !== null ? { fixedSize } : {
40
+ getSizeFromValue: (array) => {
41
+ const prefixSize = typeof size === "object" ? codecsCore.getEncodedSize(array.length, size) : 0;
42
+ return prefixSize + [...array].reduce((all, value) => all + codecsCore.getEncodedSize(value, item), 0);
43
+ },
44
+ maxSize
45
+ },
46
+ write: (array, bytes, offset) => {
47
+ if (typeof size === "number") {
48
+ assertValidNumberOfItemsForCodec("array", size, array.length);
49
+ }
50
+ if (typeof size === "object") {
51
+ offset = size.write(array.length, bytes, offset);
52
+ }
53
+ array.forEach((value) => {
54
+ offset = item.write(value, bytes, offset);
55
+ });
56
+ return offset;
57
+ }
58
+ });
59
+ }
60
+ function getArrayDecoder(item, config = {}) {
61
+ const size = config.size ?? codecsNumbers.getU32Decoder();
62
+ if (size === "remainder") {
63
+ codecsCore.assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
64
+ }
65
+ const itemSize = getFixedSize(item);
66
+ const fixedSize = computeArrayLikeCodecSize(size, itemSize);
67
+ const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
68
+ return codecsCore.createDecoder({
69
+ ...fixedSize !== null ? { fixedSize } : { maxSize },
70
+ read: (bytes, offset) => {
71
+ const array = [];
72
+ if (typeof size === "object" && bytes.slice(offset).length === 0) {
73
+ return [array, offset];
74
+ }
75
+ const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset);
76
+ offset = newOffset;
77
+ for (let i = 0; i < resolvedSize; i += 1) {
78
+ const [value, newOffset2] = item.read(bytes, offset);
79
+ offset = newOffset2;
80
+ array.push(value);
81
+ }
82
+ return [array, offset];
83
+ }
84
+ });
85
+ }
86
+ function getArrayCodec(item, config = {}) {
87
+ return codecsCore.combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config));
88
+ }
89
+ function readArrayLikeCodecSize(size, itemSize, bytes, offset) {
21
90
  if (typeof size === "number") {
22
91
  return [size, offset];
23
92
  }
24
93
  if (typeof size === "object") {
25
- return size.decode(bytes, offset);
94
+ return size.read(bytes, offset);
26
95
  }
27
96
  if (size === "remainder") {
28
- const childrenSize = sumCodecSizes(childrenSizes);
29
- if (childrenSize === null) {
97
+ if (itemSize === null) {
30
98
  throw new Error('Codecs of "remainder" size must have fixed-size items.');
31
99
  }
32
- const remainder = bytes.slice(offset).length;
33
- if (remainder % childrenSize !== 0) {
100
+ const remainder = Math.max(0, bytes.length - offset);
101
+ if (remainder % itemSize !== 0) {
34
102
  throw new Error(
35
- `The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
103
+ `The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.`
36
104
  );
37
105
  }
38
- return [remainder / childrenSize, offset];
106
+ return [remainder / itemSize, offset];
39
107
  }
40
108
  throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
41
109
  }
42
- function getArrayLikeCodecSizeDescription(size) {
43
- return typeof size === "object" ? size.description : `${size}`;
44
- }
45
- function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
110
+ function computeArrayLikeCodecSize(size, itemSize) {
46
111
  if (typeof size !== "number")
47
112
  return null;
48
113
  if (size === 0)
49
114
  return 0;
50
- const childrenSize = sumCodecSizes(childrenSizes);
51
- return childrenSize === null ? null : childrenSize * size;
52
- }
53
- function getArrayLikeCodecSizePrefix(size, realSize) {
54
- return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
55
- }
56
-
57
- // src/assertions.ts
58
- function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
59
- if (expected !== actual) {
60
- throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
61
- }
62
- }
63
-
64
- // src/array.ts
65
- function arrayCodecHelper(item, size, description) {
66
- if (size === "remainder" && item.fixedSize === null) {
67
- throw new Error('Codecs of "remainder" size must have fixed-size items.');
68
- }
69
- return {
70
- description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
71
- fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
72
- maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
73
- };
74
- }
75
- function getArrayEncoder(item, options = {}) {
76
- const size = options.size ?? codecsNumbers.getU32Encoder();
77
- return {
78
- ...arrayCodecHelper(item, size, options.description),
79
- encode: (value) => {
80
- if (typeof size === "number") {
81
- assertValidNumberOfItemsForCodec("array", size, value.length);
82
- }
83
- return codecsCore.mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]);
84
- }
85
- };
86
- }
87
- function getArrayDecoder(item, options = {}) {
88
- const size = options.size ?? codecsNumbers.getU32Decoder();
89
- return {
90
- ...arrayCodecHelper(item, size, options.description),
91
- decode: (bytes, offset = 0) => {
92
- if (typeof size === "object" && bytes.slice(offset).length === 0) {
93
- return [[], offset];
94
- }
95
- const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
96
- offset = newOffset;
97
- const values = [];
98
- for (let i = 0; i < resolvedSize; i += 1) {
99
- const [value, newOffset2] = item.decode(bytes, offset);
100
- values.push(value);
101
- offset = newOffset2;
102
- }
103
- return [values, offset];
104
- }
105
- };
106
- }
107
- function getArrayCodec(item, options = {}) {
108
- return codecsCore.combineCodec(getArrayEncoder(item, options), getArrayDecoder(item, options));
115
+ return itemSize === null ? null : itemSize * size;
109
116
  }
110
- var getBitArrayEncoder = (size, options = {}) => {
111
- const parsedOptions = typeof options === "boolean" ? { backward: options } : options;
112
- const backward = parsedOptions.backward ?? false;
113
- const backwardSuffix = backward ? "; backward" : "";
114
- return {
115
- description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`,
116
- encode(value) {
117
- const bytes = [];
117
+ function getBitArrayEncoder(size, config = {}) {
118
+ const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
119
+ const backward = parsedConfig.backward ?? false;
120
+ return codecsCore.createEncoder({
121
+ fixedSize: size,
122
+ write(value, bytes, offset) {
123
+ const bytesToAdd = [];
118
124
  for (let i = 0; i < size; i += 1) {
119
125
  let byte = 0;
120
126
  for (let j = 0; j < 8; j += 1) {
@@ -122,23 +128,22 @@ var getBitArrayEncoder = (size, options = {}) => {
122
128
  byte |= feature << (backward ? j : 7 - j);
123
129
  }
124
130
  if (backward) {
125
- bytes.unshift(byte);
131
+ bytesToAdd.unshift(byte);
126
132
  } else {
127
- bytes.push(byte);
133
+ bytesToAdd.push(byte);
128
134
  }
129
135
  }
130
- return new Uint8Array(bytes);
131
- },
136
+ bytes.set(bytesToAdd, offset);
137
+ return size;
138
+ }
139
+ });
140
+ }
141
+ function getBitArrayDecoder(size, config = {}) {
142
+ const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
143
+ const backward = parsedConfig.backward ?? false;
144
+ return codecsCore.createDecoder({
132
145
  fixedSize: size,
133
- maxSize: size
134
- };
135
- };
136
- var getBitArrayDecoder = (size, options = {}) => {
137
- const parsedOptions = typeof options === "boolean" ? { backward: options } : options;
138
- const backward = parsedOptions.backward ?? false;
139
- const backwardSuffix = backward ? "; backward" : "";
140
- return {
141
- decode(bytes, offset = 0) {
146
+ read(bytes, offset) {
142
147
  codecsCore.assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);
143
148
  const booleans = [];
144
149
  let slice = bytes.slice(offset, offset + size);
@@ -155,138 +160,107 @@ var getBitArrayDecoder = (size, options = {}) => {
155
160
  }
156
161
  });
157
162
  return [booleans, offset + size];
158
- },
159
- description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`,
160
- fixedSize: size,
161
- maxSize: size
162
- };
163
- };
164
- var getBitArrayCodec = (size, options = {}) => codecsCore.combineCodec(getBitArrayEncoder(size, options), getBitArrayDecoder(size, options));
165
- function getBooleanEncoder(options = {}) {
166
- const size = options.size ?? codecsNumbers.getU8Encoder();
167
- codecsCore.assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
168
- return {
169
- description: options.description ?? `bool(${size.description})`,
170
- encode: (value) => size.encode(value ? 1 : 0),
171
- fixedSize: size.fixedSize,
172
- maxSize: size.fixedSize
173
- };
174
- }
175
- function getBooleanDecoder(options = {}) {
176
- const size = options.size ?? codecsNumbers.getU8Decoder();
177
- codecsCore.assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
178
- return {
179
- decode: (bytes, offset = 0) => {
180
- codecsCore.assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
181
- const [value, vOffset] = size.decode(bytes, offset);
182
- return [value === 1, vOffset];
183
- },
184
- description: options.description ?? `bool(${size.description})`,
185
- fixedSize: size.fixedSize,
186
- maxSize: size.fixedSize
187
- };
188
- }
189
- function getBooleanCodec(options = {}) {
190
- return codecsCore.combineCodec(getBooleanEncoder(options), getBooleanDecoder(options));
191
- }
192
- function getBytesEncoder(options = {}) {
193
- const size = options.size ?? "variable";
194
- const sizeDescription = typeof size === "object" ? size.description : `${size}`;
195
- const description = options.description ?? `bytes(${sizeDescription})`;
196
- const byteEncoder = {
197
- description,
198
- encode: (value) => value,
199
- fixedSize: null,
200
- maxSize: null
201
- };
163
+ }
164
+ });
165
+ }
166
+ function getBitArrayCodec(size, config = {}) {
167
+ return codecsCore.combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
168
+ }
169
+ function getBooleanEncoder(config = {}) {
170
+ const size = config.size ?? codecsNumbers.getU8Encoder();
171
+ codecsCore.assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
172
+ return codecsCore.mapEncoder(size, (value) => value ? 1 : 0);
173
+ }
174
+ function getBooleanDecoder(config = {}) {
175
+ const size = config.size ?? codecsNumbers.getU8Decoder();
176
+ codecsCore.assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
177
+ return codecsCore.mapDecoder(size, (value) => Number(value) === 1);
178
+ }
179
+ function getBooleanCodec(config = {}) {
180
+ return codecsCore.combineCodec(getBooleanEncoder(config), getBooleanDecoder(config));
181
+ }
182
+ function getBytesEncoder(config = {}) {
183
+ const size = config.size ?? "variable";
184
+ const byteEncoder = codecsCore.createEncoder({
185
+ getSizeFromValue: (value) => value.length,
186
+ write: (value, bytes, offset) => {
187
+ bytes.set(value, offset);
188
+ return offset + value.length;
189
+ }
190
+ });
202
191
  if (size === "variable") {
203
192
  return byteEncoder;
204
193
  }
205
194
  if (typeof size === "number") {
206
- return codecsCore.fixEncoder(byteEncoder, size, description);
195
+ return codecsCore.fixEncoder(byteEncoder, size);
207
196
  }
208
- return {
209
- ...byteEncoder,
210
- encode: (value) => {
211
- const contentBytes = byteEncoder.encode(value);
212
- const lengthBytes = size.encode(contentBytes.length);
213
- return codecsCore.mergeBytes([lengthBytes, contentBytes]);
197
+ return codecsCore.createEncoder({
198
+ getSizeFromValue: (value) => codecsCore.getEncodedSize(value.length, size) + value.length,
199
+ write: (value, bytes, offset) => {
200
+ offset = size.write(value.length, bytes, offset);
201
+ return byteEncoder.write(value, bytes, offset);
214
202
  }
215
- };
203
+ });
216
204
  }
217
- function getBytesDecoder(options = {}) {
218
- const size = options.size ?? "variable";
219
- const sizeDescription = typeof size === "object" ? size.description : `${size}`;
220
- const description = options.description ?? `bytes(${sizeDescription})`;
221
- const byteDecoder = {
222
- decode: (bytes, offset = 0) => {
205
+ function getBytesDecoder(config = {}) {
206
+ const size = config.size ?? "variable";
207
+ const byteDecoder = codecsCore.createDecoder({
208
+ read: (bytes, offset) => {
223
209
  const slice = bytes.slice(offset);
224
210
  return [slice, offset + slice.length];
225
- },
226
- description,
227
- fixedSize: null,
228
- maxSize: null
229
- };
211
+ }
212
+ });
230
213
  if (size === "variable") {
231
214
  return byteDecoder;
232
215
  }
233
216
  if (typeof size === "number") {
234
- return codecsCore.fixDecoder(byteDecoder, size, description);
217
+ return codecsCore.fixDecoder(byteDecoder, size);
235
218
  }
236
- return {
237
- ...byteDecoder,
238
- decode: (bytes, offset = 0) => {
219
+ return codecsCore.createDecoder({
220
+ read: (bytes, offset) => {
239
221
  codecsCore.assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
240
- const [lengthBigInt, lengthOffset] = size.decode(bytes, offset);
222
+ const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
241
223
  const length = Number(lengthBigInt);
242
224
  offset = lengthOffset;
243
225
  const contentBytes = bytes.slice(offset, offset + length);
244
226
  codecsCore.assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
245
- const [value, contentOffset] = byteDecoder.decode(contentBytes);
227
+ const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
246
228
  offset += contentOffset;
247
229
  return [value, offset];
248
230
  }
249
- };
250
- }
251
- function getBytesCodec(options = {}) {
252
- return codecsCore.combineCodec(getBytesEncoder(options), getBytesDecoder(options));
253
- }
254
- function dataEnumCodecHelper(variants, prefix, description) {
255
- const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
256
- const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
257
- const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
258
- const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
259
- return {
260
- description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
261
- fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
262
- maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
263
- };
264
- }
265
- function getDataEnumEncoder(variants, options = {}) {
266
- const prefix = options.size ?? codecsNumbers.getU8Encoder();
267
- return {
268
- ...dataEnumCodecHelper(variants, prefix, options.description),
269
- encode: (variant) => {
270
- const discriminator = variants.findIndex(([key]) => variant.__kind === key);
271
- if (discriminator < 0) {
272
- throw new Error(
273
- `Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
274
- );
275
- }
276
- const variantPrefix = prefix.encode(discriminator);
277
- const variantSerializer = variants[discriminator][1];
278
- const variantBytes = variantSerializer.encode(variant);
279
- return codecsCore.mergeBytes([variantPrefix, variantBytes]);
231
+ });
232
+ }
233
+ function getBytesCodec(config = {}) {
234
+ return codecsCore.combineCodec(getBytesEncoder(config), getBytesDecoder(config));
235
+ }
236
+ function getDataEnumEncoder(variants, config = {}) {
237
+ const prefix = config.size ?? codecsNumbers.getU8Encoder();
238
+ const fixedSize = getDataEnumFixedSize(variants, prefix);
239
+ return codecsCore.createEncoder({
240
+ ...fixedSize !== null ? { fixedSize } : {
241
+ getSizeFromValue: (variant) => {
242
+ const discriminator = getVariantDiscriminator(variants, variant);
243
+ const variantEncoder = variants[discriminator][1];
244
+ return codecsCore.getEncodedSize(discriminator, prefix) + codecsCore.getEncodedSize(variant, variantEncoder);
245
+ },
246
+ maxSize: getDataEnumMaxSize(variants, prefix)
247
+ },
248
+ write: (variant, bytes, offset) => {
249
+ const discriminator = getVariantDiscriminator(variants, variant);
250
+ offset = prefix.write(discriminator, bytes, offset);
251
+ const variantEncoder = variants[discriminator][1];
252
+ return variantEncoder.write(variant, bytes, offset);
280
253
  }
281
- };
282
- }
283
- function getDataEnumDecoder(variants, options = {}) {
284
- const prefix = options.size ?? codecsNumbers.getU8Decoder();
285
- return {
286
- ...dataEnumCodecHelper(variants, prefix, options.description),
287
- decode: (bytes, offset = 0) => {
254
+ });
255
+ }
256
+ function getDataEnumDecoder(variants, config = {}) {
257
+ const prefix = config.size ?? codecsNumbers.getU8Decoder();
258
+ const fixedSize = getDataEnumFixedSize(variants, prefix);
259
+ return codecsCore.createDecoder({
260
+ ...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
261
+ read: (bytes, offset) => {
288
262
  codecsCore.assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
289
- const [discriminator, dOffset] = prefix.decode(bytes, offset);
263
+ const [discriminator, dOffset] = prefix.read(bytes, offset);
290
264
  offset = dOffset;
291
265
  const variantField = variants[Number(discriminator)] ?? null;
292
266
  if (!variantField) {
@@ -294,337 +268,274 @@ function getDataEnumDecoder(variants, options = {}) {
294
268
  `Enum discriminator out of range. Expected a number between 0 and ${variants.length - 1}, got ${discriminator}.`
295
269
  );
296
270
  }
297
- const [variant, vOffset] = variantField[1].decode(bytes, offset);
271
+ const [variant, vOffset] = variantField[1].read(bytes, offset);
298
272
  offset = vOffset;
299
273
  return [{ __kind: variantField[0], ...variant ?? {} }, offset];
300
274
  }
301
- };
275
+ });
302
276
  }
303
- function getDataEnumCodec(variants, options = {}) {
304
- return codecsCore.combineCodec(getDataEnumEncoder(variants, options), getDataEnumDecoder(variants, options));
277
+ function getDataEnumCodec(variants, config = {}) {
278
+ return codecsCore.combineCodec(getDataEnumEncoder(variants, config), getDataEnumDecoder(variants, config));
305
279
  }
306
- function mapCodecHelper(key, value, size, description) {
307
- if (size === "remainder" && (key.fixedSize === null || value.fixedSize === null)) {
308
- throw new Error('Codecs of "remainder" size must have fixed-size items.');
280
+ function getDataEnumFixedSize(variants, prefix) {
281
+ if (variants.length === 0)
282
+ return codecsCore.isFixedSize(prefix) ? prefix.fixedSize : null;
283
+ if (!codecsCore.isFixedSize(variants[0][1]))
284
+ return null;
285
+ const variantSize = variants[0][1].fixedSize;
286
+ const sameSizedVariants = variants.every(
287
+ (variant) => codecsCore.isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
288
+ );
289
+ if (!sameSizedVariants)
290
+ return null;
291
+ return codecsCore.isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
292
+ }
293
+ function getDataEnumMaxSize(variants, prefix) {
294
+ const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
295
+ return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
296
+ }
297
+ function getVariantDiscriminator(variants, variant) {
298
+ const discriminator = variants.findIndex(([key]) => variant.__kind === key);
299
+ if (discriminator < 0) {
300
+ throw new Error(
301
+ `Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
302
+ );
309
303
  }
310
- return {
311
- description: description ?? `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`,
312
- fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]),
313
- maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize])
314
- };
315
- }
316
- function getMapEncoder(key, value, options = {}) {
317
- const size = options.size ?? codecsNumbers.getU32Encoder();
318
- return {
319
- ...mapCodecHelper(key, value, size, options.description),
320
- encode: (map) => {
321
- if (typeof size === "number") {
322
- assertValidNumberOfItemsForCodec("map", size, map.size);
323
- }
324
- const itemBytes = Array.from(map, ([k, v]) => codecsCore.mergeBytes([key.encode(k), value.encode(v)]));
325
- return codecsCore.mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]);
304
+ return discriminator;
305
+ }
306
+ function getTupleEncoder(items) {
307
+ const fixedSize = sumCodecSizes(items.map(getFixedSize));
308
+ const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
309
+ return codecsCore.createEncoder({
310
+ ...fixedSize === null ? {
311
+ getSizeFromValue: (value) => items.map((item, index) => codecsCore.getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
312
+ maxSize
313
+ } : { fixedSize },
314
+ write: (value, bytes, offset) => {
315
+ assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
316
+ items.forEach((item, index) => {
317
+ offset = item.write(value[index], bytes, offset);
318
+ });
319
+ return offset;
326
320
  }
327
- };
321
+ });
322
+ }
323
+ function getTupleDecoder(items) {
324
+ const fixedSize = sumCodecSizes(items.map(getFixedSize));
325
+ const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
326
+ return codecsCore.createDecoder({
327
+ ...fixedSize === null ? { maxSize } : { fixedSize },
328
+ read: (bytes, offset) => {
329
+ const values = [];
330
+ items.forEach((item) => {
331
+ const [newValue, newOffset] = item.read(bytes, offset);
332
+ values.push(newValue);
333
+ offset = newOffset;
334
+ });
335
+ return [values, offset];
336
+ }
337
+ });
328
338
  }
329
- function getMapDecoder(key, value, options = {}) {
330
- const size = options.size ?? codecsNumbers.getU32Decoder();
331
- return {
332
- ...mapCodecHelper(key, value, size, options.description),
333
- decode: (bytes, offset = 0) => {
334
- const map = /* @__PURE__ */ new Map();
335
- if (typeof size === "object" && bytes.slice(offset).length === 0) {
336
- return [map, offset];
339
+ function getTupleCodec(items) {
340
+ return codecsCore.combineCodec(
341
+ getTupleEncoder(items),
342
+ getTupleDecoder(items)
343
+ );
344
+ }
345
+
346
+ // src/map.ts
347
+ function getMapEncoder(key, value, config = {}) {
348
+ return codecsCore.mapEncoder(
349
+ getArrayEncoder(getTupleEncoder([key, value]), config),
350
+ (map) => [...map.entries()]
351
+ );
352
+ }
353
+ function getMapDecoder(key, value, config = {}) {
354
+ return codecsCore.mapDecoder(
355
+ getArrayDecoder(getTupleDecoder([key, value]), config),
356
+ (entries) => new Map(entries)
357
+ );
358
+ }
359
+ function getMapCodec(key, value, config = {}) {
360
+ return codecsCore.combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
361
+ }
362
+ function getNullableEncoder(item, config = {}) {
363
+ const prefix = config.prefix ?? codecsNumbers.getU8Encoder();
364
+ const fixed = config.fixed ?? false;
365
+ const isZeroSizeItem = codecsCore.isFixedSize(item) && codecsCore.isFixedSize(prefix) && item.fixedSize === 0;
366
+ if (fixed || isZeroSizeItem) {
367
+ codecsCore.assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
368
+ codecsCore.assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
369
+ const fixedSize = prefix.fixedSize + item.fixedSize;
370
+ return codecsCore.createEncoder({
371
+ fixedSize,
372
+ write: (option, bytes, offset) => {
373
+ const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
374
+ if (option !== null) {
375
+ item.write(option, bytes, prefixOffset);
376
+ }
377
+ return offset + fixedSize;
337
378
  }
338
- const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(
339
- size,
340
- [key.fixedSize, value.fixedSize],
341
- bytes,
342
- offset
343
- );
344
- offset = newOffset;
345
- for (let i = 0; i < resolvedSize; i += 1) {
346
- const [decodedKey, kOffset] = key.decode(bytes, offset);
347
- offset = kOffset;
348
- const [decodedValue, vOffset] = value.decode(bytes, offset);
349
- offset = vOffset;
350
- map.set(decodedKey, decodedValue);
379
+ });
380
+ }
381
+ return codecsCore.createEncoder({
382
+ getSizeFromValue: (option) => codecsCore.getEncodedSize(Number(option !== null), prefix) + (option !== null ? codecsCore.getEncodedSize(option, item) : 0),
383
+ maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
384
+ write: (option, bytes, offset) => {
385
+ offset = prefix.write(Number(option !== null), bytes, offset);
386
+ if (option !== null) {
387
+ offset = item.write(option, bytes, offset);
351
388
  }
352
- return [map, offset];
389
+ return offset;
353
390
  }
354
- };
355
- }
356
- function getMapCodec(key, value, options = {}) {
357
- return codecsCore.combineCodec(getMapEncoder(key, value, options), getMapDecoder(key, value, options));
358
- }
359
- function nullableCodecHelper(item, prefix, fixed, description) {
360
- let descriptionSuffix = `; ${prefix.description}`;
361
- let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
362
- if (fixed) {
363
- codecsCore.assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
364
- codecsCore.assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
365
- descriptionSuffix += "; fixed";
391
+ });
392
+ }
393
+ function getNullableDecoder(item, config = {}) {
394
+ const prefix = config.prefix ?? codecsNumbers.getU8Decoder();
395
+ const fixed = config.fixed ?? false;
396
+ let fixedSize = null;
397
+ const isZeroSizeItem = codecsCore.isFixedSize(item) && codecsCore.isFixedSize(prefix) && item.fixedSize === 0;
398
+ if (fixed || isZeroSizeItem) {
399
+ codecsCore.assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
400
+ codecsCore.assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
366
401
  fixedSize = prefix.fixedSize + item.fixedSize;
367
402
  }
368
- return {
369
- description: description ?? `nullable(${item.description + descriptionSuffix})`,
370
- fixedSize,
371
- maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
372
- };
373
- }
374
- function getNullableEncoder(item, options = {}) {
375
- const prefix = options.prefix ?? codecsNumbers.getU8Encoder();
376
- const fixed = options.fixed ?? false;
377
- return {
378
- ...nullableCodecHelper(item, prefix, fixed, options.description),
379
- encode: (option) => {
380
- const prefixByte = prefix.encode(Number(option !== null));
381
- let itemBytes = option !== null ? item.encode(option) : new Uint8Array();
382
- itemBytes = fixed ? codecsCore.fixBytes(itemBytes, item.fixedSize) : itemBytes;
383
- return codecsCore.mergeBytes([prefixByte, itemBytes]);
384
- }
385
- };
386
- }
387
- function getNullableDecoder(item, options = {}) {
388
- const prefix = options.prefix ?? codecsNumbers.getU8Decoder();
389
- const fixed = options.fixed ?? false;
390
- return {
391
- ...nullableCodecHelper(item, prefix, fixed, options.description),
392
- decode: (bytes, offset = 0) => {
403
+ return codecsCore.createDecoder({
404
+ ...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
405
+ read: (bytes, offset) => {
393
406
  if (bytes.length - offset <= 0) {
394
407
  return [null, offset];
395
408
  }
396
- const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0);
397
- const [isSome, prefixOffset] = prefix.decode(bytes, offset);
398
- offset = prefixOffset;
409
+ const [isSome, prefixOffset] = prefix.read(bytes, offset);
399
410
  if (isSome === 0) {
400
- return [null, fixed ? fixedOffset : offset];
411
+ return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
401
412
  }
402
- const [value, newOffset] = item.decode(bytes, offset);
403
- offset = newOffset;
404
- return [value, fixed ? fixedOffset : offset];
413
+ const [value, newOffset] = item.read(bytes, prefixOffset);
414
+ return [value, fixedSize !== null ? offset + fixedSize : newOffset];
405
415
  }
406
- };
416
+ });
417
+ }
418
+ function getNullableCodec(item, config = {}) {
419
+ const configCast = config;
420
+ return codecsCore.combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
421
+ }
422
+ function getScalarEnumEncoder(constructor, config = {}) {
423
+ const prefix = config.size ?? codecsNumbers.getU8Encoder();
424
+ const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor);
425
+ return codecsCore.mapEncoder(prefix, (value) => {
426
+ const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
427
+ const isInvalidString = typeof value === "string" && !stringValues.includes(value);
428
+ if (isInvalidNumber || isInvalidString) {
429
+ throw new Error(
430
+ `Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
431
+ );
432
+ }
433
+ if (typeof value === "number")
434
+ return value;
435
+ const valueIndex = enumValues.indexOf(value);
436
+ if (valueIndex >= 0)
437
+ return valueIndex;
438
+ return enumKeys.indexOf(value);
439
+ });
440
+ }
441
+ function getScalarEnumDecoder(constructor, config = {}) {
442
+ const prefix = config.size ?? codecsNumbers.getU8Decoder();
443
+ const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor);
444
+ return codecsCore.mapDecoder(prefix, (value) => {
445
+ const valueAsNumber = Number(value);
446
+ if (valueAsNumber < minRange || valueAsNumber > maxRange) {
447
+ throw new Error(
448
+ `Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
449
+ );
450
+ }
451
+ return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber];
452
+ });
407
453
  }
408
- function getNullableCodec(item, options = {}) {
409
- return codecsCore.combineCodec(getNullableEncoder(item, options), getNullableDecoder(item, options));
454
+ function getScalarEnumCodec(constructor, config = {}) {
455
+ return codecsCore.combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
410
456
  }
411
- function scalarEnumCoderHelper(constructor, prefix, description) {
457
+ function getScalarEnumStats(constructor) {
412
458
  const enumKeys = Object.keys(constructor);
413
459
  const enumValues = Object.values(constructor);
414
460
  const isNumericEnum = enumValues.some((v) => typeof v === "number");
415
- const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", ");
416
461
  const minRange = 0;
417
462
  const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;
418
463
  const stringValues = isNumericEnum ? [...enumKeys] : [.../* @__PURE__ */ new Set([...enumKeys, ...enumValues])];
419
464
  return {
420
- description: description ?? `enum(${valueDescriptions}; ${prefix.description})`,
421
465
  enumKeys,
422
466
  enumValues,
423
- fixedSize: prefix.fixedSize,
424
467
  isNumericEnum,
425
468
  maxRange,
426
- maxSize: prefix.maxSize,
427
469
  minRange,
428
470
  stringValues
429
471
  };
430
472
  }
431
- function getScalarEnumEncoder(constructor, options = {}) {
432
- const prefix = options.size ?? codecsNumbers.getU8Encoder();
433
- const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, options.description);
434
- return {
435
- description,
436
- encode: (value) => {
437
- const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
438
- const isInvalidString = typeof value === "string" && !stringValues.includes(value);
439
- if (isInvalidNumber || isInvalidString) {
440
- throw new Error(
441
- `Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
442
- );
443
- }
444
- if (typeof value === "number")
445
- return prefix.encode(value);
446
- const valueIndex = enumValues.indexOf(value);
447
- if (valueIndex >= 0)
448
- return prefix.encode(valueIndex);
449
- return prefix.encode(enumKeys.indexOf(value));
450
- },
451
- fixedSize,
452
- maxSize
453
- };
454
- }
455
- function getScalarEnumDecoder(constructor, options = {}) {
456
- const prefix = options.size ?? codecsNumbers.getU8Decoder();
457
- const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper(
458
- constructor,
459
- prefix,
460
- options.description
461
- );
462
- return {
463
- decode: (bytes, offset = 0) => {
464
- codecsCore.assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
465
- const [value, newOffset] = prefix.decode(bytes, offset);
466
- const valueAsNumber = Number(value);
467
- offset = newOffset;
468
- if (valueAsNumber < minRange || valueAsNumber > maxRange) {
469
- throw new Error(
470
- `Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
471
- );
472
- }
473
- return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
474
- },
475
- description,
476
- fixedSize,
477
- maxSize
478
- };
479
- }
480
- function getScalarEnumCodec(constructor, options = {}) {
481
- return codecsCore.combineCodec(getScalarEnumEncoder(constructor, options), getScalarEnumDecoder(constructor, options));
482
- }
483
- function setCodecHelper(item, size, description) {
484
- if (size === "remainder" && item.fixedSize === null) {
485
- throw new Error('Codecs of "remainder" size must have fixed-size items.');
486
- }
487
- return {
488
- description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
489
- fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
490
- maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
491
- };
492
- }
493
- function getSetEncoder(item, options = {}) {
494
- const size = options.size ?? codecsNumbers.getU32Encoder();
495
- return {
496
- ...setCodecHelper(item, size, options.description),
497
- encode: (set) => {
498
- if (typeof size === "number" && set.size !== size) {
499
- assertValidNumberOfItemsForCodec("set", size, set.size);
500
- }
501
- const itemBytes = Array.from(set, (value) => item.encode(value));
502
- return codecsCore.mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
503
- }
504
- };
505
- }
506
- function getSetDecoder(item, options = {}) {
507
- const size = options.size ?? codecsNumbers.getU32Decoder();
508
- return {
509
- ...setCodecHelper(item, size, options.description),
510
- decode: (bytes, offset = 0) => {
511
- const set = /* @__PURE__ */ new Set();
512
- if (typeof size === "object" && bytes.slice(offset).length === 0) {
513
- return [set, offset];
514
- }
515
- const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
516
- offset = newOffset;
517
- for (let i = 0; i < resolvedSize; i += 1) {
518
- const [value, newOffset2] = item.decode(bytes, offset);
519
- offset = newOffset2;
520
- set.add(value);
521
- }
522
- return [set, offset];
523
- }
524
- };
473
+ function getSetEncoder(item, config = {}) {
474
+ return codecsCore.mapEncoder(getArrayEncoder(item, config), (set) => [...set]);
525
475
  }
526
- function getSetCodec(item, options = {}) {
527
- return codecsCore.combineCodec(getSetEncoder(item, options), getSetDecoder(item, options));
476
+ function getSetDecoder(item, config = {}) {
477
+ return codecsCore.mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries));
528
478
  }
529
- function structCodecHelper(fields, description) {
530
- const fieldDescriptions = fields.map(([name, codec]) => `${String(name)}: ${codec.description}`).join(", ");
531
- return {
532
- description: description ?? `struct(${fieldDescriptions})`,
533
- fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)),
534
- maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize))
535
- };
479
+ function getSetCodec(item, config = {}) {
480
+ return codecsCore.combineCodec(getSetEncoder(item, config), getSetDecoder(item, config));
536
481
  }
537
- function getStructEncoder(fields, options = {}) {
538
- return {
539
- ...structCodecHelper(fields, options.description),
540
- encode: (struct) => {
541
- const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key]));
542
- return codecsCore.mergeBytes(fieldBytes);
482
+ function getStructEncoder(fields) {
483
+ const fieldCodecs = fields.map(([, codec]) => codec);
484
+ const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
485
+ const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
486
+ return codecsCore.createEncoder({
487
+ ...fixedSize === null ? {
488
+ getSizeFromValue: (value) => fields.map(([key, codec]) => codecsCore.getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0),
489
+ maxSize
490
+ } : { fixedSize },
491
+ write: (struct, bytes, offset) => {
492
+ fields.forEach(([key, codec]) => {
493
+ offset = codec.write(struct[key], bytes, offset);
494
+ });
495
+ return offset;
543
496
  }
544
- };
545
- }
546
- function getStructDecoder(fields, options = {}) {
547
- return {
548
- ...structCodecHelper(fields, options.description),
549
- decode: (bytes, offset = 0) => {
497
+ });
498
+ }
499
+ function getStructDecoder(fields) {
500
+ const fieldCodecs = fields.map(([, codec]) => codec);
501
+ const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
502
+ const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
503
+ return codecsCore.createDecoder({
504
+ ...fixedSize === null ? { maxSize } : { fixedSize },
505
+ read: (bytes, offset) => {
550
506
  const struct = {};
551
507
  fields.forEach(([key, codec]) => {
552
- const [value, newOffset] = codec.decode(bytes, offset);
508
+ const [value, newOffset] = codec.read(bytes, offset);
553
509
  offset = newOffset;
554
510
  struct[key] = value;
555
511
  });
556
512
  return [struct, offset];
557
513
  }
558
- };
559
- }
560
- function getStructCodec(fields, options = {}) {
561
- return codecsCore.combineCodec(getStructEncoder(fields, options), getStructDecoder(fields, options));
562
- }
563
- function tupleCodecHelper(items, description) {
564
- const itemDescriptions = items.map((item) => item.description).join(", ");
565
- return {
566
- description: description ?? `tuple(${itemDescriptions})`,
567
- fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)),
568
- maxSize: sumCodecSizes(items.map((item) => item.maxSize))
569
- };
570
- }
571
- function getTupleEncoder(items, options = {}) {
572
- return {
573
- ...tupleCodecHelper(items, options.description),
574
- encode: (value) => {
575
- assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
576
- return codecsCore.mergeBytes(items.map((item, index) => item.encode(value[index])));
577
- }
578
- };
579
- }
580
- function getTupleDecoder(items, options = {}) {
581
- return {
582
- ...tupleCodecHelper(items, options.description),
583
- decode: (bytes, offset = 0) => {
584
- const values = [];
585
- items.forEach((codec) => {
586
- const [newValue, newOffset] = codec.decode(bytes, offset);
587
- values.push(newValue);
588
- offset = newOffset;
589
- });
590
- return [values, offset];
591
- }
592
- };
514
+ });
593
515
  }
594
- function getTupleCodec(items, options = {}) {
595
- return codecsCore.combineCodec(
596
- getTupleEncoder(items, options),
597
- getTupleDecoder(items, options)
598
- );
516
+ function getStructCodec(fields) {
517
+ return codecsCore.combineCodec(getStructEncoder(fields), getStructDecoder(fields));
599
518
  }
600
- function getUnitEncoder(options = {}) {
601
- return {
602
- description: options.description ?? "unit",
603
- encode: () => new Uint8Array(),
519
+ function getUnitEncoder() {
520
+ return codecsCore.createEncoder({
604
521
  fixedSize: 0,
605
- maxSize: 0
606
- };
522
+ write: (_value, _bytes, offset) => offset
523
+ });
607
524
  }
608
- function getUnitDecoder(options = {}) {
609
- return {
610
- decode: (_bytes, offset = 0) => [void 0, offset],
611
- description: options.description ?? "unit",
525
+ function getUnitDecoder() {
526
+ return codecsCore.createDecoder({
612
527
  fixedSize: 0,
613
- maxSize: 0
614
- };
528
+ read: (_bytes, offset) => [void 0, offset]
529
+ });
615
530
  }
616
- function getUnitCodec(options = {}) {
617
- return codecsCore.combineCodec(getUnitEncoder(options), getUnitDecoder(options));
531
+ function getUnitCodec() {
532
+ return codecsCore.combineCodec(getUnitEncoder(), getUnitDecoder());
618
533
  }
619
534
 
620
535
  exports.assertValidNumberOfItemsForCodec = assertValidNumberOfItemsForCodec;
621
- exports.decodeArrayLikeCodecSize = decodeArrayLikeCodecSize;
622
536
  exports.getArrayCodec = getArrayCodec;
623
537
  exports.getArrayDecoder = getArrayDecoder;
624
538
  exports.getArrayEncoder = getArrayEncoder;
625
- exports.getArrayLikeCodecSizeDescription = getArrayLikeCodecSizeDescription;
626
- exports.getArrayLikeCodecSizeFromChildren = getArrayLikeCodecSizeFromChildren;
627
- exports.getArrayLikeCodecSizePrefix = getArrayLikeCodecSizePrefix;
628
539
  exports.getBitArrayCodec = getBitArrayCodec;
629
540
  exports.getBitArrayDecoder = getBitArrayDecoder;
630
541
  exports.getBitArrayEncoder = getBitArrayEncoder;