@solana/codecs-data-structures 2.0.0-experimental.efe6f4d → 2.0.0-experimental.f16a625

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +482 -4
  2. package/dist/index.browser.cjs +329 -433
  3. package/dist/index.browser.cjs.map +1 -1
  4. package/dist/index.browser.js +331 -431
  5. package/dist/index.browser.js.map +1 -1
  6. package/dist/index.native.js +331 -431
  7. package/dist/index.native.js.map +1 -1
  8. package/dist/index.node.cjs +329 -433
  9. package/dist/index.node.cjs.map +1 -1
  10. package/dist/index.node.js +331 -431
  11. package/dist/index.node.js.map +1 -1
  12. package/dist/types/array.d.ts +35 -6
  13. package/dist/types/array.d.ts.map +1 -1
  14. package/dist/types/bit-array.d.ts +5 -5
  15. package/dist/types/bit-array.d.ts.map +1 -1
  16. package/dist/types/boolean.d.ts +18 -6
  17. package/dist/types/boolean.d.ts.map +1 -1
  18. package/dist/types/bytes.d.ts +14 -5
  19. package/dist/types/bytes.d.ts.map +1 -1
  20. package/dist/types/data-enum.d.ts +14 -14
  21. package/dist/types/data-enum.d.ts.map +1 -1
  22. package/dist/types/index.d.ts +13 -14
  23. package/dist/types/index.d.ts.map +1 -1
  24. package/dist/types/map.d.ts +24 -6
  25. package/dist/types/map.d.ts.map +1 -1
  26. package/dist/types/nullable.d.ts +24 -6
  27. package/dist/types/nullable.d.ts.map +1 -1
  28. package/dist/types/scalar-enum.d.ts +42 -11
  29. package/dist/types/scalar-enum.d.ts.map +1 -1
  30. package/dist/types/set.d.ts +24 -6
  31. package/dist/types/set.d.ts.map +1 -1
  32. package/dist/types/struct.d.ts +28 -18
  33. package/dist/types/struct.d.ts.map +1 -1
  34. package/dist/types/tuple.d.ts +22 -15
  35. package/dist/types/tuple.d.ts.map +1 -1
  36. package/dist/types/unit.d.ts +4 -12
  37. package/dist/types/unit.d.ts.map +1 -1
  38. package/dist/types/utils.d.ts +10 -2
  39. package/dist/types/utils.d.ts.map +1 -1
  40. package/package.json +12 -34
  41. package/dist/index.development.js +0 -865
  42. package/dist/index.development.js.map +0 -1
  43. package/dist/index.production.min.js +0 -51
  44. package/dist/types/array-like-codec-size.d.ts +0 -20
  45. package/dist/types/array-like-codec-size.d.ts.map +0 -1
@@ -1,9 +1,14 @@
1
- import { mergeBytes, combineCodec, assertByteArrayHasEnoughBytesForCodec, assertFixedSizeCodec, assertByteArrayIsNotEmptyForCodec, fixEncoder, fixDecoder, fixBytes } from '@solana/codecs-core';
1
+ import { createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, assertIsFixedSize, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core';
2
2
  import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers';
3
3
 
4
4
  // src/array.ts
5
5
 
6
- // src/utils.ts
6
+ // src/assertions.ts
7
+ function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
8
+ if (expected !== actual) {
9
+ throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
10
+ }
11
+ }
7
12
  function maxCodecSizes(sizes) {
8
13
  return sizes.reduce(
9
14
  (all, size) => all === null || size === null ? null : Math.max(all, size),
@@ -13,106 +18,88 @@ function maxCodecSizes(sizes) {
13
18
  function sumCodecSizes(sizes) {
14
19
  return sizes.reduce((all, size) => all === null || size === null ? null : all + size, 0);
15
20
  }
16
-
17
- // src/array-like-codec-size.ts
18
- function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
19
- if (typeof size === "number") {
20
- return [size, offset];
21
- }
22
- if (typeof size === "object") {
23
- return size.decode(bytes, offset);
24
- }
25
- if (size === "remainder") {
26
- const childrenSize = sumCodecSizes(childrenSizes);
27
- if (childrenSize === null) {
28
- throw new Error('Codecs of "remainder" size must have fixed-size items.');
29
- }
30
- const remainder = bytes.slice(offset).length;
31
- if (remainder % childrenSize !== 0) {
32
- throw new Error(
33
- `The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
34
- );
35
- }
36
- return [remainder / childrenSize, offset];
37
- }
38
- throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
39
- }
40
- function getArrayLikeCodecSizeDescription(size) {
41
- return typeof size === "object" ? size.description : `${size}`;
42
- }
43
- function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
44
- if (typeof size !== "number")
45
- return null;
46
- if (size === 0)
47
- return 0;
48
- const childrenSize = sumCodecSizes(childrenSizes);
49
- return childrenSize === null ? null : childrenSize * size;
50
- }
51
- function getArrayLikeCodecSizePrefix(size, realSize) {
52
- return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
21
+ function getFixedSize(codec) {
22
+ return isFixedSize(codec) ? codec.fixedSize : null;
53
23
  }
54
-
55
- // src/assertions.ts
56
- function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
57
- if (expected !== actual) {
58
- throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
59
- }
24
+ function getMaxSize(codec) {
25
+ return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
60
26
  }
61
27
 
62
28
  // src/array.ts
63
- function arrayCodecHelper(item, size, description) {
64
- if (size === "remainder" && item.fixedSize === null) {
65
- throw new Error('Codecs of "remainder" size must have fixed-size items.');
66
- }
67
- return {
68
- description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
69
- fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
70
- maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
71
- };
72
- }
73
29
  function getArrayEncoder(item, config = {}) {
74
30
  const size = config.size ?? getU32Encoder();
75
- return {
76
- ...arrayCodecHelper(item, size, config.description),
77
- encode: (value) => {
31
+ const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
32
+ const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
33
+ return createEncoder({
34
+ ...fixedSize !== null ? { fixedSize } : {
35
+ getSizeFromValue: (array) => {
36
+ const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
37
+ return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
38
+ },
39
+ maxSize
40
+ },
41
+ write: (array, bytes, offset) => {
78
42
  if (typeof size === "number") {
79
- assertValidNumberOfItemsForCodec("array", size, value.length);
43
+ assertValidNumberOfItemsForCodec("array", size, array.length);
44
+ }
45
+ if (typeof size === "object") {
46
+ offset = size.write(array.length, bytes, offset);
80
47
  }
81
- return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]);
48
+ array.forEach((value) => {
49
+ offset = item.write(value, bytes, offset);
50
+ });
51
+ return offset;
82
52
  }
83
- };
53
+ });
84
54
  }
85
55
  function getArrayDecoder(item, config = {}) {
86
56
  const size = config.size ?? getU32Decoder();
87
- return {
88
- ...arrayCodecHelper(item, size, config.description),
89
- decode: (bytes, offset = 0) => {
57
+ const itemSize = getFixedSize(item);
58
+ const fixedSize = computeArrayLikeCodecSize(size, itemSize);
59
+ const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
60
+ return createDecoder({
61
+ ...fixedSize !== null ? { fixedSize } : { maxSize },
62
+ read: (bytes, offset) => {
63
+ const array = [];
90
64
  if (typeof size === "object" && bytes.slice(offset).length === 0) {
91
- return [[], offset];
65
+ return [array, offset];
92
66
  }
93
- const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
67
+ if (size === "remainder") {
68
+ while (offset < bytes.length) {
69
+ const [value, newOffset2] = item.read(bytes, offset);
70
+ offset = newOffset2;
71
+ array.push(value);
72
+ }
73
+ return [array, offset];
74
+ }
75
+ const [resolvedSize, newOffset] = typeof size === "number" ? [size, offset] : size.read(bytes, offset);
94
76
  offset = newOffset;
95
- const values = [];
96
77
  for (let i = 0; i < resolvedSize; i += 1) {
97
- const [value, newOffset2] = item.decode(bytes, offset);
98
- values.push(value);
78
+ const [value, newOffset2] = item.read(bytes, offset);
99
79
  offset = newOffset2;
80
+ array.push(value);
100
81
  }
101
- return [values, offset];
82
+ return [array, offset];
102
83
  }
103
- };
84
+ });
104
85
  }
105
86
  function getArrayCodec(item, config = {}) {
106
87
  return combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config));
107
88
  }
108
- var getBitArrayEncoder = (size, config = {}) => {
89
+ function computeArrayLikeCodecSize(size, itemSize) {
90
+ if (typeof size !== "number")
91
+ return null;
92
+ if (size === 0)
93
+ return 0;
94
+ return itemSize === null ? null : itemSize * size;
95
+ }
96
+ function getBitArrayEncoder(size, config = {}) {
109
97
  const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
110
98
  const backward = parsedConfig.backward ?? false;
111
- const backwardSuffix = backward ? "; backward" : "";
112
- return {
113
- description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
114
- encode(value) {
115
- const bytes = [];
99
+ return createEncoder({
100
+ fixedSize: size,
101
+ write(value, bytes, offset) {
102
+ const bytesToAdd = [];
116
103
  for (let i = 0; i < size; i += 1) {
117
104
  let byte = 0;
118
105
  for (let j = 0; j < 8; j += 1) {
@@ -120,23 +107,22 @@ var getBitArrayEncoder = (size, config = {}) => {
120
107
  byte |= feature << (backward ? j : 7 - j);
121
108
  }
122
109
  if (backward) {
123
- bytes.unshift(byte);
110
+ bytesToAdd.unshift(byte);
124
111
  } else {
125
- bytes.push(byte);
112
+ bytesToAdd.push(byte);
126
113
  }
127
114
  }
128
- return new Uint8Array(bytes);
129
- },
130
- fixedSize: size,
131
- maxSize: size
132
- };
133
- };
134
- var getBitArrayDecoder = (size, config = {}) => {
115
+ bytes.set(bytesToAdd, offset);
116
+ return size;
117
+ }
118
+ });
119
+ }
120
+ function getBitArrayDecoder(size, config = {}) {
135
121
  const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
136
122
  const backward = parsedConfig.backward ?? false;
137
- const backwardSuffix = backward ? "; backward" : "";
138
- return {
139
- decode(bytes, offset = 0) {
123
+ return createDecoder({
124
+ fixedSize: size,
125
+ read(bytes, offset) {
140
126
  assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);
141
127
  const booleans = [];
142
128
  let slice = bytes.slice(offset, offset + size);
@@ -153,138 +139,107 @@ var getBitArrayDecoder = (size, config = {}) => {
153
139
  }
154
140
  });
155
141
  return [booleans, offset + size];
156
- },
157
- description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
158
- fixedSize: size,
159
- maxSize: size
160
- };
161
- };
162
- var getBitArrayCodec = (size, config = {}) => combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
142
+ }
143
+ });
144
+ }
145
+ function getBitArrayCodec(size, config = {}) {
146
+ return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
147
+ }
163
148
  function getBooleanEncoder(config = {}) {
164
149
  const size = config.size ?? getU8Encoder();
165
- assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
166
- return {
167
- description: config.description ?? `bool(${size.description})`,
168
- encode: (value) => size.encode(value ? 1 : 0),
169
- fixedSize: size.fixedSize,
170
- maxSize: size.fixedSize
171
- };
150
+ assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
151
+ return mapEncoder(size, (value) => value ? 1 : 0);
172
152
  }
173
153
  function getBooleanDecoder(config = {}) {
174
154
  const size = config.size ?? getU8Decoder();
175
- assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
176
- return {
177
- decode: (bytes, offset = 0) => {
178
- assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
179
- const [value, vOffset] = size.decode(bytes, offset);
180
- return [value === 1, vOffset];
181
- },
182
- description: config.description ?? `bool(${size.description})`,
183
- fixedSize: size.fixedSize,
184
- maxSize: size.fixedSize
185
- };
155
+ assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
156
+ return mapDecoder(size, (value) => Number(value) === 1);
186
157
  }
187
158
  function getBooleanCodec(config = {}) {
188
159
  return combineCodec(getBooleanEncoder(config), getBooleanDecoder(config));
189
160
  }
190
161
  function getBytesEncoder(config = {}) {
191
162
  const size = config.size ?? "variable";
192
- const sizeDescription = typeof size === "object" ? size.description : `${size}`;
193
- const description = config.description ?? `bytes(${sizeDescription})`;
194
- const byteEncoder = {
195
- description,
196
- encode: (value) => value,
197
- fixedSize: null,
198
- maxSize: null
199
- };
163
+ const byteEncoder = createEncoder({
164
+ getSizeFromValue: (value) => value.length,
165
+ write: (value, bytes, offset) => {
166
+ bytes.set(value, offset);
167
+ return offset + value.length;
168
+ }
169
+ });
200
170
  if (size === "variable") {
201
171
  return byteEncoder;
202
172
  }
203
173
  if (typeof size === "number") {
204
- return fixEncoder(byteEncoder, size, description);
174
+ return fixEncoder(byteEncoder, size);
205
175
  }
206
- return {
207
- ...byteEncoder,
208
- encode: (value) => {
209
- const contentBytes = byteEncoder.encode(value);
210
- const lengthBytes = size.encode(contentBytes.length);
211
- return mergeBytes([lengthBytes, contentBytes]);
176
+ return createEncoder({
177
+ getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
178
+ write: (value, bytes, offset) => {
179
+ offset = size.write(value.length, bytes, offset);
180
+ return byteEncoder.write(value, bytes, offset);
212
181
  }
213
- };
182
+ });
214
183
  }
215
184
  function getBytesDecoder(config = {}) {
216
185
  const size = config.size ?? "variable";
217
- const sizeDescription = typeof size === "object" ? size.description : `${size}`;
218
- const description = config.description ?? `bytes(${sizeDescription})`;
219
- const byteDecoder = {
220
- decode: (bytes, offset = 0) => {
186
+ const byteDecoder = createDecoder({
187
+ read: (bytes, offset) => {
221
188
  const slice = bytes.slice(offset);
222
189
  return [slice, offset + slice.length];
223
- },
224
- description,
225
- fixedSize: null,
226
- maxSize: null
227
- };
190
+ }
191
+ });
228
192
  if (size === "variable") {
229
193
  return byteDecoder;
230
194
  }
231
195
  if (typeof size === "number") {
232
- return fixDecoder(byteDecoder, size, description);
196
+ return fixDecoder(byteDecoder, size);
233
197
  }
234
- return {
235
- ...byteDecoder,
236
- decode: (bytes, offset = 0) => {
198
+ return createDecoder({
199
+ read: (bytes, offset) => {
237
200
  assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
238
- const [lengthBigInt, lengthOffset] = size.decode(bytes, offset);
201
+ const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
239
202
  const length = Number(lengthBigInt);
240
203
  offset = lengthOffset;
241
204
  const contentBytes = bytes.slice(offset, offset + length);
242
205
  assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
243
- const [value, contentOffset] = byteDecoder.decode(contentBytes);
206
+ const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
244
207
  offset += contentOffset;
245
208
  return [value, offset];
246
209
  }
247
- };
210
+ });
248
211
  }
249
212
  function getBytesCodec(config = {}) {
250
213
  return combineCodec(getBytesEncoder(config), getBytesDecoder(config));
251
214
  }
252
- function dataEnumCodecHelper(variants, prefix, description) {
253
- const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
254
- const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
255
- const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
256
- const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
257
- return {
258
- description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
259
- fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
260
- maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
261
- };
262
- }
263
215
  function getDataEnumEncoder(variants, config = {}) {
264
216
  const prefix = config.size ?? getU8Encoder();
265
- return {
266
- ...dataEnumCodecHelper(variants, prefix, config.description),
267
- encode: (variant) => {
268
- const discriminator = variants.findIndex(([key]) => variant.__kind === key);
269
- if (discriminator < 0) {
270
- throw new Error(
271
- `Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
272
- );
273
- }
274
- const variantPrefix = prefix.encode(discriminator);
275
- const variantSerializer = variants[discriminator][1];
276
- const variantBytes = variantSerializer.encode(variant);
277
- return mergeBytes([variantPrefix, variantBytes]);
217
+ const fixedSize = getDataEnumFixedSize(variants, prefix);
218
+ return createEncoder({
219
+ ...fixedSize !== null ? { fixedSize } : {
220
+ getSizeFromValue: (variant) => {
221
+ const discriminator = getVariantDiscriminator(variants, variant);
222
+ const variantEncoder = variants[discriminator][1];
223
+ return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
224
+ },
225
+ maxSize: getDataEnumMaxSize(variants, prefix)
226
+ },
227
+ write: (variant, bytes, offset) => {
228
+ const discriminator = getVariantDiscriminator(variants, variant);
229
+ offset = prefix.write(discriminator, bytes, offset);
230
+ const variantEncoder = variants[discriminator][1];
231
+ return variantEncoder.write(variant, bytes, offset);
278
232
  }
279
- };
233
+ });
280
234
  }
281
235
  function getDataEnumDecoder(variants, config = {}) {
282
236
  const prefix = config.size ?? getU8Decoder();
283
- return {
284
- ...dataEnumCodecHelper(variants, prefix, config.description),
285
- decode: (bytes, offset = 0) => {
237
+ const fixedSize = getDataEnumFixedSize(variants, prefix);
238
+ return createDecoder({
239
+ ...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
240
+ read: (bytes, offset) => {
286
241
  assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
287
- const [discriminator, dOffset] = prefix.decode(bytes, offset);
242
+ const [discriminator, dOffset] = prefix.read(bytes, offset);
288
243
  offset = dOffset;
289
244
  const variantField = variants[Number(discriminator)] ?? null;
290
245
  if (!variantField) {
@@ -292,329 +247,274 @@ function getDataEnumDecoder(variants, config = {}) {
292
247
  `Enum discriminator out of range. Expected a number between 0 and ${variants.length - 1}, got ${discriminator}.`
293
248
  );
294
249
  }
295
- const [variant, vOffset] = variantField[1].decode(bytes, offset);
250
+ const [variant, vOffset] = variantField[1].read(bytes, offset);
296
251
  offset = vOffset;
297
252
  return [{ __kind: variantField[0], ...variant ?? {} }, offset];
298
253
  }
299
- };
254
+ });
300
255
  }
301
256
  function getDataEnumCodec(variants, config = {}) {
302
257
  return combineCodec(getDataEnumEncoder(variants, config), getDataEnumDecoder(variants, config));
303
258
  }
304
- function mapCodecHelper(key, value, size, description) {
305
- if (size === "remainder" && (key.fixedSize === null || value.fixedSize === null)) {
306
- throw new Error('Codecs of "remainder" size must have fixed-size items.');
259
+ function getDataEnumFixedSize(variants, prefix) {
260
+ if (variants.length === 0)
261
+ return isFixedSize(prefix) ? prefix.fixedSize : null;
262
+ if (!isFixedSize(variants[0][1]))
263
+ return null;
264
+ const variantSize = variants[0][1].fixedSize;
265
+ const sameSizedVariants = variants.every(
266
+ (variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
267
+ );
268
+ if (!sameSizedVariants)
269
+ return null;
270
+ return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
271
+ }
272
+ function getDataEnumMaxSize(variants, prefix) {
273
+ const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
274
+ return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
275
+ }
276
+ function getVariantDiscriminator(variants, variant) {
277
+ const discriminator = variants.findIndex(([key]) => variant.__kind === key);
278
+ if (discriminator < 0) {
279
+ throw new Error(
280
+ `Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
281
+ );
307
282
  }
308
- return {
309
- description: description ?? `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`,
310
- fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]),
311
- maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize])
312
- };
283
+ return discriminator;
284
+ }
285
+ function getTupleEncoder(items) {
286
+ const fixedSize = sumCodecSizes(items.map(getFixedSize));
287
+ const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
288
+ return createEncoder({
289
+ ...fixedSize === null ? {
290
+ getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
291
+ maxSize
292
+ } : { fixedSize },
293
+ write: (value, bytes, offset) => {
294
+ assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
295
+ items.forEach((item, index) => {
296
+ offset = item.write(value[index], bytes, offset);
297
+ });
298
+ return offset;
299
+ }
300
+ });
301
+ }
302
+ function getTupleDecoder(items) {
303
+ const fixedSize = sumCodecSizes(items.map(getFixedSize));
304
+ const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
305
+ return createDecoder({
306
+ ...fixedSize === null ? { maxSize } : { fixedSize },
307
+ read: (bytes, offset) => {
308
+ const values = [];
309
+ items.forEach((item) => {
310
+ const [newValue, newOffset] = item.read(bytes, offset);
311
+ values.push(newValue);
312
+ offset = newOffset;
313
+ });
314
+ return [values, offset];
315
+ }
316
+ });
317
+ }
318
+ function getTupleCodec(items) {
319
+ return combineCodec(
320
+ getTupleEncoder(items),
321
+ getTupleDecoder(items)
322
+ );
313
323
  }
324
+
325
+ // src/map.ts
314
326
  function getMapEncoder(key, value, config = {}) {
315
- const size = config.size ?? getU32Encoder();
316
- return {
317
- ...mapCodecHelper(key, value, size, config.description),
318
- encode: (map) => {
319
- if (typeof size === "number") {
320
- assertValidNumberOfItemsForCodec("map", size, map.size);
321
- }
322
- const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)]));
323
- return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]);
324
- }
325
- };
327
+ return mapEncoder(
328
+ getArrayEncoder(getTupleEncoder([key, value]), config),
329
+ (map) => [...map.entries()]
330
+ );
326
331
  }
327
332
  function getMapDecoder(key, value, config = {}) {
328
- const size = config.size ?? getU32Decoder();
329
- return {
330
- ...mapCodecHelper(key, value, size, config.description),
331
- decode: (bytes, offset = 0) => {
332
- const map = /* @__PURE__ */ new Map();
333
- if (typeof size === "object" && bytes.slice(offset).length === 0) {
334
- return [map, offset];
335
- }
336
- const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(
337
- size,
338
- [key.fixedSize, value.fixedSize],
339
- bytes,
340
- offset
341
- );
342
- offset = newOffset;
343
- for (let i = 0; i < resolvedSize; i += 1) {
344
- const [decodedKey, kOffset] = key.decode(bytes, offset);
345
- offset = kOffset;
346
- const [decodedValue, vOffset] = value.decode(bytes, offset);
347
- offset = vOffset;
348
- map.set(decodedKey, decodedValue);
349
- }
350
- return [map, offset];
351
- }
352
- };
333
+ return mapDecoder(
334
+ getArrayDecoder(getTupleDecoder([key, value]), config),
335
+ (entries) => new Map(entries)
336
+ );
353
337
  }
354
338
  function getMapCodec(key, value, config = {}) {
355
339
  return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
356
340
  }
357
- function nullableCodecHelper(item, prefix, fixed, description) {
358
- let descriptionSuffix = `; ${prefix.description}`;
359
- let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
360
- if (fixed) {
361
- assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
362
- assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
363
- descriptionSuffix += "; fixed";
364
- fixedSize = prefix.fixedSize + item.fixedSize;
365
- }
366
- return {
367
- description: description ?? `nullable(${item.description + descriptionSuffix})`,
368
- fixedSize,
369
- maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
370
- };
371
- }
372
341
  function getNullableEncoder(item, config = {}) {
373
342
  const prefix = config.prefix ?? getU8Encoder();
374
343
  const fixed = config.fixed ?? false;
375
- return {
376
- ...nullableCodecHelper(item, prefix, fixed, config.description),
377
- encode: (option) => {
378
- const prefixByte = prefix.encode(Number(option !== null));
379
- let itemBytes = option !== null ? item.encode(option) : new Uint8Array();
380
- itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes;
381
- return mergeBytes([prefixByte, itemBytes]);
344
+ const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
345
+ if (fixed || isZeroSizeItem) {
346
+ assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
347
+ assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
348
+ const fixedSize = prefix.fixedSize + item.fixedSize;
349
+ return createEncoder({
350
+ fixedSize,
351
+ write: (option, bytes, offset) => {
352
+ const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
353
+ if (option !== null) {
354
+ item.write(option, bytes, prefixOffset);
355
+ }
356
+ return offset + fixedSize;
357
+ }
358
+ });
359
+ }
360
+ return createEncoder({
361
+ getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0),
362
+ maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
363
+ write: (option, bytes, offset) => {
364
+ offset = prefix.write(Number(option !== null), bytes, offset);
365
+ if (option !== null) {
366
+ offset = item.write(option, bytes, offset);
367
+ }
368
+ return offset;
382
369
  }
383
- };
370
+ });
384
371
  }
385
372
  function getNullableDecoder(item, config = {}) {
386
373
  const prefix = config.prefix ?? getU8Decoder();
387
374
  const fixed = config.fixed ?? false;
388
- return {
389
- ...nullableCodecHelper(item, prefix, fixed, config.description),
390
- decode: (bytes, offset = 0) => {
375
+ let fixedSize = null;
376
+ const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
377
+ if (fixed || isZeroSizeItem) {
378
+ assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
379
+ assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
380
+ fixedSize = prefix.fixedSize + item.fixedSize;
381
+ }
382
+ return createDecoder({
383
+ ...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
384
+ read: (bytes, offset) => {
391
385
  if (bytes.length - offset <= 0) {
392
386
  return [null, offset];
393
387
  }
394
- const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0);
395
- const [isSome, prefixOffset] = prefix.decode(bytes, offset);
396
- offset = prefixOffset;
388
+ const [isSome, prefixOffset] = prefix.read(bytes, offset);
397
389
  if (isSome === 0) {
398
- return [null, fixed ? fixedOffset : offset];
390
+ return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
399
391
  }
400
- const [value, newOffset] = item.decode(bytes, offset);
401
- offset = newOffset;
402
- return [value, fixed ? fixedOffset : offset];
392
+ const [value, newOffset] = item.read(bytes, prefixOffset);
393
+ return [value, fixedSize !== null ? offset + fixedSize : newOffset];
403
394
  }
404
- };
395
+ });
405
396
  }
406
397
  function getNullableCodec(item, config = {}) {
407
- return combineCodec(getNullableEncoder(item, config), getNullableDecoder(item, config));
408
- }
409
- function scalarEnumCoderHelper(constructor, prefix, description) {
410
- const enumKeys = Object.keys(constructor);
411
- const enumValues = Object.values(constructor);
412
- const isNumericEnum = enumValues.some((v) => typeof v === "number");
413
- const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", ");
414
- const minRange = 0;
415
- const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;
416
- const stringValues = isNumericEnum ? [...enumKeys] : [.../* @__PURE__ */ new Set([...enumKeys, ...enumValues])];
417
- return {
418
- description: description ?? `enum(${valueDescriptions}; ${prefix.description})`,
419
- enumKeys,
420
- enumValues,
421
- fixedSize: prefix.fixedSize,
422
- isNumericEnum,
423
- maxRange,
424
- maxSize: prefix.maxSize,
425
- minRange,
426
- stringValues
427
- };
398
+ const configCast = config;
399
+ return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
428
400
  }
429
401
  function getScalarEnumEncoder(constructor, config = {}) {
430
402
  const prefix = config.size ?? getU8Encoder();
431
- const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, config.description);
432
- return {
433
- description,
434
- encode: (value) => {
435
- const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
436
- const isInvalidString = typeof value === "string" && !stringValues.includes(value);
437
- if (isInvalidNumber || isInvalidString) {
438
- throw new Error(
439
- `Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
440
- );
441
- }
442
- if (typeof value === "number")
443
- return prefix.encode(value);
444
- const valueIndex = enumValues.indexOf(value);
445
- if (valueIndex >= 0)
446
- return prefix.encode(valueIndex);
447
- return prefix.encode(enumKeys.indexOf(value));
448
- },
449
- fixedSize,
450
- maxSize
451
- };
403
+ const { minRange, maxRange, allStringInputs, enumKeys, enumValues } = getScalarEnumStats(constructor);
404
+ return mapEncoder(prefix, (value) => {
405
+ const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
406
+ const isInvalidString = typeof value === "string" && !allStringInputs.includes(value);
407
+ if (isInvalidNumber || isInvalidString) {
408
+ throw new Error(
409
+ `Invalid scalar enum variant. Expected one of [${allStringInputs.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
410
+ );
411
+ }
412
+ if (typeof value === "number")
413
+ return value;
414
+ const valueIndex = enumValues.indexOf(value);
415
+ if (valueIndex >= 0)
416
+ return valueIndex;
417
+ return enumKeys.indexOf(value);
418
+ });
452
419
  }
453
420
  function getScalarEnumDecoder(constructor, config = {}) {
454
421
  const prefix = config.size ?? getU8Decoder();
455
- const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper(
456
- constructor,
457
- prefix,
458
- config.description
459
- );
460
- return {
461
- decode: (bytes, offset = 0) => {
462
- assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
463
- const [value, newOffset] = prefix.decode(bytes, offset);
464
- const valueAsNumber = Number(value);
465
- offset = newOffset;
466
- if (valueAsNumber < minRange || valueAsNumber > maxRange) {
467
- throw new Error(
468
- `Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
469
- );
470
- }
471
- return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
472
- },
473
- description,
474
- fixedSize,
475
- maxSize
476
- };
422
+ const { minRange, maxRange, enumKeys } = getScalarEnumStats(constructor);
423
+ return mapDecoder(prefix, (value) => {
424
+ const valueAsNumber = Number(value);
425
+ if (valueAsNumber < minRange || valueAsNumber > maxRange) {
426
+ throw new Error(
427
+ `Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
428
+ );
429
+ }
430
+ return constructor[enumKeys[valueAsNumber]];
431
+ });
477
432
  }
478
433
  function getScalarEnumCodec(constructor, config = {}) {
479
434
  return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
480
435
  }
481
- function setCodecHelper(item, size, description) {
482
- if (size === "remainder" && item.fixedSize === null) {
483
- throw new Error('Codecs of "remainder" size must have fixed-size items.');
484
- }
436
+ function getScalarEnumStats(constructor) {
437
+ const numericValues = Object.values(constructor).filter((v) => typeof v === "number");
438
+ const deduplicatedConstructor = Object.fromEntries(
439
+ Object.entries(constructor).slice(numericValues.length)
440
+ );
441
+ const enumKeys = Object.keys(deduplicatedConstructor);
442
+ const enumValues = Object.values(deduplicatedConstructor);
443
+ const minRange = 0;
444
+ const maxRange = enumValues.length - 1;
445
+ const allStringInputs = [
446
+ .../* @__PURE__ */ new Set([...enumKeys, ...enumValues.filter((v) => typeof v === "string")])
447
+ ];
485
448
  return {
486
- description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
487
- fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
488
- maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
449
+ allStringInputs,
450
+ enumKeys,
451
+ enumValues,
452
+ maxRange,
453
+ minRange
489
454
  };
490
455
  }
491
456
  function getSetEncoder(item, config = {}) {
492
- const size = config.size ?? getU32Encoder();
493
- return {
494
- ...setCodecHelper(item, size, config.description),
495
- encode: (set) => {
496
- if (typeof size === "number" && set.size !== size) {
497
- assertValidNumberOfItemsForCodec("set", size, set.size);
498
- }
499
- const itemBytes = Array.from(set, (value) => item.encode(value));
500
- return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
501
- }
502
- };
457
+ return mapEncoder(getArrayEncoder(item, config), (set) => [...set]);
503
458
  }
504
459
  function getSetDecoder(item, config = {}) {
505
- const size = config.size ?? getU32Decoder();
506
- return {
507
- ...setCodecHelper(item, size, config.description),
508
- decode: (bytes, offset = 0) => {
509
- const set = /* @__PURE__ */ new Set();
510
- if (typeof size === "object" && bytes.slice(offset).length === 0) {
511
- return [set, offset];
512
- }
513
- const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
514
- offset = newOffset;
515
- for (let i = 0; i < resolvedSize; i += 1) {
516
- const [value, newOffset2] = item.decode(bytes, offset);
517
- offset = newOffset2;
518
- set.add(value);
519
- }
520
- return [set, offset];
521
- }
522
- };
460
+ return mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries));
523
461
  }
524
462
  function getSetCodec(item, config = {}) {
525
463
  return combineCodec(getSetEncoder(item, config), getSetDecoder(item, config));
526
464
  }
527
- function structCodecHelper(fields, description) {
528
- const fieldDescriptions = fields.map(([name, codec]) => `${String(name)}: ${codec.description}`).join(", ");
529
- return {
530
- description: description ?? `struct(${fieldDescriptions})`,
531
- fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)),
532
- maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize))
533
- };
534
- }
535
- function getStructEncoder(fields, config = {}) {
536
- return {
537
- ...structCodecHelper(fields, config.description),
538
- encode: (struct) => {
539
- const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key]));
540
- return mergeBytes(fieldBytes);
465
+ function getStructEncoder(fields) {
466
+ const fieldCodecs = fields.map(([, codec]) => codec);
467
+ const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
468
+ const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
469
+ return createEncoder({
470
+ ...fixedSize === null ? {
471
+ getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0),
472
+ maxSize
473
+ } : { fixedSize },
474
+ write: (struct, bytes, offset) => {
475
+ fields.forEach(([key, codec]) => {
476
+ offset = codec.write(struct[key], bytes, offset);
477
+ });
478
+ return offset;
541
479
  }
542
- };
543
- }
544
- function getStructDecoder(fields, config = {}) {
545
- return {
546
- ...structCodecHelper(fields, config.description),
547
- decode: (bytes, offset = 0) => {
480
+ });
481
+ }
482
+ function getStructDecoder(fields) {
483
+ const fieldCodecs = fields.map(([, codec]) => codec);
484
+ const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
485
+ const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
486
+ return createDecoder({
487
+ ...fixedSize === null ? { maxSize } : { fixedSize },
488
+ read: (bytes, offset) => {
548
489
  const struct = {};
549
490
  fields.forEach(([key, codec]) => {
550
- const [value, newOffset] = codec.decode(bytes, offset);
491
+ const [value, newOffset] = codec.read(bytes, offset);
551
492
  offset = newOffset;
552
493
  struct[key] = value;
553
494
  });
554
495
  return [struct, offset];
555
496
  }
556
- };
557
- }
558
- function getStructCodec(fields, config = {}) {
559
- return combineCodec(getStructEncoder(fields, config), getStructDecoder(fields, config));
560
- }
561
- function tupleCodecHelper(items, description) {
562
- const itemDescriptions = items.map((item) => item.description).join(", ");
563
- return {
564
- description: description ?? `tuple(${itemDescriptions})`,
565
- fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)),
566
- maxSize: sumCodecSizes(items.map((item) => item.maxSize))
567
- };
497
+ });
568
498
  }
569
- function getTupleEncoder(items, config = {}) {
570
- return {
571
- ...tupleCodecHelper(items, config.description),
572
- encode: (value) => {
573
- assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
574
- return mergeBytes(items.map((item, index) => item.encode(value[index])));
575
- }
576
- };
499
+ function getStructCodec(fields) {
500
+ return combineCodec(getStructEncoder(fields), getStructDecoder(fields));
577
501
  }
578
- function getTupleDecoder(items, config = {}) {
579
- return {
580
- ...tupleCodecHelper(items, config.description),
581
- decode: (bytes, offset = 0) => {
582
- const values = [];
583
- items.forEach((codec) => {
584
- const [newValue, newOffset] = codec.decode(bytes, offset);
585
- values.push(newValue);
586
- offset = newOffset;
587
- });
588
- return [values, offset];
589
- }
590
- };
591
- }
592
- function getTupleCodec(items, config = {}) {
593
- return combineCodec(
594
- getTupleEncoder(items, config),
595
- getTupleDecoder(items, config)
596
- );
597
- }
598
- function getUnitEncoder(config = {}) {
599
- return {
600
- description: config.description ?? "unit",
601
- encode: () => new Uint8Array(),
502
+ function getUnitEncoder() {
503
+ return createEncoder({
602
504
  fixedSize: 0,
603
- maxSize: 0
604
- };
505
+ write: (_value, _bytes, offset) => offset
506
+ });
605
507
  }
606
- function getUnitDecoder(config = {}) {
607
- return {
608
- decode: (_bytes, offset = 0) => [void 0, offset],
609
- description: config.description ?? "unit",
508
+ function getUnitDecoder() {
509
+ return createDecoder({
610
510
  fixedSize: 0,
611
- maxSize: 0
612
- };
511
+ read: (_bytes, offset) => [void 0, offset]
512
+ });
613
513
  }
614
- function getUnitCodec(config = {}) {
615
- return combineCodec(getUnitEncoder(config), getUnitDecoder(config));
514
+ function getUnitCodec() {
515
+ return combineCodec(getUnitEncoder(), getUnitDecoder());
616
516
  }
617
517
 
618
- export { assertValidNumberOfItemsForCodec, decodeArrayLikeCodecSize, getArrayCodec, getArrayDecoder, getArrayEncoder, getArrayLikeCodecSizeDescription, getArrayLikeCodecSizeFromChildren, getArrayLikeCodecSizePrefix, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
518
+ export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
619
519
  //# sourceMappingURL=out.js.map
620
520
  //# sourceMappingURL=index.node.js.map