@solana/codecs-data-structures 2.0.0-experimental.fc4e943 → 2.0.0-experimental.fcff844

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. package/dist/index.browser.cjs +385 -474
  2. package/dist/index.browser.cjs.map +1 -1
  3. package/dist/index.browser.js +389 -472
  4. package/dist/index.browser.js.map +1 -1
  5. package/dist/index.development.js +501 -566
  6. package/dist/index.development.js.map +1 -1
  7. package/dist/index.native.js +387 -474
  8. package/dist/index.native.js.map +1 -1
  9. package/dist/index.node.cjs +385 -476
  10. package/dist/index.node.cjs.map +1 -1
  11. package/dist/index.node.js +387 -474
  12. package/dist/index.node.js.map +1 -1
  13. package/dist/index.production.min.js +37 -41
  14. package/dist/types/array.d.ts +54 -10
  15. package/dist/types/array.d.ts.map +1 -1
  16. package/dist/types/bit-array.d.ts +9 -9
  17. package/dist/types/bit-array.d.ts.map +1 -1
  18. package/dist/types/boolean.d.ts +22 -10
  19. package/dist/types/boolean.d.ts.map +1 -1
  20. package/dist/types/bytes.d.ts +18 -9
  21. package/dist/types/bytes.d.ts.map +1 -1
  22. package/dist/types/data-enum.d.ts +20 -20
  23. package/dist/types/data-enum.d.ts.map +1 -1
  24. package/dist/types/index.d.ts +0 -1
  25. package/dist/types/index.d.ts.map +1 -1
  26. package/dist/types/map.d.ts +43 -10
  27. package/dist/types/map.d.ts.map +1 -1
  28. package/dist/types/nullable.d.ts +28 -10
  29. package/dist/types/nullable.d.ts.map +1 -1
  30. package/dist/types/scalar-enum.d.ts +22 -10
  31. package/dist/types/scalar-enum.d.ts.map +1 -1
  32. package/dist/types/set.d.ts +43 -10
  33. package/dist/types/set.d.ts.map +1 -1
  34. package/dist/types/struct.d.ts +28 -18
  35. package/dist/types/struct.d.ts.map +1 -1
  36. package/dist/types/tuple.d.ts +22 -15
  37. package/dist/types/tuple.d.ts.map +1 -1
  38. package/dist/types/unit.d.ts +4 -12
  39. package/dist/types/unit.d.ts.map +1 -1
  40. package/dist/types/utils.d.ts +10 -2
  41. package/dist/types/utils.d.ts.map +1 -1
  42. package/package.json +11 -11
  43. package/dist/types/array-like-codec-size.d.ts +0 -20
  44. package/dist/types/array-like-codec-size.d.ts.map +0 -1
@@ -14,28 +14,6 @@ this.globalThis.solanaWeb3 = (function (exports) {
14
14
  throw new Error(`Codec [${codecDescription}] expected ${expected} bytes, got ${bytesLength}.`);
15
15
  }
16
16
  }
17
- function assertFixedSizeCodec(data, message) {
18
- if (data.fixedSize === null) {
19
- throw new Error(message ?? "Expected a fixed-size codec, got a variable-size one.");
20
- }
21
- }
22
- var mergeBytes = (byteArrays) => {
23
- const nonEmptyByteArrays = byteArrays.filter((arr) => arr.length);
24
- if (nonEmptyByteArrays.length === 0) {
25
- return byteArrays.length ? byteArrays[0] : new Uint8Array();
26
- }
27
- if (nonEmptyByteArrays.length === 1) {
28
- return nonEmptyByteArrays[0];
29
- }
30
- const totalLength = nonEmptyByteArrays.reduce((total, arr) => total + arr.length, 0);
31
- const result = new Uint8Array(totalLength);
32
- let offset = 0;
33
- nonEmptyByteArrays.forEach((arr) => {
34
- result.set(arr, offset);
35
- offset += arr.length;
36
- });
37
- return result;
38
- };
39
17
  var padBytes = (bytes, length) => {
40
18
  if (bytes.length >= length)
41
19
  return bytes;
@@ -44,58 +22,100 @@ this.globalThis.solanaWeb3 = (function (exports) {
44
22
  return paddedBytes;
45
23
  };
46
24
  var fixBytes = (bytes, length) => padBytes(bytes.length <= length ? bytes : bytes.slice(0, length), length);
47
- function combineCodec(encoder, decoder, description) {
48
- if (encoder.fixedSize !== decoder.fixedSize) {
25
+ function getEncodedSize(value, encoder) {
26
+ return "fixedSize" in encoder ? encoder.fixedSize : encoder.getSizeFromValue(value);
27
+ }
28
+ function createEncoder(encoder) {
29
+ return Object.freeze({
30
+ ...encoder,
31
+ encode: (value) => {
32
+ const bytes = new Uint8Array(getEncodedSize(value, encoder));
33
+ encoder.write(value, bytes, 0);
34
+ return bytes;
35
+ }
36
+ });
37
+ }
38
+ function createDecoder(decoder) {
39
+ return Object.freeze({
40
+ ...decoder,
41
+ decode: (bytes, offset = 0) => decoder.read(bytes, offset)[0]
42
+ });
43
+ }
44
+ function isFixedSize(codec) {
45
+ return "fixedSize" in codec && typeof codec.fixedSize === "number";
46
+ }
47
+ function assertIsFixedSize(codec, message) {
48
+ if (!isFixedSize(codec)) {
49
+ throw new Error(message != null ? message : "Expected a fixed-size codec, got a variable-size one.");
50
+ }
51
+ }
52
+ function isVariableSize(codec) {
53
+ return !isFixedSize(codec);
54
+ }
55
+ function combineCodec(encoder, decoder) {
56
+ if (isFixedSize(encoder) !== isFixedSize(decoder)) {
57
+ throw new Error(`Encoder and decoder must either both be fixed-size or variable-size.`);
58
+ }
59
+ if (isFixedSize(encoder) && isFixedSize(decoder) && encoder.fixedSize !== decoder.fixedSize) {
49
60
  throw new Error(
50
61
  `Encoder and decoder must have the same fixed size, got [${encoder.fixedSize}] and [${decoder.fixedSize}].`
51
62
  );
52
63
  }
53
- if (encoder.maxSize !== decoder.maxSize) {
64
+ if (!isFixedSize(encoder) && !isFixedSize(decoder) && encoder.maxSize !== decoder.maxSize) {
54
65
  throw new Error(
55
66
  `Encoder and decoder must have the same max size, got [${encoder.maxSize}] and [${decoder.maxSize}].`
56
67
  );
57
68
  }
58
- if (description === void 0 && encoder.description !== decoder.description) {
59
- throw new Error(
60
- `Encoder and decoder must have the same description, got [${encoder.description}] and [${decoder.description}]. Pass a custom description as a third argument if you want to override the description and bypass this error.`
61
- );
62
- }
63
69
  return {
70
+ ...decoder,
71
+ ...encoder,
64
72
  decode: decoder.decode,
65
- description: description ?? encoder.description,
66
73
  encode: encoder.encode,
67
- fixedSize: encoder.fixedSize,
68
- maxSize: encoder.maxSize
74
+ read: decoder.read,
75
+ write: encoder.write
69
76
  };
70
77
  }
71
- function fixCodecHelper(data, fixedBytes, description) {
72
- return {
73
- description: description ?? `fixed(${fixedBytes}, ${data.description})`,
78
+ function fixEncoder(encoder, fixedBytes) {
79
+ return createEncoder({
74
80
  fixedSize: fixedBytes,
75
- maxSize: fixedBytes
76
- };
77
- }
78
- function fixEncoder(encoder, fixedBytes, description) {
79
- return {
80
- ...fixCodecHelper(encoder, fixedBytes, description),
81
- encode: (value) => fixBytes(encoder.encode(value), fixedBytes)
82
- };
81
+ write: (value, bytes, offset) => {
82
+ const variableByteArray = encoder.encode(value);
83
+ const fixedByteArray = variableByteArray.length > fixedBytes ? variableByteArray.slice(0, fixedBytes) : variableByteArray;
84
+ bytes.set(fixedByteArray, offset);
85
+ return offset + fixedBytes;
86
+ }
87
+ });
83
88
  }
84
- function fixDecoder(decoder, fixedBytes, description) {
85
- return {
86
- ...fixCodecHelper(decoder, fixedBytes, description),
87
- decode: (bytes, offset = 0) => {
89
+ function fixDecoder(decoder, fixedBytes) {
90
+ return createDecoder({
91
+ fixedSize: fixedBytes,
92
+ read: (bytes, offset) => {
88
93
  assertByteArrayHasEnoughBytesForCodec("fixCodec", fixedBytes, bytes, offset);
89
94
  if (offset > 0 || bytes.length > fixedBytes) {
90
95
  bytes = bytes.slice(offset, offset + fixedBytes);
91
96
  }
92
- if (decoder.fixedSize !== null) {
97
+ if (isFixedSize(decoder)) {
93
98
  bytes = fixBytes(bytes, decoder.fixedSize);
94
99
  }
95
- const [value] = decoder.decode(bytes, 0);
100
+ const [value] = decoder.read(bytes, 0);
96
101
  return [value, offset + fixedBytes];
97
102
  }
98
- };
103
+ });
104
+ }
105
+ function mapEncoder(encoder, unmap) {
106
+ return createEncoder({
107
+ ...isVariableSize(encoder) ? { ...encoder, getSizeFromValue: (value) => encoder.getSizeFromValue(unmap(value)) } : encoder,
108
+ write: (value, bytes, offset) => encoder.write(unmap(value), bytes, offset)
109
+ });
110
+ }
111
+ function mapDecoder(decoder, map) {
112
+ return createDecoder({
113
+ ...decoder,
114
+ read: (bytes, offset) => {
115
+ const [value, newOffset] = decoder.read(bytes, offset);
116
+ return [map(value, bytes, offset), newOffset];
117
+ }
118
+ });
99
119
  }
100
120
 
101
121
  // ../codecs-numbers/dist/index.browser.js
@@ -106,82 +126,71 @@ this.globalThis.solanaWeb3 = (function (exports) {
106
126
  );
107
127
  }
108
128
  }
109
- function sharedNumberFactory(input) {
110
- let littleEndian;
111
- let defaultDescription = input.name;
112
- if (input.size > 1) {
113
- littleEndian = !("endian" in input.options) || input.options.endian === 0;
114
- defaultDescription += littleEndian ? "(le)" : "(be)";
115
- }
116
- return {
117
- description: input.options.description ?? defaultDescription,
118
- fixedSize: input.size,
119
- littleEndian,
120
- maxSize: input.size
121
- };
129
+ function isLittleEndian(config) {
130
+ return (config == null ? void 0 : config.endian) === 1 ? false : true;
122
131
  }
123
132
  function numberEncoderFactory(input) {
124
- const codecData = sharedNumberFactory(input);
125
- return {
126
- description: codecData.description,
127
- encode(value) {
133
+ return createEncoder({
134
+ fixedSize: input.size,
135
+ write(value, bytes, offset) {
128
136
  if (input.range) {
129
137
  assertNumberIsBetweenForCodec(input.name, input.range[0], input.range[1], value);
130
138
  }
131
139
  const arrayBuffer = new ArrayBuffer(input.size);
132
- input.set(new DataView(arrayBuffer), value, codecData.littleEndian);
133
- return new Uint8Array(arrayBuffer);
134
- },
135
- fixedSize: codecData.fixedSize,
136
- maxSize: codecData.maxSize
137
- };
140
+ input.set(new DataView(arrayBuffer), value, isLittleEndian(input.config));
141
+ bytes.set(new Uint8Array(arrayBuffer), offset);
142
+ return offset + input.size;
143
+ }
144
+ });
138
145
  }
139
146
  function numberDecoderFactory(input) {
140
- const codecData = sharedNumberFactory(input);
141
- return {
142
- decode(bytes, offset = 0) {
143
- assertByteArrayIsNotEmptyForCodec(codecData.description, bytes, offset);
144
- assertByteArrayHasEnoughBytesForCodec(codecData.description, input.size, bytes, offset);
147
+ return createDecoder({
148
+ fixedSize: input.size,
149
+ read(bytes, offset = 0) {
150
+ assertByteArrayIsNotEmptyForCodec(input.name, bytes, offset);
151
+ assertByteArrayHasEnoughBytesForCodec(input.name, input.size, bytes, offset);
145
152
  const view = new DataView(toArrayBuffer(bytes, offset, input.size));
146
- return [input.get(view, codecData.littleEndian), offset + input.size];
147
- },
148
- description: codecData.description,
149
- fixedSize: codecData.fixedSize,
150
- maxSize: codecData.maxSize
151
- };
153
+ return [input.get(view, isLittleEndian(input.config)), offset + input.size];
154
+ }
155
+ });
152
156
  }
153
157
  function toArrayBuffer(bytes, offset, length) {
154
- const bytesOffset = bytes.byteOffset + (offset ?? 0);
155
- const bytesLength = length ?? bytes.byteLength;
158
+ const bytesOffset = bytes.byteOffset + (offset != null ? offset : 0);
159
+ const bytesLength = length != null ? length : bytes.byteLength;
156
160
  return bytes.buffer.slice(bytesOffset, bytesOffset + bytesLength);
157
161
  }
158
- var getU32Encoder = (options = {}) => numberEncoderFactory({
162
+ var getU32Encoder = (config = {}) => numberEncoderFactory({
163
+ config,
159
164
  name: "u32",
160
- options,
161
165
  range: [0, Number("0xffffffff")],
162
166
  set: (view, value, le) => view.setUint32(0, value, le),
163
167
  size: 4
164
168
  });
165
- var getU32Decoder = (options = {}) => numberDecoderFactory({
169
+ var getU32Decoder = (config = {}) => numberDecoderFactory({
170
+ config,
166
171
  get: (view, le) => view.getUint32(0, le),
167
172
  name: "u32",
168
- options,
169
173
  size: 4
170
174
  });
171
- var getU8Encoder = (options = {}) => numberEncoderFactory({
175
+ var getU8Encoder = () => numberEncoderFactory({
172
176
  name: "u8",
173
- options,
174
177
  range: [0, Number("0xff")],
175
178
  set: (view, value) => view.setUint8(0, value),
176
179
  size: 1
177
180
  });
178
- var getU8Decoder = (options = {}) => numberDecoderFactory({
181
+ var getU8Decoder = () => numberDecoderFactory({
179
182
  get: (view) => view.getUint8(0),
180
183
  name: "u8",
181
- options,
182
184
  size: 1
183
185
  });
184
186
 
187
+ // src/assertions.ts
188
+ function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
189
+ if (expected !== actual) {
190
+ throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
191
+ }
192
+ }
193
+
185
194
  // src/utils.ts
186
195
  function maxCodecSizes(sizes) {
187
196
  return sizes.reduce(
@@ -192,132 +201,138 @@ this.globalThis.solanaWeb3 = (function (exports) {
192
201
  function sumCodecSizes(sizes) {
193
202
  return sizes.reduce((all, size) => all === null || size === null ? null : all + size, 0);
194
203
  }
204
+ function getFixedSize(codec) {
205
+ return isFixedSize(codec) ? codec.fixedSize : null;
206
+ }
207
+ function getMaxSize(codec) {
208
+ var _a;
209
+ return isFixedSize(codec) ? codec.fixedSize : (_a = codec.maxSize) != null ? _a : null;
210
+ }
195
211
 
196
- // src/array-like-codec-size.ts
197
- function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
212
+ // src/array.ts
213
+ function getArrayEncoder(item, config = {}) {
214
+ var _a, _b;
215
+ const size = (_a = config.size) != null ? _a : getU32Encoder();
216
+ if (size === "remainder") {
217
+ assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
218
+ }
219
+ const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
220
+ const maxSize = (_b = computeArrayLikeCodecSize(size, getMaxSize(item))) != null ? _b : void 0;
221
+ return createEncoder({
222
+ ...fixedSize !== null ? { fixedSize } : {
223
+ getSizeFromValue: (array) => {
224
+ const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
225
+ return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
226
+ },
227
+ maxSize
228
+ },
229
+ write: (array, bytes, offset) => {
230
+ if (typeof size === "number") {
231
+ assertValidNumberOfItemsForCodec("array", size, array.length);
232
+ }
233
+ if (typeof size === "object") {
234
+ offset = size.write(array.length, bytes, offset);
235
+ }
236
+ array.forEach((value) => {
237
+ offset = item.write(value, bytes, offset);
238
+ });
239
+ return offset;
240
+ }
241
+ });
242
+ }
243
+ function getArrayDecoder(item, config = {}) {
244
+ var _a, _b;
245
+ const size = (_a = config.size) != null ? _a : getU32Decoder();
246
+ if (size === "remainder") {
247
+ assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
248
+ }
249
+ const itemSize = getFixedSize(item);
250
+ const fixedSize = computeArrayLikeCodecSize(size, itemSize);
251
+ const maxSize = (_b = computeArrayLikeCodecSize(size, getMaxSize(item))) != null ? _b : void 0;
252
+ return createDecoder({
253
+ ...fixedSize !== null ? { fixedSize } : { maxSize },
254
+ read: (bytes, offset) => {
255
+ const array = [];
256
+ if (typeof size === "object" && bytes.slice(offset).length === 0) {
257
+ return [array, offset];
258
+ }
259
+ const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset);
260
+ offset = newOffset;
261
+ for (let i = 0; i < resolvedSize; i += 1) {
262
+ const [value, newOffset2] = item.read(bytes, offset);
263
+ offset = newOffset2;
264
+ array.push(value);
265
+ }
266
+ return [array, offset];
267
+ }
268
+ });
269
+ }
270
+ function getArrayCodec(item, config = {}) {
271
+ return combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config));
272
+ }
273
+ function readArrayLikeCodecSize(size, itemSize, bytes, offset) {
198
274
  if (typeof size === "number") {
199
275
  return [size, offset];
200
276
  }
201
277
  if (typeof size === "object") {
202
- return size.decode(bytes, offset);
278
+ return size.read(bytes, offset);
203
279
  }
204
280
  if (size === "remainder") {
205
- const childrenSize = sumCodecSizes(childrenSizes);
206
- if (childrenSize === null) {
281
+ if (itemSize === null) {
207
282
  throw new Error('Codecs of "remainder" size must have fixed-size items.');
208
283
  }
209
- const remainder = bytes.slice(offset).length;
210
- if (remainder % childrenSize !== 0) {
284
+ const remainder = Math.max(0, bytes.length - offset);
285
+ if (remainder % itemSize !== 0) {
211
286
  throw new Error(
212
- `The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
287
+ `The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.`
213
288
  );
214
289
  }
215
- return [remainder / childrenSize, offset];
290
+ return [remainder / itemSize, offset];
216
291
  }
217
292
  throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
218
293
  }
219
- function getArrayLikeCodecSizeDescription(size) {
220
- return typeof size === "object" ? size.description : `${size}`;
221
- }
222
- function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
294
+ function computeArrayLikeCodecSize(size, itemSize) {
223
295
  if (typeof size !== "number")
224
296
  return null;
225
297
  if (size === 0)
226
298
  return 0;
227
- const childrenSize = sumCodecSizes(childrenSizes);
228
- return childrenSize === null ? null : childrenSize * size;
229
- }
230
- function getArrayLikeCodecSizePrefix(size, realSize) {
231
- return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
232
- }
233
-
234
- // src/assertions.ts
235
- function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
236
- if (expected !== actual) {
237
- throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
238
- }
239
- }
240
-
241
- // src/array.ts
242
- function arrayCodecHelper(item, size, description) {
243
- if (size === "remainder" && item.fixedSize === null) {
244
- throw new Error('Codecs of "remainder" size must have fixed-size items.');
245
- }
246
- return {
247
- description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
248
- fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
249
- maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
250
- };
251
- }
252
- function getArrayEncoder(item, options = {}) {
253
- const size = options.size ?? getU32Encoder();
254
- return {
255
- ...arrayCodecHelper(item, size, options.description),
256
- encode: (value) => {
257
- if (typeof size === "number") {
258
- assertValidNumberOfItemsForCodec("array", size, value.length);
259
- }
260
- return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]);
261
- }
262
- };
263
- }
264
- function getArrayDecoder(item, options = {}) {
265
- const size = options.size ?? getU32Decoder();
266
- return {
267
- ...arrayCodecHelper(item, size, options.description),
268
- decode: (bytes, offset = 0) => {
269
- if (typeof size === "object" && bytes.slice(offset).length === 0) {
270
- return [[], offset];
271
- }
272
- const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
273
- offset = newOffset;
274
- const values = [];
275
- for (let i = 0; i < resolvedSize; i += 1) {
276
- const [value, newOffset2] = item.decode(bytes, offset);
277
- values.push(value);
278
- offset = newOffset2;
279
- }
280
- return [values, offset];
281
- }
282
- };
283
- }
284
- function getArrayCodec(item, options = {}) {
285
- return combineCodec(getArrayEncoder(item, options), getArrayDecoder(item, options));
299
+ return itemSize === null ? null : itemSize * size;
286
300
  }
287
301
 
288
302
  // src/bit-array.ts
289
- var getBitArrayEncoder = (size, options = {}) => {
290
- const parsedOptions = typeof options === "boolean" ? { backward: options } : options;
291
- const backward = parsedOptions.backward ?? false;
292
- const backwardSuffix = backward ? "; backward" : "";
293
- return {
294
- description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`,
295
- encode(value) {
296
- const bytes = [];
303
+ function getBitArrayEncoder(size, config = {}) {
304
+ var _a;
305
+ const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
306
+ const backward = (_a = parsedConfig.backward) != null ? _a : false;
307
+ return createEncoder({
308
+ fixedSize: size,
309
+ write(value, bytes, offset) {
310
+ var _a2;
311
+ const bytesToAdd = [];
297
312
  for (let i = 0; i < size; i += 1) {
298
313
  let byte = 0;
299
314
  for (let j = 0; j < 8; j += 1) {
300
- const feature = Number(value[i * 8 + j] ?? 0);
315
+ const feature = Number((_a2 = value[i * 8 + j]) != null ? _a2 : 0);
301
316
  byte |= feature << (backward ? j : 7 - j);
302
317
  }
303
318
  if (backward) {
304
- bytes.unshift(byte);
319
+ bytesToAdd.unshift(byte);
305
320
  } else {
306
- bytes.push(byte);
321
+ bytesToAdd.push(byte);
307
322
  }
308
323
  }
309
- return new Uint8Array(bytes);
310
- },
324
+ bytes.set(bytesToAdd, offset);
325
+ return size;
326
+ }
327
+ });
328
+ }
329
+ function getBitArrayDecoder(size, config = {}) {
330
+ var _a;
331
+ const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
332
+ const backward = (_a = parsedConfig.backward) != null ? _a : false;
333
+ return createDecoder({
311
334
  fixedSize: size,
312
- maxSize: size
313
- };
314
- };
315
- var getBitArrayDecoder = (size, options = {}) => {
316
- const parsedOptions = typeof options === "boolean" ? { backward: options } : options;
317
- const backward = parsedOptions.backward ?? false;
318
- const backwardSuffix = backward ? "; backward" : "";
319
- return {
320
- decode(bytes, offset = 0) {
335
+ read(bytes, offset) {
321
336
  assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);
322
337
  const booleans = [];
323
338
  let slice = bytes.slice(offset, offset + size);
@@ -334,496 +349,416 @@ this.globalThis.solanaWeb3 = (function (exports) {
334
349
  }
335
350
  });
336
351
  return [booleans, offset + size];
337
- },
338
- description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`,
339
- fixedSize: size,
340
- maxSize: size
341
- };
342
- };
343
- var getBitArrayCodec = (size, options = {}) => combineCodec(getBitArrayEncoder(size, options), getBitArrayDecoder(size, options));
352
+ }
353
+ });
354
+ }
355
+ function getBitArrayCodec(size, config = {}) {
356
+ return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
357
+ }
344
358
 
345
359
  // src/boolean.ts
346
- function getBooleanEncoder(options = {}) {
347
- const size = options.size ?? getU8Encoder();
348
- assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
349
- return {
350
- description: options.description ?? `bool(${size.description})`,
351
- encode: (value) => size.encode(value ? 1 : 0),
352
- fixedSize: size.fixedSize,
353
- maxSize: size.fixedSize
354
- };
360
+ function getBooleanEncoder(config = {}) {
361
+ var _a;
362
+ const size = (_a = config.size) != null ? _a : getU8Encoder();
363
+ assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
364
+ return mapEncoder(size, (value) => value ? 1 : 0);
355
365
  }
356
- function getBooleanDecoder(options = {}) {
357
- const size = options.size ?? getU8Decoder();
358
- assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
359
- return {
360
- decode: (bytes, offset = 0) => {
361
- assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
362
- const [value, vOffset] = size.decode(bytes, offset);
363
- return [value === 1, vOffset];
364
- },
365
- description: options.description ?? `bool(${size.description})`,
366
- fixedSize: size.fixedSize,
367
- maxSize: size.fixedSize
368
- };
366
+ function getBooleanDecoder(config = {}) {
367
+ var _a;
368
+ const size = (_a = config.size) != null ? _a : getU8Decoder();
369
+ assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
370
+ return mapDecoder(size, (value) => Number(value) === 1);
369
371
  }
370
- function getBooleanCodec(options = {}) {
371
- return combineCodec(getBooleanEncoder(options), getBooleanDecoder(options));
372
+ function getBooleanCodec(config = {}) {
373
+ return combineCodec(getBooleanEncoder(config), getBooleanDecoder(config));
372
374
  }
373
375
 
374
376
  // src/bytes.ts
375
- function getBytesEncoder(options = {}) {
376
- const size = options.size ?? "variable";
377
- const sizeDescription = typeof size === "object" ? size.description : `${size}`;
378
- const description = options.description ?? `bytes(${sizeDescription})`;
379
- const byteEncoder = {
380
- description,
381
- encode: (value) => value,
382
- fixedSize: null,
383
- maxSize: null
384
- };
377
+ function getBytesEncoder(config = {}) {
378
+ var _a;
379
+ const size = (_a = config.size) != null ? _a : "variable";
380
+ const byteEncoder = createEncoder({
381
+ getSizeFromValue: (value) => value.length,
382
+ write: (value, bytes, offset) => {
383
+ bytes.set(value, offset);
384
+ return offset + value.length;
385
+ }
386
+ });
385
387
  if (size === "variable") {
386
388
  return byteEncoder;
387
389
  }
388
390
  if (typeof size === "number") {
389
- return fixEncoder(byteEncoder, size, description);
391
+ return fixEncoder(byteEncoder, size);
390
392
  }
391
- return {
392
- ...byteEncoder,
393
- encode: (value) => {
394
- const contentBytes = byteEncoder.encode(value);
395
- const lengthBytes = size.encode(contentBytes.length);
396
- return mergeBytes([lengthBytes, contentBytes]);
393
+ return createEncoder({
394
+ getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
395
+ write: (value, bytes, offset) => {
396
+ offset = size.write(value.length, bytes, offset);
397
+ return byteEncoder.write(value, bytes, offset);
397
398
  }
398
- };
399
+ });
399
400
  }
400
- function getBytesDecoder(options = {}) {
401
- const size = options.size ?? "variable";
402
- const sizeDescription = typeof size === "object" ? size.description : `${size}`;
403
- const description = options.description ?? `bytes(${sizeDescription})`;
404
- const byteDecoder = {
405
- decode: (bytes, offset = 0) => {
401
+ function getBytesDecoder(config = {}) {
402
+ var _a;
403
+ const size = (_a = config.size) != null ? _a : "variable";
404
+ const byteDecoder = createDecoder({
405
+ read: (bytes, offset) => {
406
406
  const slice = bytes.slice(offset);
407
407
  return [slice, offset + slice.length];
408
- },
409
- description,
410
- fixedSize: null,
411
- maxSize: null
412
- };
408
+ }
409
+ });
413
410
  if (size === "variable") {
414
411
  return byteDecoder;
415
412
  }
416
413
  if (typeof size === "number") {
417
- return fixDecoder(byteDecoder, size, description);
414
+ return fixDecoder(byteDecoder, size);
418
415
  }
419
- return {
420
- ...byteDecoder,
421
- decode: (bytes, offset = 0) => {
416
+ return createDecoder({
417
+ read: (bytes, offset) => {
422
418
  assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
423
- const [lengthBigInt, lengthOffset] = size.decode(bytes, offset);
419
+ const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
424
420
  const length = Number(lengthBigInt);
425
421
  offset = lengthOffset;
426
422
  const contentBytes = bytes.slice(offset, offset + length);
427
423
  assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
428
- const [value, contentOffset] = byteDecoder.decode(contentBytes);
424
+ const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
429
425
  offset += contentOffset;
430
426
  return [value, offset];
431
427
  }
432
- };
428
+ });
433
429
  }
434
- function getBytesCodec(options = {}) {
435
- return combineCodec(getBytesEncoder(options), getBytesDecoder(options));
430
+ function getBytesCodec(config = {}) {
431
+ return combineCodec(getBytesEncoder(config), getBytesDecoder(config));
436
432
  }
437
433
 
438
434
  // src/data-enum.ts
439
- function dataEnumCodecHelper(variants, prefix, description) {
440
- const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
441
- const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
442
- const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
443
- const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
444
- return {
445
- description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
446
- fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
447
- maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
448
- };
449
- }
450
- function getDataEnumEncoder(variants, options = {}) {
451
- const prefix = options.size ?? getU8Encoder();
452
- return {
453
- ...dataEnumCodecHelper(variants, prefix, options.description),
454
- encode: (variant) => {
455
- const discriminator = variants.findIndex(([key]) => variant.__kind === key);
456
- if (discriminator < 0) {
457
- throw new Error(
458
- `Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
459
- );
460
- }
461
- const variantPrefix = prefix.encode(discriminator);
462
- const variantSerializer = variants[discriminator][1];
463
- const variantBytes = variantSerializer.encode(variant);
464
- return mergeBytes([variantPrefix, variantBytes]);
435
+ function getDataEnumEncoder(variants, config = {}) {
436
+ var _a;
437
+ const prefix = (_a = config.size) != null ? _a : getU8Encoder();
438
+ const fixedSize = getDataEnumFixedSize(variants, prefix);
439
+ return createEncoder({
440
+ ...fixedSize !== null ? { fixedSize } : {
441
+ getSizeFromValue: (variant) => {
442
+ const discriminator = getVariantDiscriminator(variants, variant);
443
+ const variantEncoder = variants[discriminator][1];
444
+ return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
445
+ },
446
+ maxSize: getDataEnumMaxSize(variants, prefix)
447
+ },
448
+ write: (variant, bytes, offset) => {
449
+ const discriminator = getVariantDiscriminator(variants, variant);
450
+ offset = prefix.write(discriminator, bytes, offset);
451
+ const variantEncoder = variants[discriminator][1];
452
+ return variantEncoder.write(variant, bytes, offset);
465
453
  }
466
- };
454
+ });
467
455
  }
468
/**
 * Builds a decoder for data enums: reads the discriminator via the `size`
 * prefix (u8 by default), selects the matching variant decoder and merges
 * the decoded payload with a `__kind` tag naming the variant.
 * Throws when the discriminator does not index an existing variant.
 */
function getDataEnumDecoder(variants, config = {}) {
  const prefix = config.size ?? getU8Decoder();
  const fixedSize = getDataEnumFixedSize(variants, prefix);
  return createDecoder({
    ...(fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) }),
    read: (bytes, offset) => {
      assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
      const [discriminator, afterPrefix] = prefix.read(bytes, offset);
      const variantField = variants[Number(discriminator)] ?? null;
      if (!variantField) {
        throw new Error(
          `Enum discriminator out of range. Expected a number between 0 and ${variants.length - 1}, got ${discriminator}.`
        );
      }
      const [variant, afterVariant] = variantField[1].read(bytes, afterPrefix);
      // Spread the payload (if any) under the variant's `__kind` tag.
      return [{ __kind: variantField[0], ...(variant ?? {}) }, afterVariant];
    },
  });
}
488
/** Combines the data-enum encoder and decoder into one codec. */
function getDataEnumCodec(variants, config = {}) {
  const encoder = getDataEnumEncoder(variants, config);
  const decoder = getDataEnumDecoder(variants, config);
  return combineCodec(encoder, decoder);
}
491
/**
 * Returns the total fixed size of a data enum (prefix + payload) when it can
 * be determined, or `null` when any part is variable-size or variants differ.
 */
function getDataEnumFixedSize(variants, prefix) {
  // No variants: the enum is just its prefix.
  if (variants.length === 0) {
    return isFixedSize(prefix) ? prefix.fixedSize : null;
  }
  if (!isFixedSize(variants[0][1])) return null;
  const firstSize = variants[0][1].fixedSize;
  // Every variant must be fixed-size AND match the first variant's size.
  const allSameSize = variants.every(
    (entry) => isFixedSize(entry[1]) && entry[1].fixedSize === firstSize
  );
  if (!allSameSize) return null;
  return isFixedSize(prefix) ? prefix.fixedSize + firstSize : null;
}
495
/**
 * Returns the maximum encoded size of a data enum (prefix max + largest
 * variant max), or `undefined` when that bound cannot be computed.
 */
function getDataEnumMaxSize(variants, prefix) {
  const largestVariant = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
  return sumCodecSizes([getMaxSize(prefix), largestVariant]) ?? undefined;
}
500
/**
 * Finds the index of the variant whose name matches `variant.__kind`.
 * Throws a descriptive error when no variant matches.
 */
function getVariantDiscriminator(variants, variant) {
  const index = variants.findIndex(([name]) => name === variant.__kind);
  if (index < 0) {
    throw new Error(
      `Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
    );
  }
  return index;
}
503
// src/tuple.ts
/**
 * Builds an encoder that writes each tuple item in order using its own
 * encoder. Fixed-size when every item encoder is fixed-size.
 * Throws if the value's length does not match the number of item encoders.
 */
function getTupleEncoder(items) {
  const fixedSize = sumCodecSizes(items.map(getFixedSize));
  const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? undefined;
  return createEncoder({
    ...(fixedSize === null
      ? {
          getSizeFromValue: (value) =>
            items.reduce((total, item, index) => total + getEncodedSize(value[index], item), 0),
          maxSize,
        }
      : { fixedSize }),
    write: (value, bytes, offset) => {
      assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
      for (const [index, item] of items.entries()) {
        offset = item.write(value[index], bytes, offset);
      }
      return offset;
    },
  });
}
516
/**
 * Builds a decoder that reads each tuple item in order using its own decoder
 * and returns the items as an array. Fixed-size when every item is.
 */
function getTupleDecoder(items) {
  const fixedSize = sumCodecSizes(items.map(getFixedSize));
  const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? undefined;
  return createDecoder({
    ...(fixedSize === null ? { maxSize } : { fixedSize }),
    read: (bytes, offset) => {
      const decoded = [];
      for (const item of items) {
        const [value, nextOffset] = item.read(bytes, offset);
        decoded.push(value);
        offset = nextOffset;
      }
      return [decoded, offset];
    },
  });
}
546
/** Combines the tuple encoder and decoder into one codec. */
function getTupleCodec(items) {
  return combineCodec(getTupleEncoder(items), getTupleDecoder(items));
}
552

// src/map.ts
/**
 * Encodes a `Map` as an array of `[key, value]` tuples, delegating size
 * prefixing and item layout to the array codec.
 */
function getMapEncoder(key, value, config = {}) {
  const entryEncoder = getTupleEncoder([key, value]);
  return mapEncoder(getArrayEncoder(entryEncoder, config), (map) => [...map.entries()]);
}
560
/**
 * Decodes an array of `[key, value]` tuples and materializes it as a `Map`.
 */
function getMapDecoder(key, value, config = {}) {
  const entryDecoder = getTupleDecoder([key, value]);
  return mapDecoder(getArrayDecoder(entryDecoder, config), (entries) => new Map(entries));
}
543
/** Combines the map encoder and decoder into one codec. */
function getMapCodec(key, value, config = {}) {
  return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
}
546
569
 
547
570
  // src/nullable.ts
548
/**
 * Builds an encoder for nullable values: a boolean prefix (u8 by default)
 * followed by the item when present. With `fixed: true` (or a zero-size
 * item), both prefix and item must be fixed-size and `null` still occupies
 * the full fixed span (the item region is left as-is).
 */
function getNullableEncoder(item, config = {}) {
  const prefix = config.prefix ?? getU8Encoder();
  const fixed = config.fixed ?? false;
  const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
  if (fixed || isZeroSizeItem) {
    assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
    assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
    const fixedSize = prefix.fixedSize + item.fixedSize;
    return createEncoder({
      fixedSize,
      write: (option, bytes, offset) => {
        const afterPrefix = prefix.write(Number(option !== null), bytes, offset);
        if (option !== null) {
          item.write(option, bytes, afterPrefix);
        }
        // Always advance by the full fixed span, even for `null`.
        return offset + fixedSize;
      },
    });
  }
  // Variable-size form: `null` costs only the prefix.
  return createEncoder({
    getSizeFromValue: (option) =>
      getEncodedSize(Number(option !== null), prefix) +
      (option !== null ? getEncodedSize(option, item) : 0),
    maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? undefined,
    write: (option, bytes, offset) => {
      offset = prefix.write(Number(option !== null), bytes, offset);
      if (option !== null) {
        offset = item.write(option, bytes, offset);
      }
      return offset;
    },
  });
}
576
/**
 * Builds a decoder for nullable values: reads the boolean prefix, then the
 * item when the prefix is non-zero. In fixed mode the cursor always advances
 * by the full fixed span; an empty remaining slice decodes to `null`.
 */
function getNullableDecoder(item, config = {}) {
  const prefix = config.prefix ?? getU8Decoder();
  const fixed = config.fixed ?? false;
  let fixedSize = null;
  const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
  if (fixed || isZeroSizeItem) {
    assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
    assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
    fixedSize = prefix.fixedSize + item.fixedSize;
  }
  return createDecoder({
    ...(fixedSize === null
      ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? undefined }
      : { fixedSize }),
    read: (bytes, offset) => {
      // Nothing left to read: treat as `null` without consuming bytes.
      if (bytes.length - offset <= 0) {
        return [null, offset];
      }
      const [isSome, afterPrefix] = prefix.read(bytes, offset);
      if (isSome === 0) {
        return [null, fixedSize !== null ? offset + fixedSize : afterPrefix];
      }
      const [value, afterValue] = item.read(bytes, afterPrefix);
      return [value, fixedSize !== null ? offset + fixedSize : afterValue];
    },
  });
}
597
/** Combines the nullable encoder and decoder into one codec. */
function getNullableCodec(item, config = {}) {
  const configCast = config;
  const encoder = getNullableEncoder(item, configCast);
  const decoder = getNullableDecoder(item, configCast);
  return combineCodec(encoder, decoder);
}
600
633
 
601
634
  // src/scalar-enum.ts
602
/**
 * Builds an encoder for scalar (TypeScript-style) enums. Accepts a variant
 * as a number (its discriminator), an enum value, or an enum key, and writes
 * the resulting discriminator via the `size` prefix (u8 by default).
 * Throws on out-of-range numbers or unknown strings.
 */
function getScalarEnumEncoder(constructor, config = {}) {
  const prefix = config.size ?? getU8Encoder();
  const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor);
  return mapEncoder(prefix, (value) => {
    const badNumber = typeof value === "number" && (value < minRange || value > maxRange);
    const badString = typeof value === "string" && !stringValues.includes(value);
    if (badNumber || badString) {
      throw new Error(
        `Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
      );
    }
    if (typeof value === "number") return value;
    // Prefer matching by enum value, then fall back to the key's position.
    const valueIndex = enumValues.indexOf(value);
    return valueIndex >= 0 ? valueIndex : enumKeys.indexOf(value);
  });
}
655
/**
 * Builds a decoder for scalar enums: reads the discriminator via the `size`
 * prefix (u8 by default) and maps it back to a number (numeric enums) or the
 * corresponding enum value (string enums). Throws when out of range.
 */
function getScalarEnumDecoder(constructor, config = {}) {
  const prefix = config.size ?? getU8Decoder();
  const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor);
  return mapDecoder(prefix, (raw) => {
    const discriminator = Number(raw);
    if (discriminator < minRange || discriminator > maxRange) {
      throw new Error(
        `Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${discriminator}.`
      );
    }
    return isNumericEnum ? discriminator : enumValues[discriminator];
  });
}
669
/** Combines the scalar-enum encoder and decoder into one codec. */
function getScalarEnumCodec(constructor, config = {}) {
  const encoder = getScalarEnumEncoder(constructor, config);
  const decoder = getScalarEnumDecoder(constructor, config);
  return combineCodec(encoder, decoder);
}
672
/**
 * Derives metadata from an enum-like object. Numeric TS enums carry a reverse
 * mapping (value -> key), so their member count is half the key count; string
 * enums use every entry. `stringValues` lists all strings accepted by the
 * encoder (keys, plus values for string enums, de-duplicated).
 */
function getScalarEnumStats(constructor) {
  const enumKeys = Object.keys(constructor);
  const enumValues = Object.values(constructor);
  const isNumericEnum = enumValues.some((v) => typeof v === "number");
  const minRange = 0;
  // Numeric enums double their entries via the reverse mapping.
  const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;
  const stringValues = isNumericEnum
    ? [...enumKeys]
    : [...new Set([...enumKeys, ...enumValues])];
  return { enumKeys, enumValues, isNumericEnum, maxRange, minRange, stringValues };
}
674
688
 
675
689
  // src/set.ts
676
/**
 * Encodes a `Set` by spreading it into an array and delegating to the array
 * codec (which handles the size prefix and item layout).
 */
function getSetEncoder(item, config = {}) {
  return mapEncoder(getArrayEncoder(item, config), (set) => [...set]);
}
686
/** Decodes an array of items and materializes it as a `Set`. */
function getSetDecoder(item, config = {}) {
  return mapDecoder(getArrayDecoder(item, config), (decodedItems) => new Set(decodedItems));
}
699
/** Combines the set encoder and decoder into one codec. */
function getSetCodec(item, config = {}) {
  return combineCodec(getSetEncoder(item, config), getSetDecoder(item, config));
}
722
699
 
723
700
  // src/struct.ts
724
/**
 * Builds an encoder for structs described as `[fieldName, codec]` pairs.
 * Fields are written in declaration order; the encoder is fixed-size when
 * every field codec is fixed-size.
 */
function getStructEncoder(fields) {
  const fieldCodecs = fields.map(([, codec]) => codec);
  const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
  const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? undefined;
  return createEncoder({
    ...(fixedSize === null
      ? {
          getSizeFromValue: (value) =>
            fields.reduce((total, [key, codec]) => total + getEncodedSize(value[key], codec), 0),
          maxSize,
        }
      : { fixedSize }),
    write: (struct, bytes, offset) => {
      for (const [key, codec] of fields) {
        offset = codec.write(struct[key], bytes, offset);
      }
      return offset;
    },
  });
}
741
/**
 * Builds a decoder for structs described as `[fieldName, codec]` pairs.
 * Fields are read in declaration order into a plain object.
 */
function getStructDecoder(fields) {
  const fieldCodecs = fields.map(([, codec]) => codec);
  const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
  const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? undefined;
  return createDecoder({
    ...(fixedSize === null ? { maxSize } : { fixedSize }),
    read: (bytes, offset) => {
      const struct = {};
      for (const [key, codec] of fields) {
        const [value, nextOffset] = codec.read(bytes, offset);
        struct[key] = value;
        offset = nextOffset;
      }
      return [struct, offset];
    },
  });
}
791
/** Combines the struct encoder and decoder into one codec. */
function getStructCodec(fields) {
  return combineCodec(getStructEncoder(fields), getStructDecoder(fields));
}
797
740
 
798
741
  // src/unit.ts
799
/** Zero-byte encoder: writes nothing and leaves the offset untouched. */
function getUnitEncoder() {
  return createEncoder({
    fixedSize: 0,
    write: (_value, _bytes, offset) => offset,
  });
}
807
/** Zero-byte decoder: yields `undefined` without consuming any bytes. */
function getUnitDecoder() {
  return createDecoder({
    fixedSize: 0,
    read: (_bytes, offset) => [void 0, offset],
  });
}
815
/** Combines the unit encoder and decoder into one codec. */
function getUnitCodec() {
  return combineCodec(getUnitEncoder(), getUnitDecoder());
}
818
757
 
819
758
  exports.assertValidNumberOfItemsForCodec = assertValidNumberOfItemsForCodec;
820
- exports.decodeArrayLikeCodecSize = decodeArrayLikeCodecSize;
821
759
  exports.getArrayCodec = getArrayCodec;
822
760
  exports.getArrayDecoder = getArrayDecoder;
823
761
  exports.getArrayEncoder = getArrayEncoder;
824
- exports.getArrayLikeCodecSizeDescription = getArrayLikeCodecSizeDescription;
825
- exports.getArrayLikeCodecSizeFromChildren = getArrayLikeCodecSizeFromChildren;
826
- exports.getArrayLikeCodecSizePrefix = getArrayLikeCodecSizePrefix;
827
762
  exports.getBitArrayCodec = getBitArrayCodec;
828
763
  exports.getBitArrayDecoder = getBitArrayDecoder;
829
764
  exports.getBitArrayEncoder = getBitArrayEncoder;