@solana/codecs-data-structures 2.0.0-experimental.5e8ac8d
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +20 -0
- package/README.md +25 -0
- package/dist/index.browser.cjs +662 -0
- package/dist/index.browser.cjs.map +1 -0
- package/dist/index.browser.js +620 -0
- package/dist/index.browser.js.map +1 -0
- package/dist/index.development.js +865 -0
- package/dist/index.development.js.map +1 -0
- package/dist/index.native.js +620 -0
- package/dist/index.native.js.map +1 -0
- package/dist/index.node.cjs +662 -0
- package/dist/index.node.cjs.map +1 -0
- package/dist/index.node.js +620 -0
- package/dist/index.node.js.map +1 -0
- package/dist/index.production.min.js +51 -0
- package/dist/types/array-like-codec-size.d.ts +20 -0
- package/dist/types/array-like-codec-size.d.ts.map +1 -0
- package/dist/types/array.d.ts +33 -0
- package/dist/types/array.d.ts.map +1 -0
- package/dist/types/assertions.d.ts +3 -0
- package/dist/types/assertions.d.ts.map +1 -0
- package/dist/types/bit-array.d.ts +31 -0
- package/dist/types/bit-array.d.ts.map +1 -0
- package/dist/types/boolean.d.ts +29 -0
- package/dist/types/boolean.d.ts.map +1 -0
- package/dist/types/bytes.d.ts +32 -0
- package/dist/types/bytes.d.ts.map +1 -0
- package/dist/types/data-enum.d.ts +90 -0
- package/dist/types/data-enum.d.ts.map +1 -0
- package/dist/types/index.d.ts +15 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/map.d.ts +36 -0
- package/dist/types/map.d.ts.map +1 -0
- package/dist/types/nullable.d.ts +41 -0
- package/dist/types/nullable.d.ts.map +1 -0
- package/dist/types/scalar-enum.d.ts +44 -0
- package/dist/types/scalar-enum.d.ts.map +1 -0
- package/dist/types/set.d.ts +33 -0
- package/dist/types/set.d.ts.map +1 -0
- package/dist/types/struct.d.ts +37 -0
- package/dist/types/struct.d.ts.map +1 -0
- package/dist/types/tuple.d.ts +36 -0
- package/dist/types/tuple.d.ts.map +1 -0
- package/dist/types/unit.d.ts +22 -0
- package/dist/types/unit.d.ts.map +1 -0
- package/dist/types/utils.d.ts +5 -0
- package/dist/types/utils.d.ts.map +1 -0
- package/package.json +101 -0
|
@@ -0,0 +1,662 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var codecsCore = require('@solana/codecs-core');
|
|
4
|
+
var codecsNumbers = require('@solana/codecs-numbers');
|
|
5
|
+
|
|
6
|
+
// src/array.ts
|
|
7
|
+
|
|
8
|
+
// src/utils.ts
|
|
9
|
+
// Returns the largest of the given codec sizes, or `null` as soon as any
// size is unbounded (`null`). An empty list yields 0.
function maxCodecSizes(sizes) {
    let largest = 0;
    for (const size of sizes) {
        if (size === null) {
            return null;
        }
        largest = Math.max(largest, size);
    }
    return largest;
}
|
|
15
|
+
// Adds up the given codec sizes, or returns `null` as soon as any size is
// unbounded (`null`). An empty list yields 0.
function sumCodecSizes(sizes) {
    let total = 0;
    for (const size of sizes) {
        if (size === null) {
            return null;
        }
        total += size;
    }
    return total;
}
|
|
18
|
+
|
|
19
|
+
// src/array-like-codec-size.ts
|
|
20
|
+
// Resolves the number of items an array-like codec should decode.
// - number: fixed item count, no bytes consumed.
// - number decoder object: item count is read as a prefix from `bytes`.
// - "remainder": item count is derived from the bytes left after `offset`.
// Returns `[itemCount, newOffset]`. Throws when the size is unrecognized or
// the remainder cannot be evenly split into fixed-size items.
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
    if (typeof size === "number") {
        return [size, offset];
    }
    if (typeof size === "object") {
        return size.decode(bytes, offset);
    }
    if (size === "remainder") {
        const childrenSize = sumCodecSizes(childrenSizes);
        if (childrenSize === null) {
            throw new Error('Codecs of "remainder" size must have fixed-size items.');
        }
        const remainder = bytes.slice(offset).length;
        // An empty remainder always decodes to zero items. Checking this first
        // also avoids the `0 % 0 === NaN` pitfall when items have a fixed size
        // of zero bytes, which previously threw a misleading error.
        if (remainder === 0) {
            return [0, offset];
        }
        if (childrenSize === 0 || remainder % childrenSize !== 0) {
            throw new Error(
                `The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
            );
        }
        return [remainder / childrenSize, offset];
    }
    throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
}
|
|
42
|
+
// Human-readable label for an array-like codec size: the prefix decoder's
// own description, or the stringified number / "remainder" keyword.
function getArrayLikeCodecSizeDescription(size) {
    if (typeof size === "object") {
        return size.description;
    }
    return `${size}`;
}
|
|
45
|
+
// Total fixed byte size of an array-like codec, or `null` when it cannot be
// known statically (prefixed/remainder sizes, or variable-size children).
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
    // Only a fixed numeric item count can produce a fixed total size.
    if (typeof size !== "number") {
        return null;
    }
    // Zero items occupy zero bytes regardless of the children's sizes.
    if (size === 0) {
        return 0;
    }
    const bytesPerItem = sumCodecSizes(childrenSizes);
    if (bytesPerItem === null) {
        return null;
    }
    return size * bytesPerItem;
}
|
|
53
|
+
// Encodes the item-count prefix for an array-like codec. Only prefixed sizes
// (number encoder objects) produce bytes; fixed and "remainder" sizes do not.
function getArrayLikeCodecSizePrefix(size, realSize) {
    if (typeof size === "object") {
        return size.encode(realSize);
    }
    return new Uint8Array();
}
|
|
56
|
+
|
|
57
|
+
// src/assertions.ts
|
|
58
|
+
// Throws when a fixed-size collection codec receives the wrong item count.
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
    if (expected === actual) {
        return;
    }
    throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
}
|
|
63
|
+
|
|
64
|
+
// src/array.ts
|
|
65
|
+
// Shared metadata (description and size bounds) for array encoders/decoders.
// Rejects "remainder" sizing when items are not fixed-size, since the item
// count could not be derived from the leftover bytes.
function arrayCodecHelper(item, size, description) {
    if (size === "remainder" && item.fixedSize === null) {
        throw new Error('Codecs of "remainder" size must have fixed-size items.');
    }
    const defaultDescription = `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`;
    return {
        description: description ?? defaultDescription,
        fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
        maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize]),
    };
}
|
|
75
|
+
// Builds an encoder for arrays of `item`, prefixed by a u32 count by default.
// `options.size` may be a fixed number, "remainder", or a number encoder.
function getArrayEncoder(item, options = {}) {
    const size = options.size ?? codecsNumbers.getU32Encoder();
    const encode = (value) => {
        // Fixed-size arrays must receive exactly that many items.
        if (typeof size === "number") {
            assertValidNumberOfItemsForCodec("array", size, value.length);
        }
        const prefix = getArrayLikeCodecSizePrefix(size, value.length);
        const itemBytes = value.map((v) => item.encode(v));
        return codecsCore.mergeBytes([prefix, ...itemBytes]);
    };
    return { ...arrayCodecHelper(item, size, options.description), encode };
}
|
|
87
|
+
// Builds a decoder for arrays of `item`, reading a u32 count prefix by
// default. `options.size` may be a fixed number, "remainder", or a decoder.
function getArrayDecoder(item, options = {}) {
    const size = options.size ?? codecsNumbers.getU32Decoder();
    return {
        ...arrayCodecHelper(item, size, options.description),
        decode(bytes, offset = 0) {
            // A prefixed array decoded from an empty slice is simply empty.
            if (typeof size === "object" && bytes.slice(offset).length === 0) {
                return [[], offset];
            }
            const [itemCount, afterSize] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
            let cursor = afterSize;
            const values = [];
            for (let index = 0; index < itemCount; index += 1) {
                const [value, nextCursor] = item.decode(bytes, cursor);
                values.push(value);
                cursor = nextCursor;
            }
            return [values, cursor];
        },
    };
}
|
|
107
|
+
// Convenience wrapper combining the array encoder and decoder into one codec.
function getArrayCodec(item, options = {}) {
    const encoder = getArrayEncoder(item, options);
    const decoder = getArrayDecoder(item, options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
110
|
+
// Encoder packing an array of booleans into `size` bytes, 8 bits per byte.
// Forward mode fills bits from the most significant end; backward mode fills
// from the least significant end and reverses the byte order. `options` may
// be a bare boolean as shorthand for `{ backward }`.
const getBitArrayEncoder = (size, options = {}) => {
    const normalized = typeof options === "boolean" ? { backward: options } : options;
    const backward = normalized.backward ?? false;
    return {
        description: normalized.description ?? `bitArray(${size}${backward ? "; backward" : ""})`,
        encode(value) {
            const bytes = new Array(size);
            for (let byteIndex = 0; byteIndex < size; byteIndex += 1) {
                let byte = 0;
                for (let bitIndex = 0; bitIndex < 8; bitIndex += 1) {
                    // Missing trailing booleans are treated as 0 bits.
                    const bit = Number(value[byteIndex * 8 + bitIndex] ?? 0);
                    byte |= bit << (backward ? bitIndex : 7 - bitIndex);
                }
                // Backward mode lays the bytes out in reverse order.
                bytes[backward ? size - 1 - byteIndex : byteIndex] = byte;
            }
            return new Uint8Array(bytes);
        },
        fixedSize: size,
        maxSize: size,
    };
};
|
|
136
|
+
// Decoder unpacking `size` bytes into an array of `size * 8` booleans.
// Forward mode reads each byte from its most significant bit; backward mode
// reverses the byte order and reads from the least significant bit. `options`
// may be a bare boolean as shorthand for `{ backward }`.
const getBitArrayDecoder = (size, options = {}) => {
    const normalized = typeof options === "boolean" ? { backward: options } : options;
    const backward = normalized.backward ?? false;
    return {
        decode(bytes, offset = 0) {
            codecsCore.assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);
            let slice = bytes.slice(offset, offset + size);
            if (backward) {
                slice = slice.reverse();
            }
            const booleans = [];
            for (let byte of slice) {
                for (let bit = 0; bit < 8; bit += 1) {
                    if (backward) {
                        booleans.push((byte & 1) === 1);
                        byte >>= 1;
                    } else {
                        booleans.push((byte & 128) === 128);
                        byte <<= 1;
                    }
                }
            }
            return [booleans, offset + size];
        },
        description: normalized.description ?? `bitArray(${size}${backward ? "; backward" : ""})`,
        fixedSize: size,
        maxSize: size,
    };
};
|
|
164
|
+
// Convenience wrapper combining the bit-array encoder and decoder.
const getBitArrayCodec = (size, options = {}) => {
    const encoder = getBitArrayEncoder(size, options);
    const decoder = getBitArrayDecoder(size, options);
    return codecsCore.combineCodec(encoder, decoder);
};
|
|
165
|
+
// Encoder for booleans as a number (1/0) using a fixed-size number encoder
// (u8 by default).
function getBooleanEncoder(options = {}) {
    const numberEncoder = options.size ?? codecsNumbers.getU8Encoder();
    codecsCore.assertFixedSizeCodec(numberEncoder, "Codec [bool] requires a fixed size.");
    const encode = (value) => numberEncoder.encode(value ? 1 : 0);
    return {
        description: options.description ?? `bool(${numberEncoder.description})`,
        encode,
        fixedSize: numberEncoder.fixedSize,
        maxSize: numberEncoder.fixedSize,
    };
}
|
|
175
|
+
// Decoder for booleans stored as a number (1 = true) using a fixed-size
// number decoder (u8 by default).
function getBooleanDecoder(options = {}) {
    const size = options.size ?? codecsNumbers.getU8Decoder();
    codecsCore.assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
    return {
        decode: (bytes, offset = 0) => {
            codecsCore.assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
            const [value, vOffset] = size.decode(bytes, offset);
            // Coerce before comparing: larger number decoders (e.g. u64)
            // return bigints and `1n === 1` is false, which would decode every
            // true flag as false. Mirrors the Number(...) coercion already
            // used by the bytes and dataEnum decoders.
            return [Number(value) === 1, vOffset];
        },
        description: options.description ?? `bool(${size.description})`,
        fixedSize: size.fixedSize,
        maxSize: size.fixedSize,
    };
}
|
|
189
|
+
// Convenience wrapper combining the boolean encoder and decoder.
function getBooleanCodec(options = {}) {
    const encoder = getBooleanEncoder(options);
    const decoder = getBooleanDecoder(options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
192
|
+
// Encoder for raw byte arrays. `options.size` selects the framing:
//   - "variable" (default): pass the bytes through unchanged.
//   - number: pad/truncate to exactly that many bytes (via fixEncoder).
//   - number encoder: prepend the byte length as a prefix.
function getBytesEncoder(options = {}) {
    const size = options.size ?? "variable";
    const sizeDescription = typeof size === "object" ? size.description : `${size}`;
    const description = options.description ?? `bytes(${sizeDescription})`;
    const passthrough = {
        description,
        encode: (value) => value,
        fixedSize: null,
        maxSize: null,
    };
    if (size === "variable") {
        return passthrough;
    }
    if (typeof size === "number") {
        return codecsCore.fixEncoder(passthrough, size, description);
    }
    return {
        ...passthrough,
        encode(value) {
            const contentBytes = passthrough.encode(value);
            const lengthBytes = size.encode(contentBytes.length);
            return codecsCore.mergeBytes([lengthBytes, contentBytes]);
        },
    };
}
|
|
217
|
+
// Decoder for raw byte arrays. `options.size` selects the framing:
//   - "variable" (default): consume every remaining byte.
//   - number: read exactly that many bytes (via fixDecoder).
//   - number decoder: read a length prefix, then that many content bytes.
function getBytesDecoder(options = {}) {
    const size = options.size ?? "variable";
    const sizeDescription = typeof size === "object" ? size.description : `${size}`;
    const description = options.description ?? `bytes(${sizeDescription})`;
    // Base decoder: returns the whole slice from `offset` onwards.
    const byteDecoder = {
        decode: (bytes, offset = 0) => {
            const slice = bytes.slice(offset);
            return [slice, offset + slice.length];
        },
        description,
        fixedSize: null,
        maxSize: null
    };
    if (size === "variable") {
        return byteDecoder;
    }
    if (typeof size === "number") {
        return codecsCore.fixDecoder(byteDecoder, size, description);
    }
    // Prefixed framing: decode the length first, then hand exactly that many
    // bytes to the base decoder.
    return {
        ...byteDecoder,
        decode: (bytes, offset = 0) => {
            codecsCore.assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
            const [lengthBigInt, lengthOffset] = size.decode(bytes, offset);
            // The prefix decoder may return a bigint (e.g. u64) — coerce it.
            const length = Number(lengthBigInt);
            offset = lengthOffset;
            const contentBytes = bytes.slice(offset, offset + length);
            codecsCore.assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
            // Decode from the isolated slice, then translate the relative
            // offset back into the original byte array's coordinates.
            const [value, contentOffset] = byteDecoder.decode(contentBytes);
            offset += contentOffset;
            return [value, offset];
        }
    };
}
|
|
251
|
+
// Convenience wrapper combining the bytes encoder and decoder.
function getBytesCodec(options = {}) {
    const encoder = getBytesEncoder(options);
    const decoder = getBytesDecoder(options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
254
|
+
// Shared metadata (description and size bounds) for data-enum encoders and
// decoders. `variants` is a list of `[name, codec]` pairs; `prefix` encodes
// the discriminator.
function dataEnumCodecHelper(variants, prefix, description) {
    const fieldDescriptions = variants
        .map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`)
        .join(", ");
    const allVariantHaveTheSameFixedSize = variants.every(
        (one, _i, all) => one[1].fixedSize === all[0][1].fixedSize
    );
    // Guard the empty list: `[].every(...)` is vacuously true, so without the
    // length check `variants[0][1]` would dereference `undefined` and throw a
    // TypeError before the empty-list branches below could run.
    const fixedVariantSize =
        allVariantHaveTheSameFixedSize && variants.length > 0 ? variants[0][1].fixedSize : null;
    const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
    return {
        description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
        fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
        maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize]),
    };
}
|
|
265
|
+
// Encoder for data enums: writes the variant's index via `prefix` (u8 by
// default) followed by the variant's own encoded fields. The variant is
// selected by its `__kind` property.
function getDataEnumEncoder(variants, options = {}) {
    const prefix = options.size ?? codecsNumbers.getU8Encoder();
    return {
        ...dataEnumCodecHelper(variants, prefix, options.description),
        encode(variant) {
            const discriminator = variants.findIndex(([key]) => variant.__kind === key);
            if (discriminator < 0) {
                const expectedKinds = variants.map(([key]) => key).join(", ");
                throw new Error(
                    `Invalid data enum variant. Expected one of [${expectedKinds}], got "${variant.__kind}".`
                );
            }
            const [, variantCodec] = variants[discriminator];
            return codecsCore.mergeBytes([prefix.encode(discriminator), variantCodec.encode(variant)]);
        },
    };
}
|
|
283
|
+
// Decoder for data enums: reads the discriminator via `prefix` (u8 by
// default), then delegates to that variant's codec and tags the result with
// its `__kind`.
function getDataEnumDecoder(variants, options = {}) {
    const prefix = options.size ?? codecsNumbers.getU8Decoder();
    return {
        ...dataEnumCodecHelper(variants, prefix, options.description),
        decode(bytes, offset = 0) {
            codecsCore.assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
            const [discriminator, afterPrefix] = prefix.decode(bytes, offset);
            const variantField = variants[Number(discriminator)] ?? null;
            if (!variantField) {
                throw new Error(
                    `Enum discriminator out of range. Expected a number between 0 and ${variants.length - 1}, got ${discriminator}.`
                );
            }
            const [variant, afterVariant] = variantField[1].decode(bytes, afterPrefix);
            return [{ __kind: variantField[0], ...(variant ?? {}) }, afterVariant];
        },
    };
}
|
|
303
|
+
// Convenience wrapper combining the data-enum encoder and decoder.
function getDataEnumCodec(variants, options = {}) {
    const encoder = getDataEnumEncoder(variants, options);
    const decoder = getDataEnumDecoder(variants, options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
306
|
+
// Shared metadata (description and size bounds) for map encoders/decoders.
// "remainder" sizing requires both the key and value codecs to be fixed-size.
function mapCodecHelper(key, value, size, description) {
    const hasVariableSizeItem = key.fixedSize === null || value.fixedSize === null;
    if (size === "remainder" && hasVariableSizeItem) {
        throw new Error('Codecs of "remainder" size must have fixed-size items.');
    }
    const defaultDescription = `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`;
    return {
        description: description ?? defaultDescription,
        fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]),
        maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize]),
    };
}
|
|
316
|
+
// Encoder for Maps: an optional count prefix (u32 by default) followed by
// each entry as encoded key then encoded value, in insertion order.
function getMapEncoder(key, value, options = {}) {
    const size = options.size ?? codecsNumbers.getU32Encoder();
    return {
        ...mapCodecHelper(key, value, size, options.description),
        encode(map) {
            if (typeof size === "number") {
                assertValidNumberOfItemsForCodec("map", size, map.size);
            }
            const entryBytes = [];
            for (const [k, v] of map) {
                entryBytes.push(codecsCore.mergeBytes([key.encode(k), value.encode(v)]));
            }
            return codecsCore.mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...entryBytes]);
        },
    };
}
|
|
329
|
+
// Decoder for Maps: resolves the entry count (u32 prefix by default), then
// reads each entry as a key followed by a value.
function getMapDecoder(key, value, options = {}) {
    const size = options.size ?? codecsNumbers.getU32Decoder();
    return {
        ...mapCodecHelper(key, value, size, options.description),
        decode(bytes, offset = 0) {
            const map = new Map();
            // A prefixed map decoded from an empty slice is simply empty.
            if (typeof size === "object" && bytes.slice(offset).length === 0) {
                return [map, offset];
            }
            const [entryCount, afterSize] = decodeArrayLikeCodecSize(
                size,
                [key.fixedSize, value.fixedSize],
                bytes,
                offset
            );
            let cursor = afterSize;
            for (let index = 0; index < entryCount; index += 1) {
                const [decodedKey, afterKey] = key.decode(bytes, cursor);
                const [decodedValue, afterValue] = value.decode(bytes, afterKey);
                map.set(decodedKey, decodedValue);
                cursor = afterValue;
            }
            return [map, cursor];
        },
    };
}
|
|
356
|
+
// Convenience wrapper combining the map encoder and decoder.
function getMapCodec(key, value, options = {}) {
    const encoder = getMapEncoder(key, value, options);
    const decoder = getMapDecoder(key, value, options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
359
|
+
// Shared metadata for nullable encoders/decoders. In `fixed` mode both the
// prefix and the item must be fixed-size so the encoded width never varies.
function nullableCodecHelper(item, prefix, fixed, description) {
    let descriptionSuffix = `; ${getArrayLikeCodecSizeDescription(prefix)}`;
    let fixedSize;
    if (fixed) {
        codecsCore.assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
        codecsCore.assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
        descriptionSuffix += "; fixed";
        fixedSize = prefix.fixedSize + item.fixedSize;
    } else {
        // Only a zero-byte item makes a non-fixed nullable fixed-size overall.
        fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
    }
    return {
        description: description ?? `nullable(${item.description + descriptionSuffix})`,
        fixedSize,
        maxSize: sumCodecSizes([prefix.maxSize, item.maxSize]),
    };
}
|
|
374
|
+
// Encoder for nullable values: a presence prefix (u8 by default) followed by
// the item bytes when present. In `fixed` mode, absent values are padded so
// the output width is constant.
function getNullableEncoder(item, options = {}) {
    const prefix = options.prefix ?? codecsNumbers.getU8Encoder();
    const fixed = options.fixed ?? false;
    return {
        ...nullableCodecHelper(item, prefix, fixed, options.description),
        encode(option) {
            const isPresent = option !== null;
            const prefixByte = prefix.encode(Number(isPresent));
            let itemBytes = isPresent ? item.encode(option) : new Uint8Array();
            if (fixed) {
                itemBytes = codecsCore.fixBytes(itemBytes, item.fixedSize);
            }
            return codecsCore.mergeBytes([prefixByte, itemBytes]);
        },
    };
}
|
|
387
|
+
// Decoder for nullable values: reads a presence prefix (u8 by default) and,
// when set, the item. In `fixed` mode the cursor always advances by the full
// fixed width regardless of presence.
function getNullableDecoder(item, options = {}) {
    const prefix = options.prefix ?? codecsNumbers.getU8Decoder();
    const fixed = options.fixed ?? false;
    return {
        ...nullableCodecHelper(item, prefix, fixed, options.description),
        decode: (bytes, offset = 0) => {
            if (bytes.slice(offset).length === 0) {
                return [null, offset];
            }
            const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0);
            const [isSome, prefixOffset] = prefix.decode(bytes, offset);
            offset = prefixOffset;
            // Coerce before comparing: custom prefix decoders (e.g. u64)
            // return bigints, and `0n === 0` is false, which would wrongly
            // decode an absent value as present.
            if (Number(isSome) === 0) {
                return [null, fixed ? fixedOffset : offset];
            }
            const [value, newOffset] = item.decode(bytes, offset);
            offset = newOffset;
            return [value, fixed ? fixedOffset : offset];
        },
    };
}
|
|
408
|
+
// Convenience wrapper combining the nullable encoder and decoder.
function getNullableCodec(item, options = {}) {
    const encoder = getNullableEncoder(item, options);
    const decoder = getNullableDecoder(item, options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
411
|
+
// Shared metadata for scalar-enum encoders/decoders built from a TypeScript
// enum-like object. Numeric enums carry a reverse mapping (value -> key), so
// only half of their entries are distinct variants.
function scalarEnumCoderHelper(constructor, prefix, description) {
    const enumKeys = Object.keys(constructor);
    const enumValues = Object.values(constructor);
    const isNumericEnum = enumValues.some((v) => typeof v === "number");
    const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", ");
    const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;
    const stringValues = isNumericEnum
        ? [...enumKeys]
        : [...new Set([...enumKeys, ...enumValues])];
    return {
        description: description ?? `enum(${valueDescriptions}; ${prefix.description})`,
        enumKeys,
        enumValues,
        fixedSize: prefix.fixedSize,
        isNumericEnum,
        maxRange,
        maxSize: prefix.maxSize,
        minRange: 0,
        stringValues,
    };
}
|
|
431
|
+
// Encoder for scalar enums: accepts a variant key, value, or numeric index
// and writes its discriminator via `prefix` (u8 by default).
function getScalarEnumEncoder(constructor, options = {}) {
    const prefix = options.size ?? codecsNumbers.getU8Encoder();
    const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } =
        scalarEnumCoderHelper(constructor, prefix, options.description);
    const encode = (value) => {
        const outOfRange = typeof value === "number" && (value < minRange || value > maxRange);
        const unknownVariant = typeof value === "string" && !stringValues.includes(value);
        if (outOfRange || unknownVariant) {
            throw new Error(
                `Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
            );
        }
        if (typeof value === "number") {
            return prefix.encode(value);
        }
        // Prefer matching by enum value; fall back to matching by key.
        const valueIndex = enumValues.indexOf(value);
        return prefix.encode(valueIndex >= 0 ? valueIndex : enumKeys.indexOf(value));
    };
    return { description, encode, fixedSize, maxSize };
}
|
|
455
|
+
// Decoder for scalar enums: reads the discriminator via `prefix` (u8 by
// default) and returns either the numeric index (numeric enums) or the
// variant value (string enums).
function getScalarEnumDecoder(constructor, options = {}) {
    const prefix = options.size ?? codecsNumbers.getU8Decoder();
    const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } =
        scalarEnumCoderHelper(constructor, prefix, options.description);
    return {
        decode(bytes, offset = 0) {
            codecsCore.assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
            const [rawValue, newOffset] = prefix.decode(bytes, offset);
            // Coerce: the prefix decoder may return a bigint.
            const discriminator = Number(rawValue);
            if (discriminator < minRange || discriminator > maxRange) {
                throw new Error(
                    `Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${discriminator}.`
                );
            }
            return [isNumericEnum ? discriminator : enumValues[discriminator], newOffset];
        },
        description,
        fixedSize,
        maxSize,
    };
}
|
|
480
|
+
// Convenience wrapper combining the scalar-enum encoder and decoder.
function getScalarEnumCodec(constructor, options = {}) {
    const encoder = getScalarEnumEncoder(constructor, options);
    const decoder = getScalarEnumDecoder(constructor, options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
483
|
+
// Shared metadata (description and size bounds) for set encoders/decoders.
// "remainder" sizing requires a fixed-size item codec.
function setCodecHelper(item, size, description) {
    if (size === "remainder" && item.fixedSize === null) {
        throw new Error('Codecs of "remainder" size must have fixed-size items.');
    }
    const defaultDescription = `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`;
    return {
        description: description ?? defaultDescription,
        fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
        maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize]),
    };
}
|
|
493
|
+
// Encoder for Sets: an optional count prefix (u32 by default) followed by
// each item encoded in insertion order.
function getSetEncoder(item, options = {}) {
    const size = options.size ?? codecsNumbers.getU32Encoder();
    return {
        ...setCodecHelper(item, size, options.description),
        encode: (set) => {
            // The assertion no-ops when the counts match, so no extra
            // `set.size !== size` guard is needed — this now matches the
            // array and map encoders.
            if (typeof size === "number") {
                assertValidNumberOfItemsForCodec("set", size, set.size);
            }
            const itemBytes = Array.from(set, (value) => item.encode(value));
            return codecsCore.mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
        },
    };
}
|
|
506
|
+
// Decoder for Sets: resolves the item count (u32 prefix by default), then
// decodes each item and collects them into a Set.
function getSetDecoder(item, options = {}) {
    const size = options.size ?? codecsNumbers.getU32Decoder();
    return {
        ...setCodecHelper(item, size, options.description),
        decode(bytes, offset = 0) {
            const set = new Set();
            // A prefixed set decoded from an empty slice is simply empty.
            if (typeof size === "object" && bytes.slice(offset).length === 0) {
                return [set, offset];
            }
            const [itemCount, afterSize] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
            let cursor = afterSize;
            for (let index = 0; index < itemCount; index += 1) {
                const [value, nextCursor] = item.decode(bytes, cursor);
                set.add(value);
                cursor = nextCursor;
            }
            return [set, cursor];
        },
    };
}
|
|
526
|
+
// Convenience wrapper combining the set encoder and decoder.
function getSetCodec(item, options = {}) {
    const encoder = getSetEncoder(item, options);
    const decoder = getSetDecoder(item, options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
529
|
+
// Shared metadata (description and size bounds) for struct encoders/decoders.
// `fields` is a list of `[name, codec]` pairs encoded in order.
function structCodecHelper(fields, description) {
    const fieldDescriptions = fields
        .map(([name, codec]) => `${String(name)}: ${codec.description}`)
        .join(", ");
    const fixedSizes = fields.map(([, codec]) => codec.fixedSize);
    const maxSizes = fields.map(([, codec]) => codec.maxSize);
    return {
        description: description ?? `struct(${fieldDescriptions})`,
        fixedSize: sumCodecSizes(fixedSizes),
        maxSize: sumCodecSizes(maxSizes),
    };
}
|
|
537
|
+
// Encoder for structs: each field is encoded in declaration order and the
// results are concatenated.
function getStructEncoder(fields, options = {}) {
    return {
        ...structCodecHelper(fields, options.description),
        encode(struct) {
            const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key]));
            return codecsCore.mergeBytes(fieldBytes);
        },
    };
}
|
|
546
|
+
// Decoder for structs: fields are decoded in declaration order, threading the
// byte offset through each field's codec.
function getStructDecoder(fields, options = {}) {
    return {
        ...structCodecHelper(fields, options.description),
        decode(bytes, offset = 0) {
            const struct = {};
            let cursor = offset;
            for (const [key, codec] of fields) {
                const [value, nextCursor] = codec.decode(bytes, cursor);
                struct[key] = value;
                cursor = nextCursor;
            }
            return [struct, cursor];
        },
    };
}
|
|
560
|
+
// Convenience wrapper combining the struct encoder and decoder.
function getStructCodec(fields, options = {}) {
    const encoder = getStructEncoder(fields, options);
    const decoder = getStructDecoder(fields, options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
563
|
+
// Shared metadata (description and size bounds) for tuple encoders/decoders.
function tupleCodecHelper(items, description) {
    const joinedDescriptions = items.map(({ description: d }) => d).join(", ");
    const fixedSizes = items.map(({ fixedSize }) => fixedSize);
    const maxSizes = items.map(({ maxSize }) => maxSize);
    return {
        description: description ?? `tuple(${joinedDescriptions})`,
        fixedSize: sumCodecSizes(fixedSizes),
        maxSize: sumCodecSizes(maxSizes),
    };
}
|
|
571
|
+
// Encoder for tuples: the value must have exactly one element per item codec;
// each element is encoded positionally and the results concatenated.
function getTupleEncoder(items, options = {}) {
    return {
        ...tupleCodecHelper(items, options.description),
        encode(value) {
            assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
            const encodedItems = items.map((item, index) => item.encode(value[index]));
            return codecsCore.mergeBytes(encodedItems);
        },
    };
}
|
|
580
|
+
// Decoder for tuples: each item codec decodes positionally, threading the
// byte offset through in order.
function getTupleDecoder(items, options = {}) {
    return {
        ...tupleCodecHelper(items, options.description),
        decode(bytes, offset = 0) {
            const values = [];
            let cursor = offset;
            for (const codec of items) {
                const [value, nextCursor] = codec.decode(bytes, cursor);
                values.push(value);
                cursor = nextCursor;
            }
            return [values, cursor];
        },
    };
}
|
|
594
|
+
// Convenience wrapper combining the tuple encoder and decoder.
function getTupleCodec(items, options = {}) {
    const encoder = getTupleEncoder(items, options);
    const decoder = getTupleDecoder(items, options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
600
|
+
// Encoder for the unit type: a unit value occupies zero bytes on the wire.
function getUnitEncoder(options = {}) {
    const encode = () => new Uint8Array();
    return {
        description: options.description ?? "unit",
        encode,
        fixedSize: 0,
        maxSize: 0,
    };
}
|
|
608
|
+
// Decoder for the unit type: consumes no bytes and yields `undefined`.
function getUnitDecoder(options = {}) {
    const decode = (_bytes, offset = 0) => [undefined, offset];
    return {
        decode,
        description: options.description ?? "unit",
        fixedSize: 0,
        maxSize: 0,
    };
}
|
|
616
|
+
// Convenience wrapper combining the unit encoder and decoder.
function getUnitCodec(options = {}) {
    const encoder = getUnitEncoder(options);
    const decoder = getUnitDecoder(options);
    return codecsCore.combineCodec(encoder, decoder);
}
|
|
619
|
+
|
|
620
|
+
// CommonJS export manifest (bundler-generated).
exports.assertValidNumberOfItemsForCodec = assertValidNumberOfItemsForCodec;
exports.decodeArrayLikeCodecSize = decodeArrayLikeCodecSize;
exports.getArrayCodec = getArrayCodec;
exports.getArrayDecoder = getArrayDecoder;
exports.getArrayEncoder = getArrayEncoder;
exports.getArrayLikeCodecSizeDescription = getArrayLikeCodecSizeDescription;
exports.getArrayLikeCodecSizeFromChildren = getArrayLikeCodecSizeFromChildren;
exports.getArrayLikeCodecSizePrefix = getArrayLikeCodecSizePrefix;
exports.getBitArrayCodec = getBitArrayCodec;
exports.getBitArrayDecoder = getBitArrayDecoder;
exports.getBitArrayEncoder = getBitArrayEncoder;
exports.getBooleanCodec = getBooleanCodec;
exports.getBooleanDecoder = getBooleanDecoder;
exports.getBooleanEncoder = getBooleanEncoder;
exports.getBytesCodec = getBytesCodec;
exports.getBytesDecoder = getBytesDecoder;
exports.getBytesEncoder = getBytesEncoder;
exports.getDataEnumCodec = getDataEnumCodec;
exports.getDataEnumDecoder = getDataEnumDecoder;
exports.getDataEnumEncoder = getDataEnumEncoder;
exports.getMapCodec = getMapCodec;
exports.getMapDecoder = getMapDecoder;
exports.getMapEncoder = getMapEncoder;
exports.getNullableCodec = getNullableCodec;
exports.getNullableDecoder = getNullableDecoder;
exports.getNullableEncoder = getNullableEncoder;
exports.getScalarEnumCodec = getScalarEnumCodec;
exports.getScalarEnumDecoder = getScalarEnumDecoder;
exports.getScalarEnumEncoder = getScalarEnumEncoder;
exports.getSetCodec = getSetCodec;
exports.getSetDecoder = getSetDecoder;
exports.getSetEncoder = getSetEncoder;
exports.getStructCodec = getStructCodec;
exports.getStructDecoder = getStructDecoder;
exports.getStructEncoder = getStructEncoder;
exports.getTupleCodec = getTupleCodec;
exports.getTupleDecoder = getTupleDecoder;
exports.getTupleEncoder = getTupleEncoder;
exports.getUnitCodec = getUnitCodec;
exports.getUnitDecoder = getUnitDecoder;
exports.getUnitEncoder = getUnitEncoder;
// Removed the stale `//# sourceMappingURL=out.js.map` directive: it pointed at
// an intermediate bundling artifact not shipped with the package, and only one
// sourceMappingURL directive (the real one below) should be present.
//# sourceMappingURL=index.node.cjs.map
|