@solana/codecs-data-structures 2.0.0-experimental.ef09aec → 2.0.0-experimental.ef2569b
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.browser.cjs +341 -430
- package/dist/index.browser.cjs.map +1 -1
- package/dist/index.browser.js +343 -428
- package/dist/index.browser.js.map +1 -1
- package/dist/index.native.js +343 -428
- package/dist/index.native.js.map +1 -1
- package/dist/index.node.cjs +341 -430
- package/dist/index.node.cjs.map +1 -1
- package/dist/index.node.js +343 -428
- package/dist/index.node.js.map +1 -1
- package/dist/types/array.d.ts +50 -6
- package/dist/types/array.d.ts.map +1 -0
- package/dist/types/assertions.d.ts.map +1 -0
- package/dist/types/bit-array.d.ts +5 -5
- package/dist/types/bit-array.d.ts.map +1 -0
- package/dist/types/boolean.d.ts +18 -6
- package/dist/types/boolean.d.ts.map +1 -0
- package/dist/types/bytes.d.ts +14 -5
- package/dist/types/bytes.d.ts.map +1 -0
- package/dist/types/data-enum.d.ts +14 -14
- package/dist/types/data-enum.d.ts.map +1 -0
- package/dist/types/index.d.ts +13 -14
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/map.d.ts +39 -6
- package/dist/types/map.d.ts.map +1 -0
- package/dist/types/nullable.d.ts +24 -6
- package/dist/types/nullable.d.ts.map +1 -0
- package/dist/types/scalar-enum.d.ts +18 -6
- package/dist/types/scalar-enum.d.ts.map +1 -0
- package/dist/types/set.d.ts +39 -6
- package/dist/types/set.d.ts.map +1 -0
- package/dist/types/struct.d.ts +28 -18
- package/dist/types/struct.d.ts.map +1 -0
- package/dist/types/tuple.d.ts +22 -15
- package/dist/types/tuple.d.ts.map +1 -0
- package/dist/types/unit.d.ts +4 -12
- package/dist/types/unit.d.ts.map +1 -0
- package/dist/types/utils.d.ts +10 -2
- package/dist/types/utils.d.ts.map +1 -0
- package/package.json +9 -9
- package/dist/index.development.js +0 -889
- package/dist/index.development.js.map +0 -1
- package/dist/index.production.min.js +0 -51
- package/dist/types/array-like-codec-size.d.ts +0 -20
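
The change that drives nearly every hunk below is a migration away from self-contained codec objects (`encode`, `decode`, `description`, `fixedSize`, `maxSize`) to the `createEncoder`/`createDecoder`/`combineCodec` primitives of `@solana/codecs-core`: encoders now expose `write(value, bytes, offset)` plus an optional `getSizeFromValue` callback for variable-size data, and decoders expose `read(bytes, offset)`. The following sketch of the new shape is inferred from the compiled output in this diff; the exact TypeScript signatures of these experimental `@solana/codecs-core` helpers are an assumption.

// Minimal sketch of the new codec shape, assuming the createEncoder /
// createDecoder / combineCodec helpers referenced throughout the diff below.
import { combineCodec, createDecoder, createEncoder } from '@solana/codecs-core';

const booleanEncoder = createEncoder<boolean>({
  fixedSize: 1,
  // `write` stores the value into a caller-owned byte array and returns
  // the offset just past the written bytes.
  write: (value, bytes, offset) => {
    bytes[offset] = value ? 1 : 0;
    return offset + 1;
  },
});

const booleanDecoder = createDecoder<boolean>({
  fixedSize: 1,
  // `read` returns the decoded value together with the next offset.
  read: (bytes, offset) => [bytes[offset] === 1, offset + 1],
});

const booleanCodec = combineCodec(booleanEncoder, booleanDecoder);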
package/dist/index.browser.cjs
CHANGED
@@ -5,7 +5,12 @@ var codecsNumbers = require('@solana/codecs-numbers');
 
 // src/array.ts
 
-// src/
+// src/assertions.ts
+function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
+  if (expected !== actual) {
+    throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
+  }
+}
 function maxCodecSizes(sizes) {
   return sizes.reduce(
     (all, size) => all === null || size === null ? null : Math.max(all, size),
@@ -15,106 +20,107 @@ function maxCodecSizes(sizes) {
 function sumCodecSizes(sizes) {
   return sizes.reduce((all, size) => all === null || size === null ? null : all + size, 0);
 }
-
-
-function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
-  if (typeof size === "number") {
-    return [size, offset];
-  }
-  if (typeof size === "object") {
-    return size.decode(bytes, offset);
-  }
-  if (size === "remainder") {
-    const childrenSize = sumCodecSizes(childrenSizes);
-    if (childrenSize === null) {
-      throw new Error('Codecs of "remainder" size must have fixed-size items.');
-    }
-    const remainder = bytes.slice(offset).length;
-    if (remainder % childrenSize !== 0) {
-      throw new Error(
-        `The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
-      );
-    }
-    return [remainder / childrenSize, offset];
-  }
-  throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
-}
-function getArrayLikeCodecSizeDescription(size) {
-  return typeof size === "object" ? size.description : `${size}`;
-}
-function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
-  if (typeof size !== "number")
-    return null;
-  if (size === 0)
-    return 0;
-  const childrenSize = sumCodecSizes(childrenSizes);
-  return childrenSize === null ? null : childrenSize * size;
+function getFixedSize(codec) {
+  return codecsCore.isFixedSize(codec) ? codec.fixedSize : null;
 }
-function
-  return
-}
-
-// src/assertions.ts
-function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
-  if (expected !== actual) {
-    throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
-  }
+function getMaxSize(codec) {
+  return codecsCore.isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
 }
 
 // src/array.ts
-function arrayCodecHelper(item, size, description) {
-  if (size === "remainder" && item.fixedSize === null) {
-    throw new Error('Codecs of "remainder" size must have fixed-size items.');
-  }
-  return {
-    description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
-    fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
-    maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
-  };
-}
 function getArrayEncoder(item, config = {}) {
   const size = config.size ?? codecsNumbers.getU32Encoder();
-
-
-
+  if (size === "remainder") {
+    codecsCore.assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
+  }
+  const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
+  const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
+  return codecsCore.createEncoder({
+    ...fixedSize !== null ? { fixedSize } : {
+      getSizeFromValue: (array) => {
+        const prefixSize = typeof size === "object" ? codecsCore.getEncodedSize(array.length, size) : 0;
+        return prefixSize + [...array].reduce((all, value) => all + codecsCore.getEncodedSize(value, item), 0);
+      },
+      maxSize
+    },
+    write: (array, bytes, offset) => {
       if (typeof size === "number") {
-        assertValidNumberOfItemsForCodec("array", size,
+        assertValidNumberOfItemsForCodec("array", size, array.length);
+      }
+      if (typeof size === "object") {
+        offset = size.write(array.length, bytes, offset);
       }
-
+      array.forEach((value) => {
+        offset = item.write(value, bytes, offset);
+      });
+      return offset;
     }
-  };
+  });
 }
 function getArrayDecoder(item, config = {}) {
   const size = config.size ?? codecsNumbers.getU32Decoder();
-
-
-
+  if (size === "remainder") {
+    codecsCore.assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
+  }
+  const itemSize = getFixedSize(item);
+  const fixedSize = computeArrayLikeCodecSize(size, itemSize);
+  const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
+  return codecsCore.createDecoder({
+    ...fixedSize !== null ? { fixedSize } : { maxSize },
+    read: (bytes, offset) => {
+      const array = [];
       if (typeof size === "object" && bytes.slice(offset).length === 0) {
-        return [
+        return [array, offset];
       }
-      const [resolvedSize, newOffset] =
+      const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset);
       offset = newOffset;
-      const values = [];
       for (let i = 0; i < resolvedSize; i += 1) {
-        const [value, newOffset2] = item.
-        values.push(value);
+        const [value, newOffset2] = item.read(bytes, offset);
         offset = newOffset2;
+        array.push(value);
       }
-      return [
+      return [array, offset];
     }
-  };
+  });
 }
 function getArrayCodec(item, config = {}) {
   return codecsCore.combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config));
 }
-var getBitArrayEncoder = (size, config = {}) => {
+function readArrayLikeCodecSize(size, itemSize, bytes, offset) {
+  if (typeof size === "number") {
+    return [size, offset];
+  }
+  if (typeof size === "object") {
+    return size.read(bytes, offset);
+  }
+  if (size === "remainder") {
+    if (itemSize === null) {
+      throw new Error('Codecs of "remainder" size must have fixed-size items.');
+    }
+    const remainder = Math.max(0, bytes.length - offset);
+    if (remainder % itemSize !== 0) {
+      throw new Error(
+        `The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.`
+      );
+    }
+    return [remainder / itemSize, offset];
+  }
+  throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
+}
+function computeArrayLikeCodecSize(size, itemSize) {
+  if (typeof size !== "number")
+    return null;
+  if (size === 0)
+    return 0;
+  return itemSize === null ? null : itemSize * size;
+}
+function getBitArrayEncoder(size, config = {}) {
   const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
   const backward = parsedConfig.backward ?? false;
-
-
-
-
-      const bytes = [];
+  return codecsCore.createEncoder({
+    fixedSize: size,
+    write(value, bytes, offset) {
+      const bytesToAdd = [];
       for (let i = 0; i < size; i += 1) {
         let byte = 0;
         for (let j = 0; j < 8; j += 1) {
@@ -122,23 +128,22 @@ var getBitArrayEncoder = (size, config = {}) => {
           byte |= feature << (backward ? j : 7 - j);
         }
         if (backward) {
-
+          bytesToAdd.unshift(byte);
         } else {
-
+          bytesToAdd.push(byte);
         }
       }
-
-
-
-
-
-}
-var getBitArrayDecoder = (size, config = {}) => {
+      bytes.set(bytesToAdd, offset);
+      return size;
+    }
+  });
+}
+function getBitArrayDecoder(size, config = {}) {
   const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
   const backward = parsedConfig.backward ?? false;
-
-
-
+  return codecsCore.createDecoder({
+    fixedSize: size,
+    read(bytes, offset) {
       codecsCore.assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);
       const booleans = [];
       let slice = bytes.slice(offset, offset + size);
@@ -155,138 +160,107 @@ var getBitArrayDecoder = (size, config = {}) => {
         }
       });
       return [booleans, offset + size];
-    }
-
-
-
-
-}
-var getBitArrayCodec = (size, config = {}) => codecsCore.combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
+    }
+  });
+}
+function getBitArrayCodec(size, config = {}) {
+  return codecsCore.combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
+}
 function getBooleanEncoder(config = {}) {
   const size = config.size ?? codecsNumbers.getU8Encoder();
-  codecsCore.
-  return
-    description: config.description ?? `bool(${size.description})`,
-    encode: (value) => size.encode(value ? 1 : 0),
-    fixedSize: size.fixedSize,
-    maxSize: size.fixedSize
-  };
+  codecsCore.assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
+  return codecsCore.mapEncoder(size, (value) => value ? 1 : 0);
 }
 function getBooleanDecoder(config = {}) {
   const size = config.size ?? codecsNumbers.getU8Decoder();
-  codecsCore.
-  return
-    decode: (bytes, offset = 0) => {
-      codecsCore.assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
-      const [value, vOffset] = size.decode(bytes, offset);
-      return [value === 1, vOffset];
-    },
-    description: config.description ?? `bool(${size.description})`,
-    fixedSize: size.fixedSize,
-    maxSize: size.fixedSize
-  };
+  codecsCore.assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
+  return codecsCore.mapDecoder(size, (value) => Number(value) === 1);
 }
 function getBooleanCodec(config = {}) {
   return codecsCore.combineCodec(getBooleanEncoder(config), getBooleanDecoder(config));
 }
 function getBytesEncoder(config = {}) {
   const size = config.size ?? "variable";
-  const
-
-
-
-
-
-
-  };
+  const byteEncoder = codecsCore.createEncoder({
+    getSizeFromValue: (value) => value.length,
+    write: (value, bytes, offset) => {
+      bytes.set(value, offset);
+      return offset + value.length;
+    }
+  });
   if (size === "variable") {
     return byteEncoder;
   }
   if (typeof size === "number") {
-    return codecsCore.fixEncoder(byteEncoder, size
+    return codecsCore.fixEncoder(byteEncoder, size);
   }
-  return {
-
-
-
-
-      return codecsCore.mergeBytes([lengthBytes, contentBytes]);
+  return codecsCore.createEncoder({
+    getSizeFromValue: (value) => codecsCore.getEncodedSize(value.length, size) + value.length,
+    write: (value, bytes, offset) => {
+      offset = size.write(value.length, bytes, offset);
+      return byteEncoder.write(value, bytes, offset);
     }
-  };
+  });
 }
 function getBytesDecoder(config = {}) {
   const size = config.size ?? "variable";
-  const
-
-  const byteDecoder = {
-    decode: (bytes, offset = 0) => {
+  const byteDecoder = codecsCore.createDecoder({
+    read: (bytes, offset) => {
       const slice = bytes.slice(offset);
       return [slice, offset + slice.length];
-    }
-
-    fixedSize: null,
-    maxSize: null
-  };
+    }
+  });
   if (size === "variable") {
     return byteDecoder;
   }
   if (typeof size === "number") {
-    return codecsCore.fixDecoder(byteDecoder, size
+    return codecsCore.fixDecoder(byteDecoder, size);
   }
-  return {
-
-    decode: (bytes, offset = 0) => {
+  return codecsCore.createDecoder({
+    read: (bytes, offset) => {
       codecsCore.assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
-      const [lengthBigInt, lengthOffset] = size.
+      const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
       const length = Number(lengthBigInt);
       offset = lengthOffset;
      const contentBytes = bytes.slice(offset, offset + length);
       codecsCore.assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
-      const [value, contentOffset] = byteDecoder.
+      const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
       offset += contentOffset;
       return [value, offset];
     }
-  };
+  });
 }
 function getBytesCodec(config = {}) {
   return codecsCore.combineCodec(getBytesEncoder(config), getBytesDecoder(config));
 }
-function dataEnumCodecHelper(variants, prefix, description) {
-  const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
-  const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
-  const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
-  const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
-  return {
-    description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
-    fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
-    maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
-  };
-}
 function getDataEnumEncoder(variants, config = {}) {
   const prefix = config.size ?? codecsNumbers.getU8Encoder();
-
-
-
-
-
-
-
-
-
-
-
-  const
-
+  const fixedSize = getDataEnumFixedSize(variants, prefix);
+  return codecsCore.createEncoder({
+    ...fixedSize !== null ? { fixedSize } : {
+      getSizeFromValue: (variant) => {
+        const discriminator = getVariantDiscriminator(variants, variant);
+        const variantEncoder = variants[discriminator][1];
+        return codecsCore.getEncodedSize(discriminator, prefix) + codecsCore.getEncodedSize(variant, variantEncoder);
+      },
+      maxSize: getDataEnumMaxSize(variants, prefix)
+    },
+    write: (variant, bytes, offset) => {
+      const discriminator = getVariantDiscriminator(variants, variant);
+      offset = prefix.write(discriminator, bytes, offset);
+      const variantEncoder = variants[discriminator][1];
+      return variantEncoder.write(variant, bytes, offset);
     }
-  };
+  });
 }
 function getDataEnumDecoder(variants, config = {}) {
   const prefix = config.size ?? codecsNumbers.getU8Decoder();
-
-
-
+  const fixedSize = getDataEnumFixedSize(variants, prefix);
+  return codecsCore.createDecoder({
+    ...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
+    read: (bytes, offset) => {
       codecsCore.assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
-      const [discriminator, dOffset] = prefix.
+      const [discriminator, dOffset] = prefix.read(bytes, offset);
       offset = dOffset;
       const variantField = variants[Number(discriminator)] ?? null;
       if (!variantField) {
@@ -294,337 +268,274 @@ function getDataEnumDecoder(variants, config = {}) {
           `Enum discriminator out of range. Expected a number between 0 and ${variants.length - 1}, got ${discriminator}.`
         );
       }
-      const [variant, vOffset] = variantField[1].
+      const [variant, vOffset] = variantField[1].read(bytes, offset);
       offset = vOffset;
       return [{ __kind: variantField[0], ...variant ?? {} }, offset];
     }
-  };
+  });
 }
 function getDataEnumCodec(variants, config = {}) {
   return codecsCore.combineCodec(getDataEnumEncoder(variants, config), getDataEnumDecoder(variants, config));
 }
-function
-  if (
-
+function getDataEnumFixedSize(variants, prefix) {
+  if (variants.length === 0)
+    return codecsCore.isFixedSize(prefix) ? prefix.fixedSize : null;
+  if (!codecsCore.isFixedSize(variants[0][1]))
+    return null;
+  const variantSize = variants[0][1].fixedSize;
+  const sameSizedVariants = variants.every(
+    (variant) => codecsCore.isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
+  );
+  if (!sameSizedVariants)
+    return null;
+  return codecsCore.isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
+}
+function getDataEnumMaxSize(variants, prefix) {
+  const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
+  return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
+}
+function getVariantDiscriminator(variants, variant) {
+  const discriminator = variants.findIndex(([key]) => variant.__kind === key);
+  if (discriminator < 0) {
+    throw new Error(
+      `Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
+    );
   }
-  return
-
-
-
-
+  return discriminator;
+}
+function getTupleEncoder(items) {
+  const fixedSize = sumCodecSizes(items.map(getFixedSize));
+  const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
+  return codecsCore.createEncoder({
+    ...fixedSize === null ? {
+      getSizeFromValue: (value) => items.map((item, index) => codecsCore.getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
+      maxSize
+    } : { fixedSize },
+    write: (value, bytes, offset) => {
+      assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
+      items.forEach((item, index) => {
+        offset = item.write(value[index], bytes, offset);
+      });
+      return offset;
+    }
+  });
+}
+function getTupleDecoder(items) {
+  const fixedSize = sumCodecSizes(items.map(getFixedSize));
+  const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
+  return codecsCore.createDecoder({
+    ...fixedSize === null ? { maxSize } : { fixedSize },
+    read: (bytes, offset) => {
+      const values = [];
+      items.forEach((item) => {
+        const [newValue, newOffset] = item.read(bytes, offset);
+        values.push(newValue);
+        offset = newOffset;
+      });
+      return [values, offset];
+    }
+  });
+}
+function getTupleCodec(items) {
+  return codecsCore.combineCodec(
+    getTupleEncoder(items),
+    getTupleDecoder(items)
+  );
 }
+
+// src/map.ts
 function getMapEncoder(key, value, config = {}) {
-
-
-
-
-      if (typeof size === "number") {
-        assertValidNumberOfItemsForCodec("map", size, map.size);
-      }
-      const itemBytes = Array.from(map, ([k, v]) => codecsCore.mergeBytes([key.encode(k), value.encode(v)]));
-      return codecsCore.mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]);
-    }
-  };
+  return codecsCore.mapEncoder(
+    getArrayEncoder(getTupleEncoder([key, value]), config),
+    (map) => [...map.entries()]
+  );
 }
 function getMapDecoder(key, value, config = {}) {
-
-
-
-
-      const map = /* @__PURE__ */ new Map();
-      if (typeof size === "object" && bytes.slice(offset).length === 0) {
-        return [map, offset];
-      }
-      const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(
-        size,
-        [key.fixedSize, value.fixedSize],
-        bytes,
-        offset
-      );
-      offset = newOffset;
-      for (let i = 0; i < resolvedSize; i += 1) {
-        const [decodedKey, kOffset] = key.decode(bytes, offset);
-        offset = kOffset;
-        const [decodedValue, vOffset] = value.decode(bytes, offset);
-        offset = vOffset;
-        map.set(decodedKey, decodedValue);
-      }
-      return [map, offset];
-    }
-  };
+  return codecsCore.mapDecoder(
+    getArrayDecoder(getTupleDecoder([key, value]), config),
+    (entries) => new Map(entries)
+  );
 }
 function getMapCodec(key, value, config = {}) {
   return codecsCore.combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
 }
-function nullableCodecHelper(item, prefix, fixed, description) {
-  let descriptionSuffix = `; ${prefix.description}`;
-  let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
-  if (fixed) {
-    codecsCore.assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
-    codecsCore.assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
-    descriptionSuffix += "; fixed";
-    fixedSize = prefix.fixedSize + item.fixedSize;
-  }
-  return {
-    description: description ?? `nullable(${item.description + descriptionSuffix})`,
-    fixedSize,
-    maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
-  };
-}
 function getNullableEncoder(item, config = {}) {
   const prefix = config.prefix ?? codecsNumbers.getU8Encoder();
   const fixed = config.fixed ?? false;
-
-
-
-
-
-
-
+  const isZeroSizeItem = codecsCore.isFixedSize(item) && codecsCore.isFixedSize(prefix) && item.fixedSize === 0;
+  if (fixed || isZeroSizeItem) {
+    codecsCore.assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
+    codecsCore.assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
+    const fixedSize = prefix.fixedSize + item.fixedSize;
+    return codecsCore.createEncoder({
+      fixedSize,
+      write: (option, bytes, offset) => {
+        const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
+        if (option !== null) {
+          item.write(option, bytes, prefixOffset);
+        }
+        return offset + fixedSize;
+      }
+    });
+  }
+  return codecsCore.createEncoder({
+    getSizeFromValue: (option) => codecsCore.getEncodedSize(Number(option !== null), prefix) + (option !== null ? codecsCore.getEncodedSize(option, item) : 0),
+    maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
+    write: (option, bytes, offset) => {
+      offset = prefix.write(Number(option !== null), bytes, offset);
+      if (option !== null) {
+        offset = item.write(option, bytes, offset);
+      }
+      return offset;
     }
-  };
+  });
 }
 function getNullableDecoder(item, config = {}) {
   const prefix = config.prefix ?? codecsNumbers.getU8Decoder();
   const fixed = config.fixed ?? false;
-
-
-
+  let fixedSize = null;
+  const isZeroSizeItem = codecsCore.isFixedSize(item) && codecsCore.isFixedSize(prefix) && item.fixedSize === 0;
+  if (fixed || isZeroSizeItem) {
+    codecsCore.assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
+    codecsCore.assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
+    fixedSize = prefix.fixedSize + item.fixedSize;
+  }
+  return codecsCore.createDecoder({
+    ...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
+    read: (bytes, offset) => {
       if (bytes.length - offset <= 0) {
         return [null, offset];
       }
-      const
-      const [isSome, prefixOffset] = prefix.decode(bytes, offset);
-      offset = prefixOffset;
+      const [isSome, prefixOffset] = prefix.read(bytes, offset);
       if (isSome === 0) {
-        return [null,
+        return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
       }
-      const [value, newOffset] = item.
-      offset
-      return [value, fixed ? fixedOffset : offset];
+      const [value, newOffset] = item.read(bytes, prefixOffset);
+      return [value, fixedSize !== null ? offset + fixedSize : newOffset];
     }
-  };
+  });
 }
 function getNullableCodec(item, config = {}) {
-
+  const configCast = config;
+  return codecsCore.combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
+}
+function getScalarEnumEncoder(constructor, config = {}) {
+  const prefix = config.size ?? codecsNumbers.getU8Encoder();
+  const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor);
+  return codecsCore.mapEncoder(prefix, (value) => {
+    const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
+    const isInvalidString = typeof value === "string" && !stringValues.includes(value);
+    if (isInvalidNumber || isInvalidString) {
+      throw new Error(
+        `Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
+      );
+    }
+    if (typeof value === "number")
+      return value;
+    const valueIndex = enumValues.indexOf(value);
+    if (valueIndex >= 0)
+      return valueIndex;
+    return enumKeys.indexOf(value);
+  });
+}
+function getScalarEnumDecoder(constructor, config = {}) {
+  const prefix = config.size ?? codecsNumbers.getU8Decoder();
+  const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor);
+  return codecsCore.mapDecoder(prefix, (value) => {
+    const valueAsNumber = Number(value);
+    if (valueAsNumber < minRange || valueAsNumber > maxRange) {
+      throw new Error(
+        `Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
+      );
+    }
+    return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber];
+  });
 }
-function
+function getScalarEnumCodec(constructor, config = {}) {
+  return codecsCore.combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
+}
+function getScalarEnumStats(constructor) {
   const enumKeys = Object.keys(constructor);
   const enumValues = Object.values(constructor);
   const isNumericEnum = enumValues.some((v) => typeof v === "number");
-  const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", ");
   const minRange = 0;
   const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;
   const stringValues = isNumericEnum ? [...enumKeys] : [.../* @__PURE__ */ new Set([...enumKeys, ...enumValues])];
   return {
-    description: description ?? `enum(${valueDescriptions}; ${prefix.description})`,
     enumKeys,
     enumValues,
-    fixedSize: prefix.fixedSize,
     isNumericEnum,
     maxRange,
-    maxSize: prefix.maxSize,
     minRange,
     stringValues
   };
 }
-function getScalarEnumEncoder(constructor, config = {}) {
-  const prefix = config.size ?? codecsNumbers.getU8Encoder();
-  const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, config.description);
-  return {
-    description,
-    encode: (value) => {
-      const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
-      const isInvalidString = typeof value === "string" && !stringValues.includes(value);
-      if (isInvalidNumber || isInvalidString) {
-        throw new Error(
-          `Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
-        );
-      }
-      if (typeof value === "number")
-        return prefix.encode(value);
-      const valueIndex = enumValues.indexOf(value);
-      if (valueIndex >= 0)
-        return prefix.encode(valueIndex);
-      return prefix.encode(enumKeys.indexOf(value));
-    },
-    fixedSize,
-    maxSize
-  };
-}
-function getScalarEnumDecoder(constructor, config = {}) {
-  const prefix = config.size ?? codecsNumbers.getU8Decoder();
-  const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper(
-    constructor,
-    prefix,
-    config.description
-  );
-  return {
-    decode: (bytes, offset = 0) => {
-      codecsCore.assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
-      const [value, newOffset] = prefix.decode(bytes, offset);
-      const valueAsNumber = Number(value);
-      offset = newOffset;
-      if (valueAsNumber < minRange || valueAsNumber > maxRange) {
-        throw new Error(
-          `Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
-        );
-      }
-      return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
-    },
-    description,
-    fixedSize,
-    maxSize
-  };
-}
-function getScalarEnumCodec(constructor, config = {}) {
-  return codecsCore.combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
-}
-function setCodecHelper(item, size, description) {
-  if (size === "remainder" && item.fixedSize === null) {
-    throw new Error('Codecs of "remainder" size must have fixed-size items.');
-  }
-  return {
-    description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
-    fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
-    maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
-  };
-}
 function getSetEncoder(item, config = {}) {
-
-  return {
-    ...setCodecHelper(item, size, config.description),
-    encode: (set) => {
-      if (typeof size === "number" && set.size !== size) {
-        assertValidNumberOfItemsForCodec("set", size, set.size);
-      }
-      const itemBytes = Array.from(set, (value) => item.encode(value));
-      return codecsCore.mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
-    }
-  };
+  return codecsCore.mapEncoder(getArrayEncoder(item, config), (set) => [...set]);
 }
 function getSetDecoder(item, config = {}) {
-
-  return {
-    ...setCodecHelper(item, size, config.description),
-    decode: (bytes, offset = 0) => {
-      const set = /* @__PURE__ */ new Set();
-      if (typeof size === "object" && bytes.slice(offset).length === 0) {
-        return [set, offset];
-      }
-      const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
-      offset = newOffset;
-      for (let i = 0; i < resolvedSize; i += 1) {
-        const [value, newOffset2] = item.decode(bytes, offset);
-        offset = newOffset2;
-        set.add(value);
-      }
-      return [set, offset];
-    }
-  };
+  return codecsCore.mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries));
 }
 function getSetCodec(item, config = {}) {
   return codecsCore.combineCodec(getSetEncoder(item, config), getSetDecoder(item, config));
 }
-function
-  const
-
-
-
-
-
-
-
-
-
-
-
-  return
+function getStructEncoder(fields) {
+  const fieldCodecs = fields.map(([, codec]) => codec);
+  const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
+  const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
+  return codecsCore.createEncoder({
+    ...fixedSize === null ? {
+      getSizeFromValue: (value) => fields.map(([key, codec]) => codecsCore.getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0),
+      maxSize
+    } : { fixedSize },
+    write: (struct, bytes, offset) => {
+      fields.forEach(([key, codec]) => {
+        offset = codec.write(struct[key], bytes, offset);
+      });
+      return offset;
     }
-  };
-}
-function getStructDecoder(fields
-
-
-
+  });
+}
+function getStructDecoder(fields) {
+  const fieldCodecs = fields.map(([, codec]) => codec);
+  const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
+  const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
+  return codecsCore.createDecoder({
+    ...fixedSize === null ? { maxSize } : { fixedSize },
+    read: (bytes, offset) => {
       const struct = {};
       fields.forEach(([key, codec]) => {
-        const [value, newOffset] = codec.
+        const [value, newOffset] = codec.read(bytes, offset);
         offset = newOffset;
         struct[key] = value;
       });
       return [struct, offset];
     }
-  };
-}
-function getStructCodec(fields, config = {}) {
-  return codecsCore.combineCodec(getStructEncoder(fields, config), getStructDecoder(fields, config));
+  });
 }
-function
-
-  return {
-    description: description ?? `tuple(${itemDescriptions})`,
-    fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)),
-    maxSize: sumCodecSizes(items.map((item) => item.maxSize))
-  };
-}
-function getTupleEncoder(items, config = {}) {
-  return {
-    ...tupleCodecHelper(items, config.description),
-    encode: (value) => {
-      assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
-      return codecsCore.mergeBytes(items.map((item, index) => item.encode(value[index])));
-    }
-  };
+function getStructCodec(fields) {
+  return codecsCore.combineCodec(getStructEncoder(fields), getStructDecoder(fields));
 }
-function
-  return {
-    ...tupleCodecHelper(items, config.description),
-    decode: (bytes, offset = 0) => {
-      const values = [];
-      items.forEach((codec) => {
-        const [newValue, newOffset] = codec.decode(bytes, offset);
-        values.push(newValue);
-        offset = newOffset;
-      });
-      return [values, offset];
-    }
-  };
-}
-function getTupleCodec(items, config = {}) {
-  return codecsCore.combineCodec(
-    getTupleEncoder(items, config),
-    getTupleDecoder(items, config)
-  );
-}
-function getUnitEncoder(config = {}) {
-  return {
-    description: config.description ?? "unit",
-    encode: () => new Uint8Array(),
+function getUnitEncoder() {
+  return codecsCore.createEncoder({
     fixedSize: 0,
-
-  };
+    write: (_value, _bytes, offset) => offset
+  });
 }
-function getUnitDecoder(
-  return {
-    decode: (_bytes, offset = 0) => [void 0, offset],
-    description: config.description ?? "unit",
+function getUnitDecoder() {
+  return codecsCore.createDecoder({
     fixedSize: 0,
-
-  };
+    read: (_bytes, offset) => [void 0, offset]
+  });
 }
-function getUnitCodec(
-  return codecsCore.combineCodec(getUnitEncoder(
+function getUnitCodec() {
+  return codecsCore.combineCodec(getUnitEncoder(), getUnitDecoder());
 }
 
 exports.assertValidNumberOfItemsForCodec = assertValidNumberOfItemsForCodec;
-exports.decodeArrayLikeCodecSize = decodeArrayLikeCodecSize;
 exports.getArrayCodec = getArrayCodec;
 exports.getArrayDecoder = getArrayDecoder;
 exports.getArrayEncoder = getArrayEncoder;
-exports.getArrayLikeCodecSizeDescription = getArrayLikeCodecSizeDescription;
-exports.getArrayLikeCodecSizeFromChildren = getArrayLikeCodecSizeFromChildren;
-exports.getArrayLikeCodecSizePrefix = getArrayLikeCodecSizePrefix;
 exports.getBitArrayCodec = getBitArrayCodec;
 exports.getBitArrayDecoder = getBitArrayDecoder;
 exports.getBitArrayEncoder = getBitArrayEncoder;