@solana/codecs-data-structures 2.0.0-experimental.ef09aec → 2.0.0-experimental.ef2569b
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.browser.cjs +341 -430
- package/dist/index.browser.cjs.map +1 -1
- package/dist/index.browser.js +343 -428
- package/dist/index.browser.js.map +1 -1
- package/dist/index.native.js +343 -428
- package/dist/index.native.js.map +1 -1
- package/dist/index.node.cjs +341 -430
- package/dist/index.node.cjs.map +1 -1
- package/dist/index.node.js +343 -428
- package/dist/index.node.js.map +1 -1
- package/dist/types/array.d.ts +50 -6
- package/dist/types/array.d.ts.map +1 -0
- package/dist/types/assertions.d.ts.map +1 -0
- package/dist/types/bit-array.d.ts +5 -5
- package/dist/types/bit-array.d.ts.map +1 -0
- package/dist/types/boolean.d.ts +18 -6
- package/dist/types/boolean.d.ts.map +1 -0
- package/dist/types/bytes.d.ts +14 -5
- package/dist/types/bytes.d.ts.map +1 -0
- package/dist/types/data-enum.d.ts +14 -14
- package/dist/types/data-enum.d.ts.map +1 -0
- package/dist/types/index.d.ts +13 -14
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/map.d.ts +39 -6
- package/dist/types/map.d.ts.map +1 -0
- package/dist/types/nullable.d.ts +24 -6
- package/dist/types/nullable.d.ts.map +1 -0
- package/dist/types/scalar-enum.d.ts +18 -6
- package/dist/types/scalar-enum.d.ts.map +1 -0
- package/dist/types/set.d.ts +39 -6
- package/dist/types/set.d.ts.map +1 -0
- package/dist/types/struct.d.ts +28 -18
- package/dist/types/struct.d.ts.map +1 -0
- package/dist/types/tuple.d.ts +22 -15
- package/dist/types/tuple.d.ts.map +1 -0
- package/dist/types/unit.d.ts +4 -12
- package/dist/types/unit.d.ts.map +1 -0
- package/dist/types/utils.d.ts +10 -2
- package/dist/types/utils.d.ts.map +1 -0
- package/package.json +9 -9
- package/dist/index.development.js +0 -889
- package/dist/index.development.js.map +0 -1
- package/dist/index.production.min.js +0 -51
- package/dist/types/array-like-codec-size.d.ts +0 -20
package/dist/index.native.js
CHANGED
|
@@ -1,9 +1,14 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { assertIsFixedSize, createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core';
|
|
2
2
|
import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers';
|
|
3
3
|
|
|
4
4
|
// src/array.ts
|
|
5
5
|
|
|
6
|
-
// src/
|
|
6
|
+
// src/assertions.ts
|
|
7
|
+
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
|
|
8
|
+
if (expected !== actual) {
|
|
9
|
+
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
|
|
10
|
+
}
|
|
11
|
+
}
|
|
7
12
|
function maxCodecSizes(sizes) {
|
|
8
13
|
return sizes.reduce(
|
|
9
14
|
(all, size) => all === null || size === null ? null : Math.max(all, size),
|
|
@@ -13,106 +18,107 @@ function maxCodecSizes(sizes) {
|
|
|
13
18
|
function sumCodecSizes(sizes) {
|
|
14
19
|
return sizes.reduce((all, size) => all === null || size === null ? null : all + size, 0);
|
|
15
20
|
}
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
|
|
19
|
-
if (typeof size === "number") {
|
|
20
|
-
return [size, offset];
|
|
21
|
-
}
|
|
22
|
-
if (typeof size === "object") {
|
|
23
|
-
return size.decode(bytes, offset);
|
|
24
|
-
}
|
|
25
|
-
if (size === "remainder") {
|
|
26
|
-
const childrenSize = sumCodecSizes(childrenSizes);
|
|
27
|
-
if (childrenSize === null) {
|
|
28
|
-
throw new Error('Codecs of "remainder" size must have fixed-size items.');
|
|
29
|
-
}
|
|
30
|
-
const remainder = bytes.slice(offset).length;
|
|
31
|
-
if (remainder % childrenSize !== 0) {
|
|
32
|
-
throw new Error(
|
|
33
|
-
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
|
|
34
|
-
);
|
|
35
|
-
}
|
|
36
|
-
return [remainder / childrenSize, offset];
|
|
37
|
-
}
|
|
38
|
-
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
|
|
39
|
-
}
|
|
40
|
-
function getArrayLikeCodecSizeDescription(size) {
|
|
41
|
-
return typeof size === "object" ? size.description : `${size}`;
|
|
42
|
-
}
|
|
43
|
-
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
|
|
44
|
-
if (typeof size !== "number")
|
|
45
|
-
return null;
|
|
46
|
-
if (size === 0)
|
|
47
|
-
return 0;
|
|
48
|
-
const childrenSize = sumCodecSizes(childrenSizes);
|
|
49
|
-
return childrenSize === null ? null : childrenSize * size;
|
|
21
|
+
function getFixedSize(codec) {
|
|
22
|
+
return isFixedSize(codec) ? codec.fixedSize : null;
|
|
50
23
|
}
|
|
51
|
-
function
|
|
52
|
-
return
|
|
53
|
-
}
|
|
54
|
-
|
|
55
|
-
// src/assertions.ts
|
|
56
|
-
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
|
|
57
|
-
if (expected !== actual) {
|
|
58
|
-
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
|
|
59
|
-
}
|
|
24
|
+
function getMaxSize(codec) {
|
|
25
|
+
return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
|
|
60
26
|
}
|
|
61
27
|
|
|
62
28
|
// src/array.ts
|
|
63
|
-
function arrayCodecHelper(item, size, description) {
|
|
64
|
-
if (size === "remainder" && item.fixedSize === null) {
|
|
65
|
-
throw new Error('Codecs of "remainder" size must have fixed-size items.');
|
|
66
|
-
}
|
|
67
|
-
return {
|
|
68
|
-
description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
|
|
69
|
-
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
|
|
70
|
-
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
|
|
71
|
-
};
|
|
72
|
-
}
|
|
73
29
|
function getArrayEncoder(item, config = {}) {
|
|
74
30
|
const size = config.size ?? getU32Encoder();
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
31
|
+
if (size === "remainder") {
|
|
32
|
+
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
|
|
33
|
+
}
|
|
34
|
+
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
|
|
35
|
+
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
|
|
36
|
+
return createEncoder({
|
|
37
|
+
...fixedSize !== null ? { fixedSize } : {
|
|
38
|
+
getSizeFromValue: (array) => {
|
|
39
|
+
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
|
|
40
|
+
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
|
|
41
|
+
},
|
|
42
|
+
maxSize
|
|
43
|
+
},
|
|
44
|
+
write: (array, bytes, offset) => {
|
|
78
45
|
if (typeof size === "number") {
|
|
79
|
-
assertValidNumberOfItemsForCodec("array", size,
|
|
46
|
+
assertValidNumberOfItemsForCodec("array", size, array.length);
|
|
47
|
+
}
|
|
48
|
+
if (typeof size === "object") {
|
|
49
|
+
offset = size.write(array.length, bytes, offset);
|
|
80
50
|
}
|
|
81
|
-
|
|
51
|
+
array.forEach((value) => {
|
|
52
|
+
offset = item.write(value, bytes, offset);
|
|
53
|
+
});
|
|
54
|
+
return offset;
|
|
82
55
|
}
|
|
83
|
-
};
|
|
56
|
+
});
|
|
84
57
|
}
|
|
85
58
|
function getArrayDecoder(item, config = {}) {
|
|
86
59
|
const size = config.size ?? getU32Decoder();
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
60
|
+
if (size === "remainder") {
|
|
61
|
+
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
|
|
62
|
+
}
|
|
63
|
+
const itemSize = getFixedSize(item);
|
|
64
|
+
const fixedSize = computeArrayLikeCodecSize(size, itemSize);
|
|
65
|
+
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
|
|
66
|
+
return createDecoder({
|
|
67
|
+
...fixedSize !== null ? { fixedSize } : { maxSize },
|
|
68
|
+
read: (bytes, offset) => {
|
|
69
|
+
const array = [];
|
|
90
70
|
if (typeof size === "object" && bytes.slice(offset).length === 0) {
|
|
91
|
-
return [
|
|
71
|
+
return [array, offset];
|
|
92
72
|
}
|
|
93
|
-
const [resolvedSize, newOffset] =
|
|
73
|
+
const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset);
|
|
94
74
|
offset = newOffset;
|
|
95
|
-
const values = [];
|
|
96
75
|
for (let i = 0; i < resolvedSize; i += 1) {
|
|
97
|
-
const [value, newOffset2] = item.
|
|
98
|
-
values.push(value);
|
|
76
|
+
const [value, newOffset2] = item.read(bytes, offset);
|
|
99
77
|
offset = newOffset2;
|
|
78
|
+
array.push(value);
|
|
100
79
|
}
|
|
101
|
-
return [
|
|
80
|
+
return [array, offset];
|
|
102
81
|
}
|
|
103
|
-
};
|
|
82
|
+
});
|
|
104
83
|
}
|
|
105
84
|
function getArrayCodec(item, config = {}) {
|
|
106
85
|
return combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config));
|
|
107
86
|
}
|
|
108
|
-
|
|
87
|
+
function readArrayLikeCodecSize(size, itemSize, bytes, offset) {
|
|
88
|
+
if (typeof size === "number") {
|
|
89
|
+
return [size, offset];
|
|
90
|
+
}
|
|
91
|
+
if (typeof size === "object") {
|
|
92
|
+
return size.read(bytes, offset);
|
|
93
|
+
}
|
|
94
|
+
if (size === "remainder") {
|
|
95
|
+
if (itemSize === null) {
|
|
96
|
+
throw new Error('Codecs of "remainder" size must have fixed-size items.');
|
|
97
|
+
}
|
|
98
|
+
const remainder = Math.max(0, bytes.length - offset);
|
|
99
|
+
if (remainder % itemSize !== 0) {
|
|
100
|
+
throw new Error(
|
|
101
|
+
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.`
|
|
102
|
+
);
|
|
103
|
+
}
|
|
104
|
+
return [remainder / itemSize, offset];
|
|
105
|
+
}
|
|
106
|
+
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
|
|
107
|
+
}
|
|
108
|
+
function computeArrayLikeCodecSize(size, itemSize) {
|
|
109
|
+
if (typeof size !== "number")
|
|
110
|
+
return null;
|
|
111
|
+
if (size === 0)
|
|
112
|
+
return 0;
|
|
113
|
+
return itemSize === null ? null : itemSize * size;
|
|
114
|
+
}
|
|
115
|
+
function getBitArrayEncoder(size, config = {}) {
|
|
109
116
|
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
|
|
110
117
|
const backward = parsedConfig.backward ?? false;
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
const bytes = [];
|
|
118
|
+
return createEncoder({
|
|
119
|
+
fixedSize: size,
|
|
120
|
+
write(value, bytes, offset) {
|
|
121
|
+
const bytesToAdd = [];
|
|
116
122
|
for (let i = 0; i < size; i += 1) {
|
|
117
123
|
let byte = 0;
|
|
118
124
|
for (let j = 0; j < 8; j += 1) {
|
|
@@ -120,23 +126,22 @@ var getBitArrayEncoder = (size, config = {}) => {
|
|
|
120
126
|
byte |= feature << (backward ? j : 7 - j);
|
|
121
127
|
}
|
|
122
128
|
if (backward) {
|
|
123
|
-
|
|
129
|
+
bytesToAdd.unshift(byte);
|
|
124
130
|
} else {
|
|
125
|
-
|
|
131
|
+
bytesToAdd.push(byte);
|
|
126
132
|
}
|
|
127
133
|
}
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
}
|
|
134
|
-
var getBitArrayDecoder = (size, config = {}) => {
|
|
134
|
+
bytes.set(bytesToAdd, offset);
|
|
135
|
+
return size;
|
|
136
|
+
}
|
|
137
|
+
});
|
|
138
|
+
}
|
|
139
|
+
function getBitArrayDecoder(size, config = {}) {
|
|
135
140
|
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
|
|
136
141
|
const backward = parsedConfig.backward ?? false;
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
142
|
+
return createDecoder({
|
|
143
|
+
fixedSize: size,
|
|
144
|
+
read(bytes, offset) {
|
|
140
145
|
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);
|
|
141
146
|
const booleans = [];
|
|
142
147
|
let slice = bytes.slice(offset, offset + size);
|
|
@@ -153,138 +158,107 @@ var getBitArrayDecoder = (size, config = {}) => {
|
|
|
153
158
|
}
|
|
154
159
|
});
|
|
155
160
|
return [booleans, offset + size];
|
|
156
|
-
}
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
}
|
|
162
|
-
var getBitArrayCodec = (size, config = {}) => combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
|
|
161
|
+
}
|
|
162
|
+
});
|
|
163
|
+
}
|
|
164
|
+
function getBitArrayCodec(size, config = {}) {
|
|
165
|
+
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
|
|
166
|
+
}
|
|
163
167
|
function getBooleanEncoder(config = {}) {
|
|
164
168
|
const size = config.size ?? getU8Encoder();
|
|
165
|
-
|
|
166
|
-
return
|
|
167
|
-
description: config.description ?? `bool(${size.description})`,
|
|
168
|
-
encode: (value) => size.encode(value ? 1 : 0),
|
|
169
|
-
fixedSize: size.fixedSize,
|
|
170
|
-
maxSize: size.fixedSize
|
|
171
|
-
};
|
|
169
|
+
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
|
|
170
|
+
return mapEncoder(size, (value) => value ? 1 : 0);
|
|
172
171
|
}
|
|
173
172
|
function getBooleanDecoder(config = {}) {
|
|
174
173
|
const size = config.size ?? getU8Decoder();
|
|
175
|
-
|
|
176
|
-
return
|
|
177
|
-
decode: (bytes, offset = 0) => {
|
|
178
|
-
assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
|
|
179
|
-
const [value, vOffset] = size.decode(bytes, offset);
|
|
180
|
-
return [value === 1, vOffset];
|
|
181
|
-
},
|
|
182
|
-
description: config.description ?? `bool(${size.description})`,
|
|
183
|
-
fixedSize: size.fixedSize,
|
|
184
|
-
maxSize: size.fixedSize
|
|
185
|
-
};
|
|
174
|
+
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
|
|
175
|
+
return mapDecoder(size, (value) => Number(value) === 1);
|
|
186
176
|
}
|
|
187
177
|
function getBooleanCodec(config = {}) {
|
|
188
178
|
return combineCodec(getBooleanEncoder(config), getBooleanDecoder(config));
|
|
189
179
|
}
|
|
190
180
|
function getBytesEncoder(config = {}) {
|
|
191
181
|
const size = config.size ?? "variable";
|
|
192
|
-
const
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
};
|
|
182
|
+
const byteEncoder = createEncoder({
|
|
183
|
+
getSizeFromValue: (value) => value.length,
|
|
184
|
+
write: (value, bytes, offset) => {
|
|
185
|
+
bytes.set(value, offset);
|
|
186
|
+
return offset + value.length;
|
|
187
|
+
}
|
|
188
|
+
});
|
|
200
189
|
if (size === "variable") {
|
|
201
190
|
return byteEncoder;
|
|
202
191
|
}
|
|
203
192
|
if (typeof size === "number") {
|
|
204
|
-
return fixEncoder(byteEncoder, size
|
|
193
|
+
return fixEncoder(byteEncoder, size);
|
|
205
194
|
}
|
|
206
|
-
return {
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
return mergeBytes([lengthBytes, contentBytes]);
|
|
195
|
+
return createEncoder({
|
|
196
|
+
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
|
|
197
|
+
write: (value, bytes, offset) => {
|
|
198
|
+
offset = size.write(value.length, bytes, offset);
|
|
199
|
+
return byteEncoder.write(value, bytes, offset);
|
|
212
200
|
}
|
|
213
|
-
};
|
|
201
|
+
});
|
|
214
202
|
}
|
|
215
203
|
function getBytesDecoder(config = {}) {
|
|
216
204
|
const size = config.size ?? "variable";
|
|
217
|
-
const
|
|
218
|
-
|
|
219
|
-
const byteDecoder = {
|
|
220
|
-
decode: (bytes, offset = 0) => {
|
|
205
|
+
const byteDecoder = createDecoder({
|
|
206
|
+
read: (bytes, offset) => {
|
|
221
207
|
const slice = bytes.slice(offset);
|
|
222
208
|
return [slice, offset + slice.length];
|
|
223
|
-
}
|
|
224
|
-
|
|
225
|
-
fixedSize: null,
|
|
226
|
-
maxSize: null
|
|
227
|
-
};
|
|
209
|
+
}
|
|
210
|
+
});
|
|
228
211
|
if (size === "variable") {
|
|
229
212
|
return byteDecoder;
|
|
230
213
|
}
|
|
231
214
|
if (typeof size === "number") {
|
|
232
|
-
return fixDecoder(byteDecoder, size
|
|
215
|
+
return fixDecoder(byteDecoder, size);
|
|
233
216
|
}
|
|
234
|
-
return {
|
|
235
|
-
|
|
236
|
-
decode: (bytes, offset = 0) => {
|
|
217
|
+
return createDecoder({
|
|
218
|
+
read: (bytes, offset) => {
|
|
237
219
|
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
|
|
238
|
-
const [lengthBigInt, lengthOffset] = size.
|
|
220
|
+
const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
|
|
239
221
|
const length = Number(lengthBigInt);
|
|
240
222
|
offset = lengthOffset;
|
|
241
223
|
const contentBytes = bytes.slice(offset, offset + length);
|
|
242
224
|
assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
|
|
243
|
-
const [value, contentOffset] = byteDecoder.
|
|
225
|
+
const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
|
|
244
226
|
offset += contentOffset;
|
|
245
227
|
return [value, offset];
|
|
246
228
|
}
|
|
247
|
-
};
|
|
229
|
+
});
|
|
248
230
|
}
|
|
249
231
|
function getBytesCodec(config = {}) {
|
|
250
232
|
return combineCodec(getBytesEncoder(config), getBytesDecoder(config));
|
|
251
233
|
}
|
|
252
|
-
function dataEnumCodecHelper(variants, prefix, description) {
|
|
253
|
-
const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
|
|
254
|
-
const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
|
|
255
|
-
const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
|
|
256
|
-
const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
|
|
257
|
-
return {
|
|
258
|
-
description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
|
|
259
|
-
fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
|
|
260
|
-
maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
|
|
261
|
-
};
|
|
262
|
-
}
|
|
263
234
|
function getDataEnumEncoder(variants, config = {}) {
|
|
264
235
|
const prefix = config.size ?? getU8Encoder();
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
const
|
|
277
|
-
|
|
236
|
+
const fixedSize = getDataEnumFixedSize(variants, prefix);
|
|
237
|
+
return createEncoder({
|
|
238
|
+
...fixedSize !== null ? { fixedSize } : {
|
|
239
|
+
getSizeFromValue: (variant) => {
|
|
240
|
+
const discriminator = getVariantDiscriminator(variants, variant);
|
|
241
|
+
const variantEncoder = variants[discriminator][1];
|
|
242
|
+
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
|
|
243
|
+
},
|
|
244
|
+
maxSize: getDataEnumMaxSize(variants, prefix)
|
|
245
|
+
},
|
|
246
|
+
write: (variant, bytes, offset) => {
|
|
247
|
+
const discriminator = getVariantDiscriminator(variants, variant);
|
|
248
|
+
offset = prefix.write(discriminator, bytes, offset);
|
|
249
|
+
const variantEncoder = variants[discriminator][1];
|
|
250
|
+
return variantEncoder.write(variant, bytes, offset);
|
|
278
251
|
}
|
|
279
|
-
};
|
|
252
|
+
});
|
|
280
253
|
}
|
|
281
254
|
function getDataEnumDecoder(variants, config = {}) {
|
|
282
255
|
const prefix = config.size ?? getU8Decoder();
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
256
|
+
const fixedSize = getDataEnumFixedSize(variants, prefix);
|
|
257
|
+
return createDecoder({
|
|
258
|
+
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
|
|
259
|
+
read: (bytes, offset) => {
|
|
286
260
|
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
|
|
287
|
-
const [discriminator, dOffset] = prefix.
|
|
261
|
+
const [discriminator, dOffset] = prefix.read(bytes, offset);
|
|
288
262
|
offset = dOffset;
|
|
289
263
|
const variantField = variants[Number(discriminator)] ?? null;
|
|
290
264
|
if (!variantField) {
|
|
@@ -292,329 +266,270 @@ function getDataEnumDecoder(variants, config = {}) {
|
|
|
292
266
|
`Enum discriminator out of range. Expected a number between 0 and ${variants.length - 1}, got ${discriminator}.`
|
|
293
267
|
);
|
|
294
268
|
}
|
|
295
|
-
const [variant, vOffset] = variantField[1].
|
|
269
|
+
const [variant, vOffset] = variantField[1].read(bytes, offset);
|
|
296
270
|
offset = vOffset;
|
|
297
271
|
return [{ __kind: variantField[0], ...variant ?? {} }, offset];
|
|
298
272
|
}
|
|
299
|
-
};
|
|
273
|
+
});
|
|
300
274
|
}
|
|
301
275
|
function getDataEnumCodec(variants, config = {}) {
|
|
302
276
|
return combineCodec(getDataEnumEncoder(variants, config), getDataEnumDecoder(variants, config));
|
|
303
277
|
}
|
|
304
|
-
function
|
|
305
|
-
if (
|
|
306
|
-
|
|
278
|
+
function getDataEnumFixedSize(variants, prefix) {
|
|
279
|
+
if (variants.length === 0)
|
|
280
|
+
return isFixedSize(prefix) ? prefix.fixedSize : null;
|
|
281
|
+
if (!isFixedSize(variants[0][1]))
|
|
282
|
+
return null;
|
|
283
|
+
const variantSize = variants[0][1].fixedSize;
|
|
284
|
+
const sameSizedVariants = variants.every(
|
|
285
|
+
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
|
|
286
|
+
);
|
|
287
|
+
if (!sameSizedVariants)
|
|
288
|
+
return null;
|
|
289
|
+
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
|
|
290
|
+
}
|
|
291
|
+
function getDataEnumMaxSize(variants, prefix) {
|
|
292
|
+
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
|
|
293
|
+
return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
|
|
294
|
+
}
|
|
295
|
+
function getVariantDiscriminator(variants, variant) {
|
|
296
|
+
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
|
|
297
|
+
if (discriminator < 0) {
|
|
298
|
+
throw new Error(
|
|
299
|
+
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
|
|
300
|
+
);
|
|
307
301
|
}
|
|
308
|
-
return
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
302
|
+
return discriminator;
|
|
303
|
+
}
|
|
304
|
+
function getTupleEncoder(items) {
|
|
305
|
+
const fixedSize = sumCodecSizes(items.map(getFixedSize));
|
|
306
|
+
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
|
|
307
|
+
return createEncoder({
|
|
308
|
+
...fixedSize === null ? {
|
|
309
|
+
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
|
|
310
|
+
maxSize
|
|
311
|
+
} : { fixedSize },
|
|
312
|
+
write: (value, bytes, offset) => {
|
|
313
|
+
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
|
|
314
|
+
items.forEach((item, index) => {
|
|
315
|
+
offset = item.write(value[index], bytes, offset);
|
|
316
|
+
});
|
|
317
|
+
return offset;
|
|
318
|
+
}
|
|
319
|
+
});
|
|
320
|
+
}
|
|
321
|
+
function getTupleDecoder(items) {
|
|
322
|
+
const fixedSize = sumCodecSizes(items.map(getFixedSize));
|
|
323
|
+
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
|
|
324
|
+
return createDecoder({
|
|
325
|
+
...fixedSize === null ? { maxSize } : { fixedSize },
|
|
326
|
+
read: (bytes, offset) => {
|
|
327
|
+
const values = [];
|
|
328
|
+
items.forEach((item) => {
|
|
329
|
+
const [newValue, newOffset] = item.read(bytes, offset);
|
|
330
|
+
values.push(newValue);
|
|
331
|
+
offset = newOffset;
|
|
332
|
+
});
|
|
333
|
+
return [values, offset];
|
|
334
|
+
}
|
|
335
|
+
});
|
|
336
|
+
}
|
|
337
|
+
function getTupleCodec(items) {
|
|
338
|
+
return combineCodec(
|
|
339
|
+
getTupleEncoder(items),
|
|
340
|
+
getTupleDecoder(items)
|
|
341
|
+
);
|
|
313
342
|
}
|
|
343
|
+
|
|
344
|
+
// src/map.ts
|
|
314
345
|
function getMapEncoder(key, value, config = {}) {
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
if (typeof size === "number") {
|
|
320
|
-
assertValidNumberOfItemsForCodec("map", size, map.size);
|
|
321
|
-
}
|
|
322
|
-
const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)]));
|
|
323
|
-
return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]);
|
|
324
|
-
}
|
|
325
|
-
};
|
|
346
|
+
return mapEncoder(
|
|
347
|
+
getArrayEncoder(getTupleEncoder([key, value]), config),
|
|
348
|
+
(map) => [...map.entries()]
|
|
349
|
+
);
|
|
326
350
|
}
|
|
327
351
|
function getMapDecoder(key, value, config = {}) {
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
const map = /* @__PURE__ */ new Map();
|
|
333
|
-
if (typeof size === "object" && bytes.slice(offset).length === 0) {
|
|
334
|
-
return [map, offset];
|
|
335
|
-
}
|
|
336
|
-
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(
|
|
337
|
-
size,
|
|
338
|
-
[key.fixedSize, value.fixedSize],
|
|
339
|
-
bytes,
|
|
340
|
-
offset
|
|
341
|
-
);
|
|
342
|
-
offset = newOffset;
|
|
343
|
-
for (let i = 0; i < resolvedSize; i += 1) {
|
|
344
|
-
const [decodedKey, kOffset] = key.decode(bytes, offset);
|
|
345
|
-
offset = kOffset;
|
|
346
|
-
const [decodedValue, vOffset] = value.decode(bytes, offset);
|
|
347
|
-
offset = vOffset;
|
|
348
|
-
map.set(decodedKey, decodedValue);
|
|
349
|
-
}
|
|
350
|
-
return [map, offset];
|
|
351
|
-
}
|
|
352
|
-
};
|
|
352
|
+
return mapDecoder(
|
|
353
|
+
getArrayDecoder(getTupleDecoder([key, value]), config),
|
|
354
|
+
(entries) => new Map(entries)
|
|
355
|
+
);
|
|
353
356
|
}
|
|
354
357
|
function getMapCodec(key, value, config = {}) {
|
|
355
358
|
return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
|
|
356
359
|
}
|
|
357
|
-
function nullableCodecHelper(item, prefix, fixed, description) {
|
|
358
|
-
let descriptionSuffix = `; ${prefix.description}`;
|
|
359
|
-
let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
|
|
360
|
-
if (fixed) {
|
|
361
|
-
assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
|
|
362
|
-
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
|
|
363
|
-
descriptionSuffix += "; fixed";
|
|
364
|
-
fixedSize = prefix.fixedSize + item.fixedSize;
|
|
365
|
-
}
|
|
366
|
-
return {
|
|
367
|
-
description: description ?? `nullable(${item.description + descriptionSuffix})`,
|
|
368
|
-
fixedSize,
|
|
369
|
-
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
|
|
370
|
-
};
|
|
371
|
-
}
|
|
372
360
|
function getNullableEncoder(item, config = {}) {
|
|
373
361
|
const prefix = config.prefix ?? getU8Encoder();
|
|
374
362
|
const fixed = config.fixed ?? false;
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
363
|
+
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
|
|
364
|
+
if (fixed || isZeroSizeItem) {
|
|
365
|
+
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
|
|
366
|
+
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
|
|
367
|
+
const fixedSize = prefix.fixedSize + item.fixedSize;
|
|
368
|
+
return createEncoder({
|
|
369
|
+
fixedSize,
|
|
370
|
+
write: (option, bytes, offset) => {
|
|
371
|
+
const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
|
|
372
|
+
if (option !== null) {
|
|
373
|
+
item.write(option, bytes, prefixOffset);
|
|
374
|
+
}
|
|
375
|
+
return offset + fixedSize;
|
|
376
|
+
}
|
|
377
|
+
});
|
|
378
|
+
}
|
|
379
|
+
return createEncoder({
|
|
380
|
+
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0),
|
|
381
|
+
maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
|
|
382
|
+
write: (option, bytes, offset) => {
|
|
383
|
+
offset = prefix.write(Number(option !== null), bytes, offset);
|
|
384
|
+
if (option !== null) {
|
|
385
|
+
offset = item.write(option, bytes, offset);
|
|
386
|
+
}
|
|
387
|
+
return offset;
|
|
382
388
|
}
|
|
383
|
-
};
|
|
389
|
+
});
|
|
384
390
|
}
|
|
385
391
|
function getNullableDecoder(item, config = {}) {
|
|
386
392
|
const prefix = config.prefix ?? getU8Decoder();
|
|
387
393
|
const fixed = config.fixed ?? false;
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
394
|
+
let fixedSize = null;
|
|
395
|
+
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
|
|
396
|
+
if (fixed || isZeroSizeItem) {
|
|
397
|
+
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
|
|
398
|
+
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
|
|
399
|
+
fixedSize = prefix.fixedSize + item.fixedSize;
|
|
400
|
+
}
|
|
401
|
+
return createDecoder({
|
|
402
|
+
...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
|
|
403
|
+
read: (bytes, offset) => {
|
|
391
404
|
if (bytes.length - offset <= 0) {
|
|
392
405
|
return [null, offset];
|
|
393
406
|
}
|
|
394
|
-
const
|
|
395
|
-
const [isSome, prefixOffset] = prefix.decode(bytes, offset);
|
|
396
|
-
offset = prefixOffset;
|
|
407
|
+
const [isSome, prefixOffset] = prefix.read(bytes, offset);
|
|
397
408
|
if (isSome === 0) {
|
|
398
|
-
return [null,
|
|
409
|
+
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
|
|
399
410
|
}
|
|
400
|
-
const [value, newOffset] = item.
|
|
401
|
-
offset
|
|
402
|
-
return [value, fixed ? fixedOffset : offset];
|
|
411
|
+
const [value, newOffset] = item.read(bytes, prefixOffset);
|
|
412
|
+
return [value, fixedSize !== null ? offset + fixedSize : newOffset];
|
|
403
413
|
}
|
|
404
|
-
};
|
|
414
|
+
});
|
|
405
415
|
}
|
|
406
416
|
function getNullableCodec(item, config = {}) {
|
|
407
|
-
|
|
417
|
+
const configCast = config;
|
|
418
|
+
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
|
|
419
|
+
}
|
|
420
|
+
function getScalarEnumEncoder(constructor, config = {}) {
|
|
421
|
+
const prefix = config.size ?? getU8Encoder();
|
|
422
|
+
const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor);
|
|
423
|
+
return mapEncoder(prefix, (value) => {
|
|
424
|
+
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
|
|
425
|
+
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
|
|
426
|
+
if (isInvalidNumber || isInvalidString) {
|
|
427
|
+
throw new Error(
|
|
428
|
+
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
|
|
429
|
+
);
|
|
430
|
+
}
|
|
431
|
+
if (typeof value === "number")
|
|
432
|
+
return value;
|
|
433
|
+
const valueIndex = enumValues.indexOf(value);
|
|
434
|
+
if (valueIndex >= 0)
|
|
435
|
+
return valueIndex;
|
|
436
|
+
return enumKeys.indexOf(value);
|
|
437
|
+
});
|
|
438
|
+
}
|
|
439
|
+
function getScalarEnumDecoder(constructor, config = {}) {
|
|
440
|
+
const prefix = config.size ?? getU8Decoder();
|
|
441
|
+
const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor);
|
|
442
|
+
return mapDecoder(prefix, (value) => {
|
|
443
|
+
const valueAsNumber = Number(value);
|
|
444
|
+
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
|
|
445
|
+
throw new Error(
|
|
446
|
+
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
|
|
447
|
+
);
|
|
448
|
+
}
|
|
449
|
+
return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber];
|
|
450
|
+
});
|
|
408
451
|
}
|
|
409
|
-
function
|
|
452
|
+
function getScalarEnumCodec(constructor, config = {}) {
|
|
453
|
+
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
|
|
454
|
+
}
|
|
455
|
+
function getScalarEnumStats(constructor) {
  // Derive lookup metadata from a TypeScript enum-like object.
  // Numeric enums compile to a reverse-mapped object (name -> number AND
  // number -> name), so only half of its entries are distinct variants.
  const enumKeys = Object.keys(constructor);
  const enumValues = Object.values(constructor);
  const isNumericEnum = enumValues.some((value) => typeof value === "number");
  const variantCount = isNumericEnum ? enumValues.length / 2 : enumValues.length;
  const stringValues = isNumericEnum
    ? [...enumKeys]
    : [...new Set([...enumKeys, ...enumValues])];
  return {
    enumKeys,
    enumValues,
    isNumericEnum,
    maxRange: variantCount - 1,
    minRange: 0,
    stringValues
  };
}
|
|
429
|
-
function getScalarEnumEncoder(constructor, config = {}) {
|
|
430
|
-
const prefix = config.size ?? getU8Encoder();
|
|
431
|
-
const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, config.description);
|
|
432
|
-
return {
|
|
433
|
-
description,
|
|
434
|
-
encode: (value) => {
|
|
435
|
-
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
|
|
436
|
-
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
|
|
437
|
-
if (isInvalidNumber || isInvalidString) {
|
|
438
|
-
throw new Error(
|
|
439
|
-
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
|
|
440
|
-
);
|
|
441
|
-
}
|
|
442
|
-
if (typeof value === "number")
|
|
443
|
-
return prefix.encode(value);
|
|
444
|
-
const valueIndex = enumValues.indexOf(value);
|
|
445
|
-
if (valueIndex >= 0)
|
|
446
|
-
return prefix.encode(valueIndex);
|
|
447
|
-
return prefix.encode(enumKeys.indexOf(value));
|
|
448
|
-
},
|
|
449
|
-
fixedSize,
|
|
450
|
-
maxSize
|
|
451
|
-
};
|
|
452
|
-
}
|
|
453
|
-
function getScalarEnumDecoder(constructor, config = {}) {
|
|
454
|
-
const prefix = config.size ?? getU8Decoder();
|
|
455
|
-
const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper(
|
|
456
|
-
constructor,
|
|
457
|
-
prefix,
|
|
458
|
-
config.description
|
|
459
|
-
);
|
|
460
|
-
return {
|
|
461
|
-
decode: (bytes, offset = 0) => {
|
|
462
|
-
assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
|
|
463
|
-
const [value, newOffset] = prefix.decode(bytes, offset);
|
|
464
|
-
const valueAsNumber = Number(value);
|
|
465
|
-
offset = newOffset;
|
|
466
|
-
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
|
|
467
|
-
throw new Error(
|
|
468
|
-
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
|
|
469
|
-
);
|
|
470
|
-
}
|
|
471
|
-
return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
|
|
472
|
-
},
|
|
473
|
-
description,
|
|
474
|
-
fixedSize,
|
|
475
|
-
maxSize
|
|
476
|
-
};
|
|
477
|
-
}
|
|
478
|
-
function getScalarEnumCodec(constructor, config = {}) {
|
|
479
|
-
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
|
|
480
|
-
}
|
|
481
|
-
function setCodecHelper(item, size, description) {
|
|
482
|
-
if (size === "remainder" && item.fixedSize === null) {
|
|
483
|
-
throw new Error('Codecs of "remainder" size must have fixed-size items.');
|
|
484
|
-
}
|
|
485
|
-
return {
|
|
486
|
-
description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
|
|
487
|
-
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
|
|
488
|
-
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
|
|
489
|
-
};
|
|
490
|
-
}
|
|
491
471
|
function getSetEncoder(item, config = {}) {
  // Encode a Set by converting its entries to an array and delegating to the
  // array encoder (which handles the size prefix/strategy from `config`).
  const arrayEncoder = getArrayEncoder(item, config);
  return mapEncoder(arrayEncoder, (set) => Array.from(set));
}
|
|
504
474
|
function getSetDecoder(item, config = {}) {
  // Decode an array of items and collect them into a Set.
  const arrayDecoder = getArrayDecoder(item, config);
  return mapDecoder(arrayDecoder, (items) => new Set(items));
}
|
|
524
477
|
function getSetCodec(item, config = {}) {
  // Pair the set encoder and decoder into a single codec.
  const encoder = getSetEncoder(item, config);
  const decoder = getSetDecoder(item, config);
  return combineCodec(encoder, decoder);
}
|
|
527
|
-
function
|
|
528
|
-
const
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
return
|
|
480
|
+
function getStructEncoder(fields) {
  // Encode an object as the concatenation of its fields' encodings, in
  // declaration order. `fields` is a list of [key, encoder] pairs.
  const encoders = fields.map((field) => field[1]);
  const totalFixedSize = sumCodecSizes(encoders.map(getFixedSize));
  const totalMaxSize = sumCodecSizes(encoders.map(getMaxSize)) ?? undefined;
  const sizeConfig =
    totalFixedSize === null
      ? {
          // Variable-size struct: the size is the sum of each field's encoded size.
          getSizeFromValue: (value) => {
            let total = 0;
            for (const [key, codec] of fields) {
              total += getEncodedSize(value[key], codec);
            }
            return total;
          },
          maxSize: totalMaxSize
        }
      : { fixedSize: totalFixedSize };
  return createEncoder({
    ...sizeConfig,
    write: (struct, bytes, offset) => {
      let cursor = offset;
      for (const [key, codec] of fields) {
        cursor = codec.write(struct[key], bytes, cursor);
      }
      return cursor;
    }
  });
}
|
|
497
|
+
function getStructDecoder(fields) {
  // Decode an object by reading each field sequentially from the byte array.
  // `fields` is a list of [key, decoder] pairs.
  const decoders = fields.map((field) => field[1]);
  const totalFixedSize = sumCodecSizes(decoders.map(getFixedSize));
  const totalMaxSize = sumCodecSizes(decoders.map(getMaxSize)) ?? undefined;
  const sizeConfig = totalFixedSize === null ? { maxSize: totalMaxSize } : { fixedSize: totalFixedSize };
  return createDecoder({
    ...sizeConfig,
    read: (bytes, offset) => {
      const struct = {};
      let cursor = offset;
      for (const [key, codec] of fields) {
        const [value, nextOffset] = codec.read(bytes, cursor);
        cursor = nextOffset;
        struct[key] = value;
      }
      return [struct, cursor];
    }
  });
}
|
|
561
|
-
function
|
|
562
|
-
|
|
563
|
-
return {
|
|
564
|
-
description: description ?? `tuple(${itemDescriptions})`,
|
|
565
|
-
fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)),
|
|
566
|
-
maxSize: sumCodecSizes(items.map((item) => item.maxSize))
|
|
567
|
-
};
|
|
568
|
-
}
|
|
569
|
-
function getTupleEncoder(items, config = {}) {
|
|
570
|
-
return {
|
|
571
|
-
...tupleCodecHelper(items, config.description),
|
|
572
|
-
encode: (value) => {
|
|
573
|
-
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
|
|
574
|
-
return mergeBytes(items.map((item, index) => item.encode(value[index])));
|
|
575
|
-
}
|
|
576
|
-
};
|
|
514
|
+
function getStructCodec(fields) {
  // Pair the struct encoder and decoder into a single codec.
  const encoder = getStructEncoder(fields);
  const decoder = getStructDecoder(fields);
  return combineCodec(encoder, decoder);
}
|
|
578
|
-
function
|
|
579
|
-
return {
|
|
580
|
-
...tupleCodecHelper(items, config.description),
|
|
581
|
-
decode: (bytes, offset = 0) => {
|
|
582
|
-
const values = [];
|
|
583
|
-
items.forEach((codec) => {
|
|
584
|
-
const [newValue, newOffset] = codec.decode(bytes, offset);
|
|
585
|
-
values.push(newValue);
|
|
586
|
-
offset = newOffset;
|
|
587
|
-
});
|
|
588
|
-
return [values, offset];
|
|
589
|
-
}
|
|
590
|
-
};
|
|
591
|
-
}
|
|
592
|
-
function getTupleCodec(items, config = {}) {
|
|
593
|
-
return combineCodec(
|
|
594
|
-
getTupleEncoder(items, config),
|
|
595
|
-
getTupleDecoder(items, config)
|
|
596
|
-
);
|
|
597
|
-
}
|
|
598
|
-
function getUnitEncoder(config = {}) {
|
|
599
|
-
return {
|
|
600
|
-
description: config.description ?? "unit",
|
|
601
|
-
encode: () => new Uint8Array(),
|
|
517
|
+
function getUnitEncoder() {
  // A zero-byte encoder: writes nothing and leaves the offset unchanged.
  return createEncoder({
    fixedSize: 0,
    write(_value, _bytes, offset) {
      return offset;
    }
  });
}
|
|
606
|
-
function getUnitDecoder(
|
|
607
|
-
return {
|
|
608
|
-
decode: (_bytes, offset = 0) => [void 0, offset],
|
|
609
|
-
description: config.description ?? "unit",
|
|
523
|
+
function getUnitDecoder() {
  // A zero-byte decoder: consumes nothing and yields `undefined`.
  return createDecoder({
    fixedSize: 0,
    read(_bytes, offset) {
      return [undefined, offset];
    }
  });
}
|
|
614
|
-
function getUnitCodec(
|
|
615
|
-
return combineCodec(getUnitEncoder(
|
|
529
|
+
function getUnitCodec() {
  // Pair the unit encoder and decoder into a single codec.
  const encoder = getUnitEncoder();
  const decoder = getUnitDecoder();
  return combineCodec(encoder, decoder);
}
|
|
617
532
|
|
|
618
|
-
export { assertValidNumberOfItemsForCodec,
|
|
533
|
+
export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
|
|
619
534
|
//# sourceMappingURL=out.js.map
|
|
620
535
|
//# sourceMappingURL=index.native.js.map
|