@solana/codecs-data-structures 2.0.0-experimental.fbd3974 → 2.0.0-experimental.fcff844
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.browser.cjs +385 -476
- package/dist/index.browser.cjs.map +1 -1
- package/dist/index.browser.js +387 -472
- package/dist/index.browser.js.map +1 -1
- package/dist/index.development.js +501 -566
- package/dist/index.development.js.map +1 -1
- package/dist/index.native.js +387 -474
- package/dist/index.native.js.map +1 -1
- package/dist/index.node.cjs +385 -476
- package/dist/index.node.cjs.map +1 -1
- package/dist/index.node.js +387 -472
- package/dist/index.node.js.map +1 -1
- package/dist/index.production.min.js +37 -41
- package/dist/types/array.d.ts +54 -10
- package/dist/types/array.d.ts.map +1 -0
- package/dist/types/assertions.d.ts.map +1 -0
- package/dist/types/bit-array.d.ts +9 -9
- package/dist/types/bit-array.d.ts.map +1 -0
- package/dist/types/boolean.d.ts +22 -10
- package/dist/types/boolean.d.ts.map +1 -0
- package/dist/types/bytes.d.ts +18 -9
- package/dist/types/bytes.d.ts.map +1 -0
- package/dist/types/data-enum.d.ts +20 -20
- package/dist/types/data-enum.d.ts.map +1 -0
- package/dist/types/index.d.ts +0 -1
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/map.d.ts +43 -10
- package/dist/types/map.d.ts.map +1 -0
- package/dist/types/nullable.d.ts +28 -10
- package/dist/types/nullable.d.ts.map +1 -0
- package/dist/types/scalar-enum.d.ts +22 -10
- package/dist/types/scalar-enum.d.ts.map +1 -0
- package/dist/types/set.d.ts +43 -10
- package/dist/types/set.d.ts.map +1 -0
- package/dist/types/struct.d.ts +28 -18
- package/dist/types/struct.d.ts.map +1 -0
- package/dist/types/tuple.d.ts +22 -15
- package/dist/types/tuple.d.ts.map +1 -0
- package/dist/types/unit.d.ts +4 -12
- package/dist/types/unit.d.ts.map +1 -0
- package/dist/types/utils.d.ts +10 -2
- package/dist/types/utils.d.ts.map +1 -0
- package/package.json +6 -6
- package/dist/types/array-like-codec-size.d.ts +0 -20
package/dist/index.browser.js
CHANGED
|
@@ -1,9 +1,14 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { assertIsFixedSize, createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core';
|
|
2
2
|
import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers';
|
|
3
3
|
|
|
4
4
|
// src/array.ts
|
|
5
5
|
|
|
6
|
-
// src/
|
|
6
|
+
// src/assertions.ts
|
|
7
|
+
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
|
|
8
|
+
if (expected !== actual) {
|
|
9
|
+
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
|
|
10
|
+
}
|
|
11
|
+
}
|
|
7
12
|
function maxCodecSizes(sizes) {
|
|
8
13
|
return sizes.reduce(
|
|
9
14
|
(all, size) => all === null || size === null ? null : Math.max(all, size),
|
|
@@ -13,106 +18,107 @@ function maxCodecSizes(sizes) {
|
|
|
13
18
|
function sumCodecSizes(sizes) {
|
|
14
19
|
return sizes.reduce((all, size) => all === null || size === null ? null : all + size, 0);
|
|
15
20
|
}
|
|
21
|
+
function getFixedSize(codec) {
|
|
22
|
+
return isFixedSize(codec) ? codec.fixedSize : null;
|
|
23
|
+
}
|
|
24
|
+
function getMaxSize(codec) {
|
|
25
|
+
return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
|
|
26
|
+
}
|
|
16
27
|
|
|
17
|
-
// src/array
|
|
18
|
-
function
|
|
28
|
+
// src/array.ts
|
|
29
|
+
function getArrayEncoder(item, config = {}) {
|
|
30
|
+
const size = config.size ?? getU32Encoder();
|
|
31
|
+
if (size === "remainder") {
|
|
32
|
+
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
|
|
33
|
+
}
|
|
34
|
+
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
|
|
35
|
+
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
|
|
36
|
+
return createEncoder({
|
|
37
|
+
...fixedSize !== null ? { fixedSize } : {
|
|
38
|
+
getSizeFromValue: (array) => {
|
|
39
|
+
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
|
|
40
|
+
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
|
|
41
|
+
},
|
|
42
|
+
maxSize
|
|
43
|
+
},
|
|
44
|
+
write: (array, bytes, offset) => {
|
|
45
|
+
if (typeof size === "number") {
|
|
46
|
+
assertValidNumberOfItemsForCodec("array", size, array.length);
|
|
47
|
+
}
|
|
48
|
+
if (typeof size === "object") {
|
|
49
|
+
offset = size.write(array.length, bytes, offset);
|
|
50
|
+
}
|
|
51
|
+
array.forEach((value) => {
|
|
52
|
+
offset = item.write(value, bytes, offset);
|
|
53
|
+
});
|
|
54
|
+
return offset;
|
|
55
|
+
}
|
|
56
|
+
});
|
|
57
|
+
}
|
|
58
|
+
function getArrayDecoder(item, config = {}) {
|
|
59
|
+
const size = config.size ?? getU32Decoder();
|
|
60
|
+
if (size === "remainder") {
|
|
61
|
+
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
|
|
62
|
+
}
|
|
63
|
+
const itemSize = getFixedSize(item);
|
|
64
|
+
const fixedSize = computeArrayLikeCodecSize(size, itemSize);
|
|
65
|
+
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
|
|
66
|
+
return createDecoder({
|
|
67
|
+
...fixedSize !== null ? { fixedSize } : { maxSize },
|
|
68
|
+
read: (bytes, offset) => {
|
|
69
|
+
const array = [];
|
|
70
|
+
if (typeof size === "object" && bytes.slice(offset).length === 0) {
|
|
71
|
+
return [array, offset];
|
|
72
|
+
}
|
|
73
|
+
const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset);
|
|
74
|
+
offset = newOffset;
|
|
75
|
+
for (let i = 0; i < resolvedSize; i += 1) {
|
|
76
|
+
const [value, newOffset2] = item.read(bytes, offset);
|
|
77
|
+
offset = newOffset2;
|
|
78
|
+
array.push(value);
|
|
79
|
+
}
|
|
80
|
+
return [array, offset];
|
|
81
|
+
}
|
|
82
|
+
});
|
|
83
|
+
}
|
|
84
|
+
function getArrayCodec(item, config = {}) {
|
|
85
|
+
return combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config));
|
|
86
|
+
}
|
|
87
|
+
function readArrayLikeCodecSize(size, itemSize, bytes, offset) {
|
|
19
88
|
if (typeof size === "number") {
|
|
20
89
|
return [size, offset];
|
|
21
90
|
}
|
|
22
91
|
if (typeof size === "object") {
|
|
23
|
-
return size.
|
|
92
|
+
return size.read(bytes, offset);
|
|
24
93
|
}
|
|
25
94
|
if (size === "remainder") {
|
|
26
|
-
|
|
27
|
-
if (childrenSize === null) {
|
|
95
|
+
if (itemSize === null) {
|
|
28
96
|
throw new Error('Codecs of "remainder" size must have fixed-size items.');
|
|
29
97
|
}
|
|
30
|
-
const remainder = bytes.
|
|
31
|
-
if (remainder %
|
|
98
|
+
const remainder = Math.max(0, bytes.length - offset);
|
|
99
|
+
if (remainder % itemSize !== 0) {
|
|
32
100
|
throw new Error(
|
|
33
|
-
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${
|
|
101
|
+
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.`
|
|
34
102
|
);
|
|
35
103
|
}
|
|
36
|
-
return [remainder /
|
|
104
|
+
return [remainder / itemSize, offset];
|
|
37
105
|
}
|
|
38
106
|
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
|
|
39
107
|
}
|
|
40
|
-
function
|
|
41
|
-
return typeof size === "object" ? size.description : `${size}`;
|
|
42
|
-
}
|
|
43
|
-
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
|
|
108
|
+
function computeArrayLikeCodecSize(size, itemSize) {
|
|
44
109
|
if (typeof size !== "number")
|
|
45
110
|
return null;
|
|
46
111
|
if (size === 0)
|
|
47
112
|
return 0;
|
|
48
|
-
|
|
49
|
-
return childrenSize === null ? null : childrenSize * size;
|
|
50
|
-
}
|
|
51
|
-
function getArrayLikeCodecSizePrefix(size, realSize) {
|
|
52
|
-
return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
|
|
53
|
-
}
|
|
54
|
-
|
|
55
|
-
// src/assertions.ts
|
|
56
|
-
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
|
|
57
|
-
if (expected !== actual) {
|
|
58
|
-
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
|
|
59
|
-
}
|
|
60
|
-
}
|
|
61
|
-
|
|
62
|
-
// src/array.ts
|
|
63
|
-
function arrayCodecHelper(item, size, description) {
|
|
64
|
-
if (size === "remainder" && item.fixedSize === null) {
|
|
65
|
-
throw new Error('Codecs of "remainder" size must have fixed-size items.');
|
|
66
|
-
}
|
|
67
|
-
return {
|
|
68
|
-
description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
|
|
69
|
-
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
|
|
70
|
-
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
|
|
71
|
-
};
|
|
72
|
-
}
|
|
73
|
-
function getArrayEncoder(item, options = {}) {
|
|
74
|
-
const size = options.size ?? getU32Encoder();
|
|
75
|
-
return {
|
|
76
|
-
...arrayCodecHelper(item, size, options.description),
|
|
77
|
-
encode: (value) => {
|
|
78
|
-
if (typeof size === "number") {
|
|
79
|
-
assertValidNumberOfItemsForCodec("array", size, value.length);
|
|
80
|
-
}
|
|
81
|
-
return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]);
|
|
82
|
-
}
|
|
83
|
-
};
|
|
84
|
-
}
|
|
85
|
-
function getArrayDecoder(item, options = {}) {
|
|
86
|
-
const size = options.size ?? getU32Decoder();
|
|
87
|
-
return {
|
|
88
|
-
...arrayCodecHelper(item, size, options.description),
|
|
89
|
-
decode: (bytes, offset = 0) => {
|
|
90
|
-
if (typeof size === "object" && bytes.slice(offset).length === 0) {
|
|
91
|
-
return [[], offset];
|
|
92
|
-
}
|
|
93
|
-
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
|
|
94
|
-
offset = newOffset;
|
|
95
|
-
const values = [];
|
|
96
|
-
for (let i = 0; i < resolvedSize; i += 1) {
|
|
97
|
-
const [value, newOffset2] = item.decode(bytes, offset);
|
|
98
|
-
values.push(value);
|
|
99
|
-
offset = newOffset2;
|
|
100
|
-
}
|
|
101
|
-
return [values, offset];
|
|
102
|
-
}
|
|
103
|
-
};
|
|
104
|
-
}
|
|
105
|
-
function getArrayCodec(item, options = {}) {
|
|
106
|
-
return combineCodec(getArrayEncoder(item, options), getArrayDecoder(item, options));
|
|
113
|
+
return itemSize === null ? null : itemSize * size;
|
|
107
114
|
}
|
|
108
|
-
|
|
109
|
-
const
|
|
110
|
-
const backward =
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
const bytes = [];
|
|
115
|
+
function getBitArrayEncoder(size, config = {}) {
|
|
116
|
+
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
|
|
117
|
+
const backward = parsedConfig.backward ?? false;
|
|
118
|
+
return createEncoder({
|
|
119
|
+
fixedSize: size,
|
|
120
|
+
write(value, bytes, offset) {
|
|
121
|
+
const bytesToAdd = [];
|
|
116
122
|
for (let i = 0; i < size; i += 1) {
|
|
117
123
|
let byte = 0;
|
|
118
124
|
for (let j = 0; j < 8; j += 1) {
|
|
@@ -120,23 +126,22 @@ var getBitArrayEncoder = (size, options = {}) => {
|
|
|
120
126
|
byte |= feature << (backward ? j : 7 - j);
|
|
121
127
|
}
|
|
122
128
|
if (backward) {
|
|
123
|
-
|
|
129
|
+
bytesToAdd.unshift(byte);
|
|
124
130
|
} else {
|
|
125
|
-
|
|
131
|
+
bytesToAdd.push(byte);
|
|
126
132
|
}
|
|
127
133
|
}
|
|
128
|
-
|
|
129
|
-
|
|
134
|
+
bytes.set(bytesToAdd, offset);
|
|
135
|
+
return size;
|
|
136
|
+
}
|
|
137
|
+
});
|
|
138
|
+
}
|
|
139
|
+
function getBitArrayDecoder(size, config = {}) {
|
|
140
|
+
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
|
|
141
|
+
const backward = parsedConfig.backward ?? false;
|
|
142
|
+
return createDecoder({
|
|
130
143
|
fixedSize: size,
|
|
131
|
-
|
|
132
|
-
};
|
|
133
|
-
};
|
|
134
|
-
var getBitArrayDecoder = (size, options = {}) => {
|
|
135
|
-
const parsedOptions = typeof options === "boolean" ? { backward: options } : options;
|
|
136
|
-
const backward = parsedOptions.backward ?? false;
|
|
137
|
-
const backwardSuffix = backward ? "; backward" : "";
|
|
138
|
-
return {
|
|
139
|
-
decode(bytes, offset = 0) {
|
|
144
|
+
read(bytes, offset) {
|
|
140
145
|
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);
|
|
141
146
|
const booleans = [];
|
|
142
147
|
let slice = bytes.slice(offset, offset + size);
|
|
@@ -153,138 +158,107 @@ var getBitArrayDecoder = (size, options = {}) => {
|
|
|
153
158
|
}
|
|
154
159
|
});
|
|
155
160
|
return [booleans, offset + size];
|
|
156
|
-
}
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
}
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
}
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
maxSize: size.fixedSize
|
|
185
|
-
};
|
|
186
|
-
}
|
|
187
|
-
function getBooleanCodec(options = {}) {
|
|
188
|
-
return combineCodec(getBooleanEncoder(options), getBooleanDecoder(options));
|
|
189
|
-
}
|
|
190
|
-
function getBytesEncoder(options = {}) {
|
|
191
|
-
const size = options.size ?? "variable";
|
|
192
|
-
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
|
|
193
|
-
const description = options.description ?? `bytes(${sizeDescription})`;
|
|
194
|
-
const byteEncoder = {
|
|
195
|
-
description,
|
|
196
|
-
encode: (value) => value,
|
|
197
|
-
fixedSize: null,
|
|
198
|
-
maxSize: null
|
|
199
|
-
};
|
|
161
|
+
}
|
|
162
|
+
});
|
|
163
|
+
}
|
|
164
|
+
function getBitArrayCodec(size, config = {}) {
|
|
165
|
+
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
|
|
166
|
+
}
|
|
167
|
+
function getBooleanEncoder(config = {}) {
|
|
168
|
+
const size = config.size ?? getU8Encoder();
|
|
169
|
+
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
|
|
170
|
+
return mapEncoder(size, (value) => value ? 1 : 0);
|
|
171
|
+
}
|
|
172
|
+
function getBooleanDecoder(config = {}) {
|
|
173
|
+
const size = config.size ?? getU8Decoder();
|
|
174
|
+
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
|
|
175
|
+
return mapDecoder(size, (value) => Number(value) === 1);
|
|
176
|
+
}
|
|
177
|
+
function getBooleanCodec(config = {}) {
|
|
178
|
+
return combineCodec(getBooleanEncoder(config), getBooleanDecoder(config));
|
|
179
|
+
}
|
|
180
|
+
function getBytesEncoder(config = {}) {
|
|
181
|
+
const size = config.size ?? "variable";
|
|
182
|
+
const byteEncoder = createEncoder({
|
|
183
|
+
getSizeFromValue: (value) => value.length,
|
|
184
|
+
write: (value, bytes, offset) => {
|
|
185
|
+
bytes.set(value, offset);
|
|
186
|
+
return offset + value.length;
|
|
187
|
+
}
|
|
188
|
+
});
|
|
200
189
|
if (size === "variable") {
|
|
201
190
|
return byteEncoder;
|
|
202
191
|
}
|
|
203
192
|
if (typeof size === "number") {
|
|
204
|
-
return fixEncoder(byteEncoder, size
|
|
193
|
+
return fixEncoder(byteEncoder, size);
|
|
205
194
|
}
|
|
206
|
-
return {
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
return mergeBytes([lengthBytes, contentBytes]);
|
|
195
|
+
return createEncoder({
|
|
196
|
+
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
|
|
197
|
+
write: (value, bytes, offset) => {
|
|
198
|
+
offset = size.write(value.length, bytes, offset);
|
|
199
|
+
return byteEncoder.write(value, bytes, offset);
|
|
212
200
|
}
|
|
213
|
-
};
|
|
201
|
+
});
|
|
214
202
|
}
|
|
215
|
-
function getBytesDecoder(
|
|
216
|
-
const size =
|
|
217
|
-
const
|
|
218
|
-
|
|
219
|
-
const byteDecoder = {
|
|
220
|
-
decode: (bytes, offset = 0) => {
|
|
203
|
+
function getBytesDecoder(config = {}) {
|
|
204
|
+
const size = config.size ?? "variable";
|
|
205
|
+
const byteDecoder = createDecoder({
|
|
206
|
+
read: (bytes, offset) => {
|
|
221
207
|
const slice = bytes.slice(offset);
|
|
222
208
|
return [slice, offset + slice.length];
|
|
223
|
-
}
|
|
224
|
-
|
|
225
|
-
fixedSize: null,
|
|
226
|
-
maxSize: null
|
|
227
|
-
};
|
|
209
|
+
}
|
|
210
|
+
});
|
|
228
211
|
if (size === "variable") {
|
|
229
212
|
return byteDecoder;
|
|
230
213
|
}
|
|
231
214
|
if (typeof size === "number") {
|
|
232
|
-
return fixDecoder(byteDecoder, size
|
|
215
|
+
return fixDecoder(byteDecoder, size);
|
|
233
216
|
}
|
|
234
|
-
return {
|
|
235
|
-
|
|
236
|
-
decode: (bytes, offset = 0) => {
|
|
217
|
+
return createDecoder({
|
|
218
|
+
read: (bytes, offset) => {
|
|
237
219
|
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
|
|
238
|
-
const [lengthBigInt, lengthOffset] = size.
|
|
220
|
+
const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
|
|
239
221
|
const length = Number(lengthBigInt);
|
|
240
222
|
offset = lengthOffset;
|
|
241
223
|
const contentBytes = bytes.slice(offset, offset + length);
|
|
242
224
|
assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
|
|
243
|
-
const [value, contentOffset] = byteDecoder.
|
|
225
|
+
const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
|
|
244
226
|
offset += contentOffset;
|
|
245
227
|
return [value, offset];
|
|
246
228
|
}
|
|
247
|
-
};
|
|
248
|
-
}
|
|
249
|
-
function getBytesCodec(
|
|
250
|
-
return combineCodec(getBytesEncoder(
|
|
251
|
-
}
|
|
252
|
-
function
|
|
253
|
-
const
|
|
254
|
-
const
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
if (discriminator < 0) {
|
|
270
|
-
throw new Error(
|
|
271
|
-
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
|
|
272
|
-
);
|
|
273
|
-
}
|
|
274
|
-
const variantPrefix = prefix.encode(discriminator);
|
|
275
|
-
const variantSerializer = variants[discriminator][1];
|
|
276
|
-
const variantBytes = variantSerializer.encode(variant);
|
|
277
|
-
return mergeBytes([variantPrefix, variantBytes]);
|
|
229
|
+
});
|
|
230
|
+
}
|
|
231
|
+
function getBytesCodec(config = {}) {
|
|
232
|
+
return combineCodec(getBytesEncoder(config), getBytesDecoder(config));
|
|
233
|
+
}
|
|
234
|
+
function getDataEnumEncoder(variants, config = {}) {
|
|
235
|
+
const prefix = config.size ?? getU8Encoder();
|
|
236
|
+
const fixedSize = getDataEnumFixedSize(variants, prefix);
|
|
237
|
+
return createEncoder({
|
|
238
|
+
...fixedSize !== null ? { fixedSize } : {
|
|
239
|
+
getSizeFromValue: (variant) => {
|
|
240
|
+
const discriminator = getVariantDiscriminator(variants, variant);
|
|
241
|
+
const variantEncoder = variants[discriminator][1];
|
|
242
|
+
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
|
|
243
|
+
},
|
|
244
|
+
maxSize: getDataEnumMaxSize(variants, prefix)
|
|
245
|
+
},
|
|
246
|
+
write: (variant, bytes, offset) => {
|
|
247
|
+
const discriminator = getVariantDiscriminator(variants, variant);
|
|
248
|
+
offset = prefix.write(discriminator, bytes, offset);
|
|
249
|
+
const variantEncoder = variants[discriminator][1];
|
|
250
|
+
return variantEncoder.write(variant, bytes, offset);
|
|
278
251
|
}
|
|
279
|
-
};
|
|
280
|
-
}
|
|
281
|
-
function getDataEnumDecoder(variants,
|
|
282
|
-
const prefix =
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
252
|
+
});
|
|
253
|
+
}
|
|
254
|
+
function getDataEnumDecoder(variants, config = {}) {
|
|
255
|
+
const prefix = config.size ?? getU8Decoder();
|
|
256
|
+
const fixedSize = getDataEnumFixedSize(variants, prefix);
|
|
257
|
+
return createDecoder({
|
|
258
|
+
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
|
|
259
|
+
read: (bytes, offset) => {
|
|
286
260
|
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
|
|
287
|
-
const [discriminator, dOffset] = prefix.
|
|
261
|
+
const [discriminator, dOffset] = prefix.read(bytes, offset);
|
|
288
262
|
offset = dOffset;
|
|
289
263
|
const variantField = variants[Number(discriminator)] ?? null;
|
|
290
264
|
if (!variantField) {
|
|
@@ -292,329 +266,270 @@ function getDataEnumDecoder(variants, options = {}) {
|
|
|
292
266
|
`Enum discriminator out of range. Expected a number between 0 and ${variants.length - 1}, got ${discriminator}.`
|
|
293
267
|
);
|
|
294
268
|
}
|
|
295
|
-
const [variant, vOffset] = variantField[1].
|
|
269
|
+
const [variant, vOffset] = variantField[1].read(bytes, offset);
|
|
296
270
|
offset = vOffset;
|
|
297
271
|
return [{ __kind: variantField[0], ...variant ?? {} }, offset];
|
|
298
272
|
}
|
|
299
|
-
};
|
|
273
|
+
});
|
|
300
274
|
}
|
|
301
|
-
function getDataEnumCodec(variants,
|
|
302
|
-
return combineCodec(getDataEnumEncoder(variants,
|
|
275
|
+
function getDataEnumCodec(variants, config = {}) {
|
|
276
|
+
return combineCodec(getDataEnumEncoder(variants, config), getDataEnumDecoder(variants, config));
|
|
303
277
|
}
|
|
304
|
-
function
|
|
305
|
-
if (
|
|
306
|
-
|
|
278
|
+
function getDataEnumFixedSize(variants, prefix) {
|
|
279
|
+
if (variants.length === 0)
|
|
280
|
+
return isFixedSize(prefix) ? prefix.fixedSize : null;
|
|
281
|
+
if (!isFixedSize(variants[0][1]))
|
|
282
|
+
return null;
|
|
283
|
+
const variantSize = variants[0][1].fixedSize;
|
|
284
|
+
const sameSizedVariants = variants.every(
|
|
285
|
+
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
|
|
286
|
+
);
|
|
287
|
+
if (!sameSizedVariants)
|
|
288
|
+
return null;
|
|
289
|
+
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
|
|
290
|
+
}
|
|
291
|
+
function getDataEnumMaxSize(variants, prefix) {
|
|
292
|
+
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
|
|
293
|
+
return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
|
|
294
|
+
}
|
|
295
|
+
function getVariantDiscriminator(variants, variant) {
|
|
296
|
+
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
|
|
297
|
+
if (discriminator < 0) {
|
|
298
|
+
throw new Error(
|
|
299
|
+
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
|
|
300
|
+
);
|
|
307
301
|
}
|
|
308
|
-
return
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
return
|
|
302
|
+
return discriminator;
|
|
303
|
+
}
|
|
304
|
+
function getTupleEncoder(items) {
|
|
305
|
+
const fixedSize = sumCodecSizes(items.map(getFixedSize));
|
|
306
|
+
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
|
|
307
|
+
return createEncoder({
|
|
308
|
+
...fixedSize === null ? {
|
|
309
|
+
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
|
|
310
|
+
maxSize
|
|
311
|
+
} : { fixedSize },
|
|
312
|
+
write: (value, bytes, offset) => {
|
|
313
|
+
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
|
|
314
|
+
items.forEach((item, index) => {
|
|
315
|
+
offset = item.write(value[index], bytes, offset);
|
|
316
|
+
});
|
|
317
|
+
return offset;
|
|
324
318
|
}
|
|
325
|
-
};
|
|
319
|
+
});
|
|
320
|
+
}
|
|
321
|
+
function getTupleDecoder(items) {
|
|
322
|
+
const fixedSize = sumCodecSizes(items.map(getFixedSize));
|
|
323
|
+
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
|
|
324
|
+
return createDecoder({
|
|
325
|
+
...fixedSize === null ? { maxSize } : { fixedSize },
|
|
326
|
+
read: (bytes, offset) => {
|
|
327
|
+
const values = [];
|
|
328
|
+
items.forEach((item) => {
|
|
329
|
+
const [newValue, newOffset] = item.read(bytes, offset);
|
|
330
|
+
values.push(newValue);
|
|
331
|
+
offset = newOffset;
|
|
332
|
+
});
|
|
333
|
+
return [values, offset];
|
|
334
|
+
}
|
|
335
|
+
});
|
|
326
336
|
}
|
|
327
|
-
function
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
337
|
+
function getTupleCodec(items) {
|
|
338
|
+
return combineCodec(
|
|
339
|
+
getTupleEncoder(items),
|
|
340
|
+
getTupleDecoder(items)
|
|
341
|
+
);
|
|
342
|
+
}
|
|
343
|
+
|
|
344
|
+
// src/map.ts
|
|
345
|
+
function getMapEncoder(key, value, config = {}) {
|
|
346
|
+
return mapEncoder(
|
|
347
|
+
getArrayEncoder(getTupleEncoder([key, value]), config),
|
|
348
|
+
(map) => [...map.entries()]
|
|
349
|
+
);
|
|
350
|
+
}
|
|
351
|
+
function getMapDecoder(key, value, config = {}) {
|
|
352
|
+
return mapDecoder(
|
|
353
|
+
getArrayDecoder(getTupleDecoder([key, value]), config),
|
|
354
|
+
(entries) => new Map(entries)
|
|
355
|
+
);
|
|
356
|
+
}
|
|
357
|
+
function getMapCodec(key, value, config = {}) {
|
|
358
|
+
return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
|
|
359
|
+
}
|
|
360
|
+
function getNullableEncoder(item, config = {}) {
|
|
361
|
+
const prefix = config.prefix ?? getU8Encoder();
|
|
362
|
+
const fixed = config.fixed ?? false;
|
|
363
|
+
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
|
|
364
|
+
if (fixed || isZeroSizeItem) {
|
|
365
|
+
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
|
|
366
|
+
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
|
|
367
|
+
const fixedSize = prefix.fixedSize + item.fixedSize;
|
|
368
|
+
return createEncoder({
|
|
369
|
+
fixedSize,
|
|
370
|
+
write: (option, bytes, offset) => {
|
|
371
|
+
const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
|
|
372
|
+
if (option !== null) {
|
|
373
|
+
item.write(option, bytes, prefixOffset);
|
|
374
|
+
}
|
|
375
|
+
return offset + fixedSize;
|
|
335
376
|
}
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
offset =
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
offset = kOffset;
|
|
346
|
-
const [decodedValue, vOffset] = value.decode(bytes, offset);
|
|
347
|
-
offset = vOffset;
|
|
348
|
-
map.set(decodedKey, decodedValue);
|
|
377
|
+
});
|
|
378
|
+
}
|
|
379
|
+
return createEncoder({
|
|
380
|
+
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0),
|
|
381
|
+
maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
|
|
382
|
+
write: (option, bytes, offset) => {
|
|
383
|
+
offset = prefix.write(Number(option !== null), bytes, offset);
|
|
384
|
+
if (option !== null) {
|
|
385
|
+
offset = item.write(option, bytes, offset);
|
|
349
386
|
}
|
|
350
|
-
return
|
|
387
|
+
return offset;
|
|
351
388
|
}
|
|
352
|
-
};
|
|
353
|
-
}
|
|
354
|
-
function
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
|
|
363
|
-
descriptionSuffix += "; fixed";
|
|
389
|
+
});
|
|
390
|
+
}
|
|
391
|
+
function getNullableDecoder(item, config = {}) {
|
|
392
|
+
const prefix = config.prefix ?? getU8Decoder();
|
|
393
|
+
const fixed = config.fixed ?? false;
|
|
394
|
+
let fixedSize = null;
|
|
395
|
+
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
|
|
396
|
+
if (fixed || isZeroSizeItem) {
|
|
397
|
+
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
|
|
398
|
+
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
|
|
364
399
|
fixedSize = prefix.fixedSize + item.fixedSize;
|
|
365
400
|
}
|
|
366
|
-
return {
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
|
|
370
|
-
};
|
|
371
|
-
}
|
|
372
|
-
function getNullableEncoder(item, options = {}) {
|
|
373
|
-
const prefix = options.prefix ?? getU8Encoder();
|
|
374
|
-
const fixed = options.fixed ?? false;
|
|
375
|
-
return {
|
|
376
|
-
...nullableCodecHelper(item, prefix, fixed, options.description),
|
|
377
|
-
encode: (option) => {
|
|
378
|
-
const prefixByte = prefix.encode(Number(option !== null));
|
|
379
|
-
let itemBytes = option !== null ? item.encode(option) : new Uint8Array();
|
|
380
|
-
itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes;
|
|
381
|
-
return mergeBytes([prefixByte, itemBytes]);
|
|
382
|
-
}
|
|
383
|
-
};
|
|
384
|
-
}
|
|
385
|
-
function getNullableDecoder(item, options = {}) {
|
|
386
|
-
const prefix = options.prefix ?? getU8Decoder();
|
|
387
|
-
const fixed = options.fixed ?? false;
|
|
388
|
-
return {
|
|
389
|
-
...nullableCodecHelper(item, prefix, fixed, options.description),
|
|
390
|
-
decode: (bytes, offset = 0) => {
|
|
401
|
+
return createDecoder({
|
|
402
|
+
...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
|
|
403
|
+
read: (bytes, offset) => {
|
|
391
404
|
if (bytes.length - offset <= 0) {
|
|
392
405
|
return [null, offset];
|
|
393
406
|
}
|
|
394
|
-
const
|
|
395
|
-
const [isSome, prefixOffset] = prefix.decode(bytes, offset);
|
|
396
|
-
offset = prefixOffset;
|
|
407
|
+
const [isSome, prefixOffset] = prefix.read(bytes, offset);
|
|
397
408
|
if (isSome === 0) {
|
|
398
|
-
return [null,
|
|
409
|
+
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
|
|
399
410
|
}
|
|
400
|
-
const [value, newOffset] = item.
|
|
401
|
-
offset
|
|
402
|
-
return [value, fixed ? fixedOffset : offset];
|
|
411
|
+
const [value, newOffset] = item.read(bytes, prefixOffset);
|
|
412
|
+
return [value, fixedSize !== null ? offset + fixedSize : newOffset];
|
|
403
413
|
}
|
|
404
|
-
};
|
|
414
|
+
});
|
|
415
|
+
}
|
|
416
|
+
function getNullableCodec(item, config = {}) {
|
|
417
|
+
const configCast = config;
|
|
418
|
+
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
|
|
419
|
+
}
|
|
420
|
+
function getScalarEnumEncoder(constructor, config = {}) {
|
|
421
|
+
const prefix = config.size ?? getU8Encoder();
|
|
422
|
+
const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor);
|
|
423
|
+
return mapEncoder(prefix, (value) => {
|
|
424
|
+
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
|
|
425
|
+
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
|
|
426
|
+
if (isInvalidNumber || isInvalidString) {
|
|
427
|
+
throw new Error(
|
|
428
|
+
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
|
|
429
|
+
);
|
|
430
|
+
}
|
|
431
|
+
if (typeof value === "number")
|
|
432
|
+
return value;
|
|
433
|
+
const valueIndex = enumValues.indexOf(value);
|
|
434
|
+
if (valueIndex >= 0)
|
|
435
|
+
return valueIndex;
|
|
436
|
+
return enumKeys.indexOf(value);
|
|
437
|
+
});
|
|
438
|
+
}
|
|
439
|
+
function getScalarEnumDecoder(constructor, config = {}) {
|
|
440
|
+
const prefix = config.size ?? getU8Decoder();
|
|
441
|
+
const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor);
|
|
442
|
+
return mapDecoder(prefix, (value) => {
|
|
443
|
+
const valueAsNumber = Number(value);
|
|
444
|
+
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
|
|
445
|
+
throw new Error(
|
|
446
|
+
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
|
|
447
|
+
);
|
|
448
|
+
}
|
|
449
|
+
return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber];
|
|
450
|
+
});
|
|
405
451
|
}
|
|
406
|
-
function
|
|
407
|
-
return combineCodec(
|
|
452
|
+
// Builds a scalar enum codec from the matching encoder/decoder pair.
function getScalarEnumCodec(constructor, config = {}) {
  const encoder = getScalarEnumEncoder(constructor, config);
  const decoder = getScalarEnumDecoder(constructor, config);
  return combineCodec(encoder, decoder);
}
|
|
409
|
-
function
|
|
455
|
+
// Derives the lookup tables shared by the scalar enum encoder and decoder
// from an enum-like object (e.g. a compiled TypeScript enum).
function getScalarEnumStats(constructor) {
  // Keys and values in the object's own property-enumeration order.
  const enumKeys = Object.keys(constructor);
  const enumValues = Object.values(constructor);
  // A compiled numeric TS enum carries a reverse mapping (name -> number and
  // number -> name), so any numeric value marks the enum as numeric — and
  // halves the real variant count.
  const isNumericEnum = enumValues.some((value) => typeof value === "number");
  const variantCount = isNumericEnum ? enumValues.length / 2 : enumValues.length;
  // Accepted string spellings: for numeric enums only the keys; for string
  // enums both keys and values, deduplicated.
  const stringValues = isNumericEnum
    ? [...enumKeys]
    : [.../* @__PURE__ */ new Set([...enumKeys, ...enumValues])];
  return {
    enumKeys,
    enumValues,
    isNumericEnum,
    maxRange: variantCount - 1,
    minRange: 0,
    stringValues
  };
}
|
|
429
|
-
function
|
|
430
|
-
|
|
431
|
-
const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, options.description);
|
|
432
|
-
return {
|
|
433
|
-
description,
|
|
434
|
-
encode: (value) => {
|
|
435
|
-
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
|
|
436
|
-
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
|
|
437
|
-
if (isInvalidNumber || isInvalidString) {
|
|
438
|
-
throw new Error(
|
|
439
|
-
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
|
|
440
|
-
);
|
|
441
|
-
}
|
|
442
|
-
if (typeof value === "number")
|
|
443
|
-
return prefix.encode(value);
|
|
444
|
-
const valueIndex = enumValues.indexOf(value);
|
|
445
|
-
if (valueIndex >= 0)
|
|
446
|
-
return prefix.encode(valueIndex);
|
|
447
|
-
return prefix.encode(enumKeys.indexOf(value));
|
|
448
|
-
},
|
|
449
|
-
fixedSize,
|
|
450
|
-
maxSize
|
|
451
|
-
};
|
|
452
|
-
}
|
|
453
|
-
function getScalarEnumDecoder(constructor, options = {}) {
|
|
454
|
-
const prefix = options.size ?? getU8Decoder();
|
|
455
|
-
const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper(
|
|
456
|
-
constructor,
|
|
457
|
-
prefix,
|
|
458
|
-
options.description
|
|
459
|
-
);
|
|
460
|
-
return {
|
|
461
|
-
decode: (bytes, offset = 0) => {
|
|
462
|
-
assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
|
|
463
|
-
const [value, newOffset] = prefix.decode(bytes, offset);
|
|
464
|
-
const valueAsNumber = Number(value);
|
|
465
|
-
offset = newOffset;
|
|
466
|
-
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
|
|
467
|
-
throw new Error(
|
|
468
|
-
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
|
|
469
|
-
);
|
|
470
|
-
}
|
|
471
|
-
return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
|
|
472
|
-
},
|
|
473
|
-
description,
|
|
474
|
-
fixedSize,
|
|
475
|
-
maxSize
|
|
476
|
-
};
|
|
477
|
-
}
|
|
478
|
-
function getScalarEnumCodec(constructor, options = {}) {
|
|
479
|
-
return combineCodec(getScalarEnumEncoder(constructor, options), getScalarEnumDecoder(constructor, options));
|
|
480
|
-
}
|
|
481
|
-
function setCodecHelper(item, size, description) {
|
|
482
|
-
if (size === "remainder" && item.fixedSize === null) {
|
|
483
|
-
throw new Error('Codecs of "remainder" size must have fixed-size items.');
|
|
484
|
-
}
|
|
485
|
-
return {
|
|
486
|
-
description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
|
|
487
|
-
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
|
|
488
|
-
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
|
|
489
|
-
};
|
|
490
|
-
}
|
|
491
|
-
function getSetEncoder(item, options = {}) {
|
|
492
|
-
const size = options.size ?? getU32Encoder();
|
|
493
|
-
return {
|
|
494
|
-
...setCodecHelper(item, size, options.description),
|
|
495
|
-
encode: (set) => {
|
|
496
|
-
if (typeof size === "number" && set.size !== size) {
|
|
497
|
-
assertValidNumberOfItemsForCodec("set", size, set.size);
|
|
498
|
-
}
|
|
499
|
-
const itemBytes = Array.from(set, (value) => item.encode(value));
|
|
500
|
-
return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
|
|
501
|
-
}
|
|
502
|
-
};
|
|
503
|
-
}
|
|
504
|
-
function getSetDecoder(item, options = {}) {
|
|
505
|
-
const size = options.size ?? getU32Decoder();
|
|
506
|
-
return {
|
|
507
|
-
...setCodecHelper(item, size, options.description),
|
|
508
|
-
decode: (bytes, offset = 0) => {
|
|
509
|
-
const set = /* @__PURE__ */ new Set();
|
|
510
|
-
if (typeof size === "object" && bytes.slice(offset).length === 0) {
|
|
511
|
-
return [set, offset];
|
|
512
|
-
}
|
|
513
|
-
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
|
|
514
|
-
offset = newOffset;
|
|
515
|
-
for (let i = 0; i < resolvedSize; i += 1) {
|
|
516
|
-
const [value, newOffset2] = item.decode(bytes, offset);
|
|
517
|
-
offset = newOffset2;
|
|
518
|
-
set.add(value);
|
|
519
|
-
}
|
|
520
|
-
return [set, offset];
|
|
521
|
-
}
|
|
522
|
-
};
|
|
471
|
+
// Encodes a Set by materialising it as an array and delegating to the
// array encoder with the same item codec and config.
function getSetEncoder(item, config = {}) {
  return mapEncoder(getArrayEncoder(item, config), (set) => Array.from(set));
}
|
|
524
|
-
function
|
|
525
|
-
return
|
|
474
|
+
// Decodes via the array decoder, then wraps the decoded entries in a Set.
function getSetDecoder(item, config = {}) {
  return mapDecoder(getArrayDecoder(item, config), (items) => new Set(items));
}
|
|
527
|
-
function
|
|
528
|
-
|
|
529
|
-
return {
|
|
530
|
-
description: description ?? `struct(${fieldDescriptions})`,
|
|
531
|
-
fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)),
|
|
532
|
-
maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize))
|
|
533
|
-
};
|
|
477
|
+
// Builds a Set codec from the matching encoder/decoder pair.
function getSetCodec(item, config = {}) {
  const encoder = getSetEncoder(item, config);
  const decoder = getSetDecoder(item, config);
  return combineCodec(encoder, decoder);
}
|
|
535
|
-
function getStructEncoder(fields
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
480
|
+
// Encodes an object by writing each field in declaration order, using the
// per-field encoders in `fields` ([key, encoder] pairs). The struct is
// fixed-size when every field is; otherwise the size is computed per value.
function getStructEncoder(fields) {
  const fieldEncoders = fields.map(([, fieldEncoder]) => fieldEncoder);
  const fixedSize = sumCodecSizes(fieldEncoders.map(getFixedSize));
  const maxSize = sumCodecSizes(fieldEncoders.map(getMaxSize)) ?? undefined;
  // Variable-size structs expose a per-value size function; fixed-size ones
  // expose the precomputed total.
  const sizeConfig =
    fixedSize === null
      ? {
          getSizeFromValue: (value) =>
            fields.reduce((total, [key, fieldEncoder]) => total + getEncodedSize(value[key], fieldEncoder), 0),
          maxSize
        }
      : { fixedSize };
  return createEncoder({
    ...sizeConfig,
    write: (struct, bytes, offset) => {
      for (const [key, fieldEncoder] of fields) {
        offset = fieldEncoder.write(struct[key], bytes, offset);
      }
      return offset;
    }
  });
}
|
|
497
|
+
// Decodes an object by reading each field in declaration order with the
// per-field decoders in `fields` ([key, decoder] pairs). Mirrors the size
// reporting of getStructEncoder: fixed when all fields are fixed-size.
function getStructDecoder(fields) {
  const fieldDecoders = fields.map(([, fieldDecoder]) => fieldDecoder);
  const fixedSize = sumCodecSizes(fieldDecoders.map(getFixedSize));
  const maxSize = sumCodecSizes(fieldDecoders.map(getMaxSize)) ?? undefined;
  return createDecoder({
    ...(fixedSize === null ? { maxSize } : { fixedSize }),
    read: (bytes, offset) => {
      const struct = {};
      for (const [key, fieldDecoder] of fields) {
        const [value, nextOffset] = fieldDecoder.read(bytes, offset);
        struct[key] = value;
        offset = nextOffset;
      }
      return [struct, offset];
    }
  });
}
|
|
592
|
-
function
|
|
593
|
-
return combineCodec(
|
|
594
|
-
getTupleEncoder(items, options),
|
|
595
|
-
getTupleDecoder(items, options)
|
|
596
|
-
);
|
|
514
|
+
// Builds a struct codec from the matching encoder/decoder pair.
function getStructCodec(fields) {
  const encoder = getStructEncoder(fields);
  const decoder = getStructDecoder(fields);
  return combineCodec(encoder, decoder);
}
|
|
598
|
-
function getUnitEncoder(
|
|
599
|
-
return {
|
|
600
|
-
description: options.description ?? "unit",
|
|
601
|
-
encode: () => new Uint8Array(),
|
|
517
|
+
// Zero-byte encoder: writing a unit value is a no-op that returns the
// offset unchanged.
function getUnitEncoder() {
  const write = (_value, _bytes, offset) => offset;
  return createEncoder({ fixedSize: 0, write });
}
|
|
606
|
-
function getUnitDecoder(
|
|
607
|
-
return {
|
|
608
|
-
decode: (_bytes, offset = 0) => [void 0, offset],
|
|
609
|
-
description: options.description ?? "unit",
|
|
523
|
+
// Zero-byte decoder: consumes nothing and yields `undefined` at the same
// offset.
function getUnitDecoder() {
  const read = (_bytes, offset) => [undefined, offset];
  return createDecoder({ fixedSize: 0, read });
}
|
|
614
|
-
function getUnitCodec(
|
|
615
|
-
return combineCodec(getUnitEncoder(
|
|
529
|
+
// Builds the unit codec from the zero-byte encoder/decoder pair.
function getUnitCodec() {
  const encoder = getUnitEncoder();
  const decoder = getUnitDecoder();
  return combineCodec(encoder, decoder);
}
|
|
617
532
|
|
|
618
|
-
export { assertValidNumberOfItemsForCodec,
|
|
533
|
+
export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
|
|
619
534
|
//# sourceMappingURL=out.js.map
|
|
620
535
|
//# sourceMappingURL=index.browser.js.map
|