@solana/codecs-data-structures 2.0.0-experimental.fe489b3 → 2.0.0-experimental.feaeef2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +482 -4
- package/dist/index.browser.cjs +401 -488
- package/dist/index.browser.cjs.map +1 -1
- package/dist/index.browser.js +403 -486
- package/dist/index.browser.js.map +1 -1
- package/dist/index.native.js +403 -486
- package/dist/index.native.js.map +1 -1
- package/dist/index.node.cjs +401 -488
- package/dist/index.node.cjs.map +1 -1
- package/dist/index.node.js +403 -486
- package/dist/index.node.js.map +1 -1
- package/dist/types/array.d.ts +39 -10
- package/dist/types/array.d.ts.map +1 -0
- package/dist/types/assertions.d.ts.map +1 -0
- package/dist/types/bit-array.d.ts +9 -9
- package/dist/types/bit-array.d.ts.map +1 -0
- package/dist/types/boolean.d.ts +22 -10
- package/dist/types/boolean.d.ts.map +1 -0
- package/dist/types/bytes.d.ts +18 -9
- package/dist/types/bytes.d.ts.map +1 -0
- package/dist/types/data-enum.d.ts +22 -24
- package/dist/types/data-enum.d.ts.map +1 -0
- package/dist/types/index.d.ts +13 -14
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/map.d.ts +28 -10
- package/dist/types/map.d.ts.map +1 -0
- package/dist/types/nullable.d.ts +28 -10
- package/dist/types/nullable.d.ts.map +1 -0
- package/dist/types/scalar-enum.d.ts +46 -15
- package/dist/types/scalar-enum.d.ts.map +1 -0
- package/dist/types/set.d.ts +28 -10
- package/dist/types/set.d.ts.map +1 -0
- package/dist/types/struct.d.ts +16 -21
- package/dist/types/struct.d.ts.map +1 -0
- package/dist/types/tuple.d.ts +22 -15
- package/dist/types/tuple.d.ts.map +1 -0
- package/dist/types/unit.d.ts +4 -12
- package/dist/types/unit.d.ts.map +1 -0
- package/dist/types/utils.d.ts +10 -2
- package/dist/types/utils.d.ts.map +1 -0
- package/package.json +13 -34
- package/dist/index.development.js +0 -865
- package/dist/index.development.js.map +0 -1
- package/dist/index.production.min.js +0 -51
- package/dist/types/array-like-codec-size.d.ts +0 -20
package/dist/index.browser.js
CHANGED
|
@@ -1,9 +1,17 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, assertIsFixedSize, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core';
|
|
2
2
|
import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers';
|
|
3
|
+
import { SolanaError, SOLANA_ERROR__CODECS__INVALID_NUMBER_OF_ITEMS, SOLANA_ERROR__CODECS__ENUM_DISCRIMINATOR_OUT_OF_RANGE, SOLANA_ERROR__CODECS__INVALID_DATA_ENUM_VARIANT, SOLANA_ERROR__CODECS__INVALID_SCALAR_ENUM_VARIANT } from '@solana/errors';
|
|
3
4
|
|
|
4
5
|
// src/array.ts
|
|
5
|
-
|
|
6
|
-
|
|
6
|
+
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
|
|
7
|
+
if (expected !== actual) {
|
|
8
|
+
throw new SolanaError(SOLANA_ERROR__CODECS__INVALID_NUMBER_OF_ITEMS, {
|
|
9
|
+
actual,
|
|
10
|
+
codecDescription,
|
|
11
|
+
expected
|
|
12
|
+
});
|
|
13
|
+
}
|
|
14
|
+
}
|
|
7
15
|
function maxCodecSizes(sizes) {
|
|
8
16
|
return sizes.reduce(
|
|
9
17
|
(all, size) => all === null || size === null ? null : Math.max(all, size),
|
|
@@ -13,106 +21,88 @@ function maxCodecSizes(sizes) {
|
|
|
13
21
|
function sumCodecSizes(sizes) {
|
|
14
22
|
return sizes.reduce((all, size) => all === null || size === null ? null : all + size, 0);
|
|
15
23
|
}
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
|
|
19
|
-
if (typeof size === "number") {
|
|
20
|
-
return [size, offset];
|
|
21
|
-
}
|
|
22
|
-
if (typeof size === "object") {
|
|
23
|
-
return size.decode(bytes, offset);
|
|
24
|
-
}
|
|
25
|
-
if (size === "remainder") {
|
|
26
|
-
const childrenSize = sumCodecSizes(childrenSizes);
|
|
27
|
-
if (childrenSize === null) {
|
|
28
|
-
throw new Error('Codecs of "remainder" size must have fixed-size items.');
|
|
29
|
-
}
|
|
30
|
-
const remainder = bytes.slice(offset).length;
|
|
31
|
-
if (remainder % childrenSize !== 0) {
|
|
32
|
-
throw new Error(
|
|
33
|
-
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
|
|
34
|
-
);
|
|
35
|
-
}
|
|
36
|
-
return [remainder / childrenSize, offset];
|
|
37
|
-
}
|
|
38
|
-
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
|
|
24
|
+
function getFixedSize(codec) {
|
|
25
|
+
return isFixedSize(codec) ? codec.fixedSize : null;
|
|
39
26
|
}
|
|
40
|
-
function
|
|
41
|
-
return
|
|
42
|
-
}
|
|
43
|
-
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
|
|
44
|
-
if (typeof size !== "number")
|
|
45
|
-
return null;
|
|
46
|
-
if (size === 0)
|
|
47
|
-
return 0;
|
|
48
|
-
const childrenSize = sumCodecSizes(childrenSizes);
|
|
49
|
-
return childrenSize === null ? null : childrenSize * size;
|
|
50
|
-
}
|
|
51
|
-
function getArrayLikeCodecSizePrefix(size, realSize) {
|
|
52
|
-
return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
|
|
53
|
-
}
|
|
54
|
-
|
|
55
|
-
// src/assertions.ts
|
|
56
|
-
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
|
|
57
|
-
if (expected !== actual) {
|
|
58
|
-
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
|
|
59
|
-
}
|
|
27
|
+
function getMaxSize(codec) {
|
|
28
|
+
return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
|
|
60
29
|
}
|
|
61
30
|
|
|
62
31
|
// src/array.ts
|
|
63
|
-
function
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
return {
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
}
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
...arrayCodecHelper(item, size, options.description),
|
|
77
|
-
encode: (value) => {
|
|
32
|
+
function getArrayEncoder(item, config = {}) {
|
|
33
|
+
const size = config.size ?? getU32Encoder();
|
|
34
|
+
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
|
|
35
|
+
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
|
|
36
|
+
return createEncoder({
|
|
37
|
+
...fixedSize !== null ? { fixedSize } : {
|
|
38
|
+
getSizeFromValue: (array) => {
|
|
39
|
+
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
|
|
40
|
+
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
|
|
41
|
+
},
|
|
42
|
+
maxSize
|
|
43
|
+
},
|
|
44
|
+
write: (array, bytes, offset) => {
|
|
78
45
|
if (typeof size === "number") {
|
|
79
|
-
assertValidNumberOfItemsForCodec("array", size,
|
|
46
|
+
assertValidNumberOfItemsForCodec("array", size, array.length);
|
|
80
47
|
}
|
|
81
|
-
|
|
48
|
+
if (typeof size === "object") {
|
|
49
|
+
offset = size.write(array.length, bytes, offset);
|
|
50
|
+
}
|
|
51
|
+
array.forEach((value) => {
|
|
52
|
+
offset = item.write(value, bytes, offset);
|
|
53
|
+
});
|
|
54
|
+
return offset;
|
|
82
55
|
}
|
|
83
|
-
};
|
|
84
|
-
}
|
|
85
|
-
function getArrayDecoder(item,
|
|
86
|
-
const size =
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
56
|
+
});
|
|
57
|
+
}
|
|
58
|
+
function getArrayDecoder(item, config = {}) {
|
|
59
|
+
const size = config.size ?? getU32Decoder();
|
|
60
|
+
const itemSize = getFixedSize(item);
|
|
61
|
+
const fixedSize = computeArrayLikeCodecSize(size, itemSize);
|
|
62
|
+
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
|
|
63
|
+
return createDecoder({
|
|
64
|
+
...fixedSize !== null ? { fixedSize } : { maxSize },
|
|
65
|
+
read: (bytes, offset) => {
|
|
66
|
+
const array = [];
|
|
90
67
|
if (typeof size === "object" && bytes.slice(offset).length === 0) {
|
|
91
|
-
return [
|
|
68
|
+
return [array, offset];
|
|
69
|
+
}
|
|
70
|
+
if (size === "remainder") {
|
|
71
|
+
while (offset < bytes.length) {
|
|
72
|
+
const [value, newOffset2] = item.read(bytes, offset);
|
|
73
|
+
offset = newOffset2;
|
|
74
|
+
array.push(value);
|
|
75
|
+
}
|
|
76
|
+
return [array, offset];
|
|
92
77
|
}
|
|
93
|
-
const [resolvedSize, newOffset] =
|
|
78
|
+
const [resolvedSize, newOffset] = typeof size === "number" ? [size, offset] : size.read(bytes, offset);
|
|
94
79
|
offset = newOffset;
|
|
95
|
-
const values = [];
|
|
96
80
|
for (let i = 0; i < resolvedSize; i += 1) {
|
|
97
|
-
const [value, newOffset2] = item.
|
|
98
|
-
values.push(value);
|
|
81
|
+
const [value, newOffset2] = item.read(bytes, offset);
|
|
99
82
|
offset = newOffset2;
|
|
83
|
+
array.push(value);
|
|
100
84
|
}
|
|
101
|
-
return [
|
|
85
|
+
return [array, offset];
|
|
102
86
|
}
|
|
103
|
-
};
|
|
87
|
+
});
|
|
104
88
|
}
|
|
105
|
-
function getArrayCodec(item,
|
|
106
|
-
return combineCodec(getArrayEncoder(item,
|
|
89
|
+
function getArrayCodec(item, config = {}) {
|
|
90
|
+
return combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config));
|
|
107
91
|
}
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
92
|
+
function computeArrayLikeCodecSize(size, itemSize) {
|
|
93
|
+
if (typeof size !== "number")
|
|
94
|
+
return null;
|
|
95
|
+
if (size === 0)
|
|
96
|
+
return 0;
|
|
97
|
+
return itemSize === null ? null : itemSize * size;
|
|
98
|
+
}
|
|
99
|
+
function getBitArrayEncoder(size, config = {}) {
|
|
100
|
+
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
|
|
101
|
+
const backward = parsedConfig.backward ?? false;
|
|
102
|
+
return createEncoder({
|
|
103
|
+
fixedSize: size,
|
|
104
|
+
write(value, bytes, offset) {
|
|
105
|
+
const bytesToAdd = [];
|
|
116
106
|
for (let i = 0; i < size; i += 1) {
|
|
117
107
|
let byte = 0;
|
|
118
108
|
for (let j = 0; j < 8; j += 1) {
|
|
@@ -120,23 +110,22 @@ var getBitArrayEncoder = (size, options = {}) => {
|
|
|
120
110
|
byte |= feature << (backward ? j : 7 - j);
|
|
121
111
|
}
|
|
122
112
|
if (backward) {
|
|
123
|
-
|
|
113
|
+
bytesToAdd.unshift(byte);
|
|
124
114
|
} else {
|
|
125
|
-
|
|
115
|
+
bytesToAdd.push(byte);
|
|
126
116
|
}
|
|
127
117
|
}
|
|
128
|
-
|
|
129
|
-
|
|
118
|
+
bytes.set(bytesToAdd, offset);
|
|
119
|
+
return size;
|
|
120
|
+
}
|
|
121
|
+
});
|
|
122
|
+
}
|
|
123
|
+
function getBitArrayDecoder(size, config = {}) {
|
|
124
|
+
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
|
|
125
|
+
const backward = parsedConfig.backward ?? false;
|
|
126
|
+
return createDecoder({
|
|
130
127
|
fixedSize: size,
|
|
131
|
-
|
|
132
|
-
};
|
|
133
|
-
};
|
|
134
|
-
var getBitArrayDecoder = (size, options = {}) => {
|
|
135
|
-
const parsedOptions = typeof options === "boolean" ? { backward: options } : options;
|
|
136
|
-
const backward = parsedOptions.backward ?? false;
|
|
137
|
-
const backwardSuffix = backward ? "; backward" : "";
|
|
138
|
-
return {
|
|
139
|
-
decode(bytes, offset = 0) {
|
|
128
|
+
read(bytes, offset) {
|
|
140
129
|
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);
|
|
141
130
|
const booleans = [];
|
|
142
131
|
let slice = bytes.slice(offset, offset + size);
|
|
@@ -153,468 +142,396 @@ var getBitArrayDecoder = (size, options = {}) => {
|
|
|
153
142
|
}
|
|
154
143
|
});
|
|
155
144
|
return [booleans, offset + size];
|
|
156
|
-
}
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
}
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
}
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
maxSize: size.fixedSize
|
|
185
|
-
};
|
|
186
|
-
}
|
|
187
|
-
function getBooleanCodec(options = {}) {
|
|
188
|
-
return combineCodec(getBooleanEncoder(options), getBooleanDecoder(options));
|
|
189
|
-
}
|
|
190
|
-
function getBytesEncoder(options = {}) {
|
|
191
|
-
const size = options.size ?? "variable";
|
|
192
|
-
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
|
|
193
|
-
const description = options.description ?? `bytes(${sizeDescription})`;
|
|
194
|
-
const byteEncoder = {
|
|
195
|
-
description,
|
|
196
|
-
encode: (value) => value,
|
|
197
|
-
fixedSize: null,
|
|
198
|
-
maxSize: null
|
|
199
|
-
};
|
|
145
|
+
}
|
|
146
|
+
});
|
|
147
|
+
}
|
|
148
|
+
function getBitArrayCodec(size, config = {}) {
|
|
149
|
+
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
|
|
150
|
+
}
|
|
151
|
+
function getBooleanEncoder(config = {}) {
|
|
152
|
+
const size = config.size ?? getU8Encoder();
|
|
153
|
+
assertIsFixedSize(size);
|
|
154
|
+
return mapEncoder(size, (value) => value ? 1 : 0);
|
|
155
|
+
}
|
|
156
|
+
function getBooleanDecoder(config = {}) {
|
|
157
|
+
const size = config.size ?? getU8Decoder();
|
|
158
|
+
assertIsFixedSize(size);
|
|
159
|
+
return mapDecoder(size, (value) => Number(value) === 1);
|
|
160
|
+
}
|
|
161
|
+
function getBooleanCodec(config = {}) {
|
|
162
|
+
return combineCodec(getBooleanEncoder(config), getBooleanDecoder(config));
|
|
163
|
+
}
|
|
164
|
+
function getBytesEncoder(config = {}) {
|
|
165
|
+
const size = config.size ?? "variable";
|
|
166
|
+
const byteEncoder = createEncoder({
|
|
167
|
+
getSizeFromValue: (value) => value.length,
|
|
168
|
+
write: (value, bytes, offset) => {
|
|
169
|
+
bytes.set(value, offset);
|
|
170
|
+
return offset + value.length;
|
|
171
|
+
}
|
|
172
|
+
});
|
|
200
173
|
if (size === "variable") {
|
|
201
174
|
return byteEncoder;
|
|
202
175
|
}
|
|
203
176
|
if (typeof size === "number") {
|
|
204
|
-
return fixEncoder(byteEncoder, size
|
|
177
|
+
return fixEncoder(byteEncoder, size);
|
|
205
178
|
}
|
|
206
|
-
return {
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
return mergeBytes([lengthBytes, contentBytes]);
|
|
179
|
+
return createEncoder({
|
|
180
|
+
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
|
|
181
|
+
write: (value, bytes, offset) => {
|
|
182
|
+
offset = size.write(value.length, bytes, offset);
|
|
183
|
+
return byteEncoder.write(value, bytes, offset);
|
|
212
184
|
}
|
|
213
|
-
};
|
|
185
|
+
});
|
|
214
186
|
}
|
|
215
|
-
function getBytesDecoder(
|
|
216
|
-
const size =
|
|
217
|
-
const
|
|
218
|
-
|
|
219
|
-
const byteDecoder = {
|
|
220
|
-
decode: (bytes, offset = 0) => {
|
|
187
|
+
function getBytesDecoder(config = {}) {
|
|
188
|
+
const size = config.size ?? "variable";
|
|
189
|
+
const byteDecoder = createDecoder({
|
|
190
|
+
read: (bytes, offset) => {
|
|
221
191
|
const slice = bytes.slice(offset);
|
|
222
192
|
return [slice, offset + slice.length];
|
|
223
|
-
}
|
|
224
|
-
|
|
225
|
-
fixedSize: null,
|
|
226
|
-
maxSize: null
|
|
227
|
-
};
|
|
193
|
+
}
|
|
194
|
+
});
|
|
228
195
|
if (size === "variable") {
|
|
229
196
|
return byteDecoder;
|
|
230
197
|
}
|
|
231
198
|
if (typeof size === "number") {
|
|
232
|
-
return fixDecoder(byteDecoder, size
|
|
199
|
+
return fixDecoder(byteDecoder, size);
|
|
233
200
|
}
|
|
234
|
-
return {
|
|
235
|
-
|
|
236
|
-
decode: (bytes, offset = 0) => {
|
|
201
|
+
return createDecoder({
|
|
202
|
+
read: (bytes, offset) => {
|
|
237
203
|
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
|
|
238
|
-
const [lengthBigInt, lengthOffset] = size.
|
|
204
|
+
const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
|
|
239
205
|
const length = Number(lengthBigInt);
|
|
240
206
|
offset = lengthOffset;
|
|
241
207
|
const contentBytes = bytes.slice(offset, offset + length);
|
|
242
208
|
assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
|
|
243
|
-
const [value, contentOffset] = byteDecoder.
|
|
209
|
+
const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
|
|
244
210
|
offset += contentOffset;
|
|
245
211
|
return [value, offset];
|
|
246
212
|
}
|
|
247
|
-
};
|
|
248
|
-
}
|
|
249
|
-
function getBytesCodec(
|
|
250
|
-
return combineCodec(getBytesEncoder(
|
|
251
|
-
}
|
|
252
|
-
function
|
|
253
|
-
const
|
|
254
|
-
const
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
if (discriminator < 0) {
|
|
270
|
-
throw new Error(
|
|
271
|
-
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
|
|
272
|
-
);
|
|
273
|
-
}
|
|
274
|
-
const variantPrefix = prefix.encode(discriminator);
|
|
275
|
-
const variantSerializer = variants[discriminator][1];
|
|
276
|
-
const variantBytes = variantSerializer.encode(variant);
|
|
277
|
-
return mergeBytes([variantPrefix, variantBytes]);
|
|
213
|
+
});
|
|
214
|
+
}
|
|
215
|
+
function getBytesCodec(config = {}) {
|
|
216
|
+
return combineCodec(getBytesEncoder(config), getBytesDecoder(config));
|
|
217
|
+
}
|
|
218
|
+
function getDataEnumEncoder(variants, config = {}) {
|
|
219
|
+
const prefix = config.size ?? getU8Encoder();
|
|
220
|
+
const fixedSize = getDataEnumFixedSize(variants, prefix);
|
|
221
|
+
return createEncoder({
|
|
222
|
+
...fixedSize !== null ? { fixedSize } : {
|
|
223
|
+
getSizeFromValue: (variant) => {
|
|
224
|
+
const discriminator = getVariantDiscriminator(variants, variant);
|
|
225
|
+
const variantEncoder = variants[discriminator][1];
|
|
226
|
+
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
|
|
227
|
+
},
|
|
228
|
+
maxSize: getDataEnumMaxSize(variants, prefix)
|
|
229
|
+
},
|
|
230
|
+
write: (variant, bytes, offset) => {
|
|
231
|
+
const discriminator = getVariantDiscriminator(variants, variant);
|
|
232
|
+
offset = prefix.write(discriminator, bytes, offset);
|
|
233
|
+
const variantEncoder = variants[discriminator][1];
|
|
234
|
+
return variantEncoder.write(variant, bytes, offset);
|
|
278
235
|
}
|
|
279
|
-
};
|
|
280
|
-
}
|
|
281
|
-
function getDataEnumDecoder(variants,
|
|
282
|
-
const prefix =
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
236
|
+
});
|
|
237
|
+
}
|
|
238
|
+
function getDataEnumDecoder(variants, config = {}) {
|
|
239
|
+
const prefix = config.size ?? getU8Decoder();
|
|
240
|
+
const fixedSize = getDataEnumFixedSize(variants, prefix);
|
|
241
|
+
return createDecoder({
|
|
242
|
+
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
|
|
243
|
+
read: (bytes, offset) => {
|
|
286
244
|
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
|
|
287
|
-
const [discriminator, dOffset] = prefix.
|
|
245
|
+
const [discriminator, dOffset] = prefix.read(bytes, offset);
|
|
288
246
|
offset = dOffset;
|
|
289
247
|
const variantField = variants[Number(discriminator)] ?? null;
|
|
290
248
|
if (!variantField) {
|
|
291
|
-
throw new
|
|
292
|
-
|
|
293
|
-
|
|
249
|
+
throw new SolanaError(SOLANA_ERROR__CODECS__ENUM_DISCRIMINATOR_OUT_OF_RANGE, {
|
|
250
|
+
discriminator,
|
|
251
|
+
maxRange: variants.length - 1,
|
|
252
|
+
minRange: 0
|
|
253
|
+
});
|
|
294
254
|
}
|
|
295
|
-
const [variant, vOffset] = variantField[1].
|
|
255
|
+
const [variant, vOffset] = variantField[1].read(bytes, offset);
|
|
296
256
|
offset = vOffset;
|
|
297
257
|
return [{ __kind: variantField[0], ...variant ?? {} }, offset];
|
|
298
258
|
}
|
|
299
|
-
};
|
|
259
|
+
});
|
|
300
260
|
}
|
|
301
|
-
function getDataEnumCodec(variants,
|
|
302
|
-
return combineCodec(
|
|
261
|
+
function getDataEnumCodec(variants, config = {}) {
|
|
262
|
+
return combineCodec(
|
|
263
|
+
getDataEnumEncoder(variants, config),
|
|
264
|
+
getDataEnumDecoder(variants, config)
|
|
265
|
+
);
|
|
303
266
|
}
|
|
304
|
-
function
|
|
305
|
-
if (
|
|
306
|
-
|
|
267
|
+
function getDataEnumFixedSize(variants, prefix) {
|
|
268
|
+
if (variants.length === 0)
|
|
269
|
+
return isFixedSize(prefix) ? prefix.fixedSize : null;
|
|
270
|
+
if (!isFixedSize(variants[0][1]))
|
|
271
|
+
return null;
|
|
272
|
+
const variantSize = variants[0][1].fixedSize;
|
|
273
|
+
const sameSizedVariants = variants.every(
|
|
274
|
+
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
|
|
275
|
+
);
|
|
276
|
+
if (!sameSizedVariants)
|
|
277
|
+
return null;
|
|
278
|
+
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
|
|
279
|
+
}
|
|
280
|
+
function getDataEnumMaxSize(variants, prefix) {
|
|
281
|
+
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
|
|
282
|
+
return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
|
|
283
|
+
}
|
|
284
|
+
function getVariantDiscriminator(variants, variant) {
|
|
285
|
+
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
|
|
286
|
+
if (discriminator < 0) {
|
|
287
|
+
throw new SolanaError(SOLANA_ERROR__CODECS__INVALID_DATA_ENUM_VARIANT, {
|
|
288
|
+
value: variant.__kind,
|
|
289
|
+
variants: variants.map(([key]) => key)
|
|
290
|
+
});
|
|
307
291
|
}
|
|
308
|
-
return
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
return
|
|
292
|
+
return discriminator;
|
|
293
|
+
}
|
|
294
|
+
function getTupleEncoder(items) {
|
|
295
|
+
const fixedSize = sumCodecSizes(items.map(getFixedSize));
|
|
296
|
+
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
|
|
297
|
+
return createEncoder({
|
|
298
|
+
...fixedSize === null ? {
|
|
299
|
+
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
|
|
300
|
+
maxSize
|
|
301
|
+
} : { fixedSize },
|
|
302
|
+
write: (value, bytes, offset) => {
|
|
303
|
+
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
|
|
304
|
+
items.forEach((item, index) => {
|
|
305
|
+
offset = item.write(value[index], bytes, offset);
|
|
306
|
+
});
|
|
307
|
+
return offset;
|
|
324
308
|
}
|
|
325
|
-
};
|
|
309
|
+
});
|
|
310
|
+
}
|
|
311
|
+
function getTupleDecoder(items) {
|
|
312
|
+
const fixedSize = sumCodecSizes(items.map(getFixedSize));
|
|
313
|
+
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
|
|
314
|
+
return createDecoder({
|
|
315
|
+
...fixedSize === null ? { maxSize } : { fixedSize },
|
|
316
|
+
read: (bytes, offset) => {
|
|
317
|
+
const values = [];
|
|
318
|
+
items.forEach((item) => {
|
|
319
|
+
const [newValue, newOffset] = item.read(bytes, offset);
|
|
320
|
+
values.push(newValue);
|
|
321
|
+
offset = newOffset;
|
|
322
|
+
});
|
|
323
|
+
return [values, offset];
|
|
324
|
+
}
|
|
325
|
+
});
|
|
326
326
|
}
|
|
327
|
-
function
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
327
|
+
function getTupleCodec(items) {
|
|
328
|
+
return combineCodec(
|
|
329
|
+
getTupleEncoder(items),
|
|
330
|
+
getTupleDecoder(items)
|
|
331
|
+
);
|
|
332
|
+
}
|
|
333
|
+
|
|
334
|
+
// src/map.ts
|
|
335
|
+
function getMapEncoder(key, value, config = {}) {
|
|
336
|
+
return mapEncoder(
|
|
337
|
+
getArrayEncoder(getTupleEncoder([key, value]), config),
|
|
338
|
+
(map) => [...map.entries()]
|
|
339
|
+
);
|
|
340
|
+
}
|
|
341
|
+
function getMapDecoder(key, value, config = {}) {
|
|
342
|
+
return mapDecoder(
|
|
343
|
+
getArrayDecoder(getTupleDecoder([key, value]), config),
|
|
344
|
+
(entries) => new Map(entries)
|
|
345
|
+
);
|
|
346
|
+
}
|
|
347
|
+
function getMapCodec(key, value, config = {}) {
|
|
348
|
+
return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
|
|
349
|
+
}
|
|
350
|
+
function getNullableEncoder(item, config = {}) {
|
|
351
|
+
const prefix = config.prefix ?? getU8Encoder();
|
|
352
|
+
const fixed = config.fixed ?? false;
|
|
353
|
+
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
|
|
354
|
+
if (fixed || isZeroSizeItem) {
|
|
355
|
+
assertIsFixedSize(item);
|
|
356
|
+
assertIsFixedSize(prefix);
|
|
357
|
+
const fixedSize = prefix.fixedSize + item.fixedSize;
|
|
358
|
+
return createEncoder({
|
|
359
|
+
fixedSize,
|
|
360
|
+
write: (option, bytes, offset) => {
|
|
361
|
+
const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
|
|
362
|
+
if (option !== null) {
|
|
363
|
+
item.write(option, bytes, prefixOffset);
|
|
364
|
+
}
|
|
365
|
+
return offset + fixedSize;
|
|
335
366
|
}
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
offset =
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
offset = kOffset;
|
|
346
|
-
const [decodedValue, vOffset] = value.decode(bytes, offset);
|
|
347
|
-
offset = vOffset;
|
|
348
|
-
map.set(decodedKey, decodedValue);
|
|
367
|
+
});
|
|
368
|
+
}
|
|
369
|
+
return createEncoder({
|
|
370
|
+
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0),
|
|
371
|
+
maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
|
|
372
|
+
write: (option, bytes, offset) => {
|
|
373
|
+
offset = prefix.write(Number(option !== null), bytes, offset);
|
|
374
|
+
if (option !== null) {
|
|
375
|
+
offset = item.write(option, bytes, offset);
|
|
349
376
|
}
|
|
350
|
-
return
|
|
377
|
+
return offset;
|
|
351
378
|
}
|
|
352
|
-
};
|
|
353
|
-
}
|
|
354
|
-
function
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
|
|
363
|
-
descriptionSuffix += "; fixed";
|
|
379
|
+
});
|
|
380
|
+
}
|
|
381
|
+
function getNullableDecoder(item, config = {}) {
|
|
382
|
+
const prefix = config.prefix ?? getU8Decoder();
|
|
383
|
+
const fixed = config.fixed ?? false;
|
|
384
|
+
let fixedSize = null;
|
|
385
|
+
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
|
|
386
|
+
if (fixed || isZeroSizeItem) {
|
|
387
|
+
assertIsFixedSize(item);
|
|
388
|
+
assertIsFixedSize(prefix);
|
|
364
389
|
fixedSize = prefix.fixedSize + item.fixedSize;
|
|
365
390
|
}
|
|
366
|
-
return {
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
|
|
370
|
-
};
|
|
371
|
-
}
|
|
372
|
-
function getNullableEncoder(item, options = {}) {
|
|
373
|
-
const prefix = options.prefix ?? getU8Encoder();
|
|
374
|
-
const fixed = options.fixed ?? false;
|
|
375
|
-
return {
|
|
376
|
-
...nullableCodecHelper(item, prefix, fixed, options.description),
|
|
377
|
-
encode: (option) => {
|
|
378
|
-
const prefixByte = prefix.encode(Number(option !== null));
|
|
379
|
-
let itemBytes = option !== null ? item.encode(option) : new Uint8Array();
|
|
380
|
-
itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes;
|
|
381
|
-
return mergeBytes([prefixByte, itemBytes]);
|
|
382
|
-
}
|
|
383
|
-
};
|
|
384
|
-
}
|
|
385
|
-
function getNullableDecoder(item, options = {}) {
|
|
386
|
-
const prefix = options.prefix ?? getU8Decoder();
|
|
387
|
-
const fixed = options.fixed ?? false;
|
|
388
|
-
return {
|
|
389
|
-
...nullableCodecHelper(item, prefix, fixed, options.description),
|
|
390
|
-
decode: (bytes, offset = 0) => {
|
|
391
|
+
return createDecoder({
|
|
392
|
+
...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
|
|
393
|
+
read: (bytes, offset) => {
|
|
391
394
|
if (bytes.length - offset <= 0) {
|
|
392
395
|
return [null, offset];
|
|
393
396
|
}
|
|
394
|
-
const
|
|
395
|
-
const [isSome, prefixOffset] = prefix.decode(bytes, offset);
|
|
396
|
-
offset = prefixOffset;
|
|
397
|
+
const [isSome, prefixOffset] = prefix.read(bytes, offset);
|
|
397
398
|
if (isSome === 0) {
|
|
398
|
-
return [null,
|
|
399
|
+
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
|
|
399
400
|
}
|
|
400
|
-
const [value, newOffset] = item.
|
|
401
|
-
offset
|
|
402
|
-
return [value, fixed ? fixedOffset : offset];
|
|
401
|
+
const [value, newOffset] = item.read(bytes, prefixOffset);
|
|
402
|
+
return [value, fixedSize !== null ? offset + fixedSize : newOffset];
|
|
403
403
|
}
|
|
404
|
-
};
|
|
404
|
+
});
|
|
405
|
+
}
|
|
406
|
+
function getNullableCodec(item, config = {}) {
|
|
407
|
+
const configCast = config;
|
|
408
|
+
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
|
|
409
|
+
}
|
|
410
|
+
function getScalarEnumEncoder(constructor, config = {}) {
|
|
411
|
+
const prefix = config.size ?? getU8Encoder();
|
|
412
|
+
const { minRange, maxRange, allStringInputs, enumKeys, enumValues } = getScalarEnumStats(constructor);
|
|
413
|
+
return mapEncoder(prefix, (value) => {
|
|
414
|
+
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
|
|
415
|
+
const isInvalidString = typeof value === "string" && !allStringInputs.includes(value);
|
|
416
|
+
if (isInvalidNumber || isInvalidString) {
|
|
417
|
+
throw new SolanaError(SOLANA_ERROR__CODECS__INVALID_SCALAR_ENUM_VARIANT, {
|
|
418
|
+
maxRange,
|
|
419
|
+
minRange,
|
|
420
|
+
value,
|
|
421
|
+
variants: allStringInputs
|
|
422
|
+
});
|
|
423
|
+
}
|
|
424
|
+
if (typeof value === "number")
|
|
425
|
+
return value;
|
|
426
|
+
const valueIndex = enumValues.indexOf(value);
|
|
427
|
+
if (valueIndex >= 0)
|
|
428
|
+
return valueIndex;
|
|
429
|
+
return enumKeys.indexOf(value);
|
|
430
|
+
});
|
|
431
|
+
}
|
|
432
|
+
function getScalarEnumDecoder(constructor, config = {}) {
|
|
433
|
+
const prefix = config.size ?? getU8Decoder();
|
|
434
|
+
const { minRange, maxRange, enumKeys } = getScalarEnumStats(constructor);
|
|
435
|
+
return mapDecoder(prefix, (value) => {
|
|
436
|
+
const valueAsNumber = Number(value);
|
|
437
|
+
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
|
|
438
|
+
throw new SolanaError(SOLANA_ERROR__CODECS__ENUM_DISCRIMINATOR_OUT_OF_RANGE, {
|
|
439
|
+
discriminator: valueAsNumber,
|
|
440
|
+
maxRange,
|
|
441
|
+
minRange
|
|
442
|
+
});
|
|
443
|
+
}
|
|
444
|
+
return constructor[enumKeys[valueAsNumber]];
|
|
445
|
+
});
|
|
405
446
|
}
|
|
406
|
-
function
|
|
407
|
-
return combineCodec(
|
|
447
|
+
function getScalarEnumCodec(constructor, config = {}) {
|
|
448
|
+
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
|
|
408
449
|
}
/**
 * Derives lookup metadata from a scalar enum's constructor object.
 *
 * TypeScript numeric enums compile to objects that also carry reverse
 * mappings (number -> variant name). Because integer-like keys enumerate
 * before string keys, those reverse entries come first; skipping one entry
 * per numeric value strips them, leaving only the name -> value pairs.
 *
 * Returns the variant keys and values, the valid discriminator range, and
 * every string that can identify a variant (keys plus string values, deduped).
 */
function getScalarEnumStats(constructor) {
    const numberValueCount = Object.values(constructor).filter(
        (value) => typeof value === "number"
    ).length;
    const variantEntries = Object.entries(constructor).slice(numberValueCount);
    const enumKeys = variantEntries.map(([key]) => key);
    const enumValues = variantEntries.map(([, value]) => value);
    // Keys first, then string values — mirrors the original insertion order.
    const stringInputSet = new Set(enumKeys);
    for (const value of enumValues) {
        if (typeof value === "string") stringInputSet.add(value);
    }
    return {
        allStringInputs: [...stringInputSet],
        enumKeys,
        enumValues,
        maxRange: enumValues.length - 1,
        minRange: 0
    };
}
/**
 * Creates an encoder for `Set` values.
 * A set is serialized exactly like an array of its entries, so this simply
 * wraps the array encoder and converts the set to an array first.
 */
function getSetEncoder(item, config = {}) {
    const arrayEncoder = getArrayEncoder(item, config);
    return mapEncoder(arrayEncoder, (set) => Array.from(set));
}
/**
 * Creates a decoder for `Set` values.
 * Delegates to the array decoder for the wire format, then collects the
 * decoded items into a `Set`.
 */
function getSetDecoder(item, config = {}) {
    const arrayDecoder = getArrayDecoder(item, config);
    return mapDecoder(arrayDecoder, (items) => new Set(items));
}
/**
 * Creates a codec for `Set` values by combining the set encoder and
 * decoder, both built from the same item codec and configuration.
 */
function getSetCodec(item, config = {}) {
    const encoder = getSetEncoder(item, config);
    const decoder = getSetDecoder(item, config);
    return combineCodec(encoder, decoder);
}
// Builds an encoder for an object ("struct") from an ordered list of
// [key, encoder] pairs. Fields are written sequentially in list order.
function getStructEncoder(fields) {
    const fieldCodecs = fields.map(([, codec]) => codec);
    // Sum of all field sizes; presumably null when any field is
    // variable-size (helpers defined elsewhere in this bundle — TODO confirm).
    const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
    // Normalize a null sum to undefined so the spread below produces
    // `maxSize: undefined` rather than `maxSize: null`.
    const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
    return createEncoder({
        // Fixed-size structs advertise `fixedSize`; otherwise the byte size is
        // computed per value by summing each field's encoded size.
        ...fixedSize === null ? {
            getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0),
            maxSize
        } : { fixedSize },
        write: (struct, bytes, offset) => {
            // Each field's `write` returns the offset where the next field starts.
            fields.forEach(([key, codec]) => {
                offset = codec.write(struct[key], bytes, offset);
            });
            return offset;
        }
    });
}
// Builds a decoder for an object ("struct") from an ordered list of
// [key, decoder] pairs. Fields are read sequentially and assembled into
// a plain object keyed by field name.
function getStructDecoder(fields) {
    const fieldCodecs = fields.map(([, codec]) => codec);
    // Sum of all field sizes; presumably null when any field is
    // variable-size (helpers defined elsewhere in this bundle — TODO confirm).
    const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
    // Normalize a null sum to undefined so the spread below produces
    // `maxSize: undefined` rather than `maxSize: null`.
    const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
    return createDecoder({
        // Fixed-size structs advertise `fixedSize`; variable-size ones only a maxSize.
        ...fixedSize === null ? { maxSize } : { fixedSize },
        read: (bytes, offset) => {
            const struct = {};
            // Each field's `read` returns [value, nextOffset].
            fields.forEach(([key, codec]) => {
                const [value, newOffset] = codec.read(bytes, offset);
                offset = newOffset;
                struct[key] = value;
            });
            return [struct, offset];
        }
    });
}
/**
 * Creates a codec for an object ("struct") by combining the struct encoder
 * and decoder built from the same [key, codec] field list.
 */
function getStructCodec(fields) {
    const encoder = getStructEncoder(fields);
    const decoder = getStructDecoder(fields);
    return combineCodec(encoder, decoder);
}
/**
 * Creates an encoder for `void`/unit values: it occupies zero bytes and
 * leaves the write offset untouched.
 */
function getUnitEncoder() {
    const writeNothing = (_value, _bytes, offset) => offset;
    return createEncoder({ fixedSize: 0, write: writeNothing });
}
/**
 * Creates a decoder for `void`/unit values: it consumes zero bytes and
 * always yields `undefined` at the unchanged offset.
 */
function getUnitDecoder() {
    const readNothing = (_bytes, offset) => [undefined, offset];
    return createDecoder({ fixedSize: 0, read: readNothing });
}
/**
 * Creates a codec for `void`/unit values by pairing the unit encoder with
 * the unit decoder.
 */
function getUnitCodec() {
    const encoder = getUnitEncoder();
    const decoder = getUnitDecoder();
    return combineCodec(encoder, decoder);
}
// Public API of this bundle: codecs (encoder/decoder/combined) for arrays,
// bit arrays, booleans, bytes, data enums, maps, nullables, scalar enums,
// sets, structs, tuples, and the unit type.
export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
//# sourceMappingURL=out.js.map
//# sourceMappingURL=index.browser.js.map
|