@solana/codecs-data-structures 2.0.0-experimental.e4483d3 → 2.0.0-experimental.e587f8d
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +482 -4
- package/dist/index.browser.cjs +349 -436
- package/dist/index.browser.cjs.map +1 -1
- package/dist/index.browser.js +351 -434
- package/dist/index.browser.js.map +1 -1
- package/dist/index.native.js +351 -434
- package/dist/index.native.js.map +1 -1
- package/dist/index.node.cjs +349 -436
- package/dist/index.node.cjs.map +1 -1
- package/dist/index.node.js +353 -434
- package/dist/index.node.js.map +1 -1
- package/dist/types/array.d.ts +35 -6
- package/dist/types/array.d.ts.map +1 -1
- package/dist/types/assertions.d.ts.map +1 -1
- package/dist/types/bit-array.d.ts +5 -5
- package/dist/types/bit-array.d.ts.map +1 -1
- package/dist/types/boolean.d.ts +18 -6
- package/dist/types/boolean.d.ts.map +1 -1
- package/dist/types/bytes.d.ts +14 -5
- package/dist/types/bytes.d.ts.map +1 -1
- package/dist/types/data-enum.d.ts +18 -20
- package/dist/types/data-enum.d.ts.map +1 -1
- package/dist/types/index.d.ts +13 -14
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/map.d.ts +24 -6
- package/dist/types/map.d.ts.map +1 -1
- package/dist/types/nullable.d.ts +24 -6
- package/dist/types/nullable.d.ts.map +1 -1
- package/dist/types/scalar-enum.d.ts +42 -11
- package/dist/types/scalar-enum.d.ts.map +1 -1
- package/dist/types/set.d.ts +24 -6
- package/dist/types/set.d.ts.map +1 -1
- package/dist/types/struct.d.ts +16 -21
- package/dist/types/struct.d.ts.map +1 -1
- package/dist/types/tuple.d.ts +22 -15
- package/dist/types/tuple.d.ts.map +1 -1
- package/dist/types/unit.d.ts +4 -12
- package/dist/types/unit.d.ts.map +1 -1
- package/dist/types/utils.d.ts +10 -2
- package/dist/types/utils.d.ts.map +1 -1
- package/package.json +13 -34
- package/dist/index.development.js +0 -889
- package/dist/index.development.js.map +0 -1
- package/dist/index.production.min.js +0 -51
- package/dist/types/array-like-codec-size.d.ts +0 -20
- package/dist/types/array-like-codec-size.d.ts.map +0 -1
package/dist/index.browser.js
CHANGED
@@ -1,9 +1,17 @@
-import {
+import { createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, assertIsFixedSize, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core';
 import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers';
+import { SolanaError, SOLANA_ERROR__CODECS__INVALID_NUMBER_OF_ITEMS, SOLANA_ERROR__CODECS__ENUM_DISCRIMINATOR_OUT_OF_RANGE, SOLANA_ERROR__CODECS__INVALID_DATA_ENUM_VARIANT, SOLANA_ERROR__CODECS__INVALID_SCALAR_ENUM_VARIANT } from '@solana/errors';

 // src/array.ts
-
-
+function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
+  if (expected !== actual) {
+    throw new SolanaError(SOLANA_ERROR__CODECS__INVALID_NUMBER_OF_ITEMS, {
+      actual,
+      codecDescription,
+      expected
+    });
+  }
+}
 function maxCodecSizes(sizes) {
   return sizes.reduce(
     (all, size) => all === null || size === null ? null : Math.max(all, size),
@@ -13,106 +21,88 @@ function maxCodecSizes(sizes) {
 function sumCodecSizes(sizes) {
   return sizes.reduce((all, size) => all === null || size === null ? null : all + size, 0);
 }
-
-
-function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
-  if (typeof size === "number") {
-    return [size, offset];
-  }
-  if (typeof size === "object") {
-    return size.decode(bytes, offset);
-  }
-  if (size === "remainder") {
-    const childrenSize = sumCodecSizes(childrenSizes);
-    if (childrenSize === null) {
-      throw new Error('Codecs of "remainder" size must have fixed-size items.');
-    }
-    const remainder = bytes.slice(offset).length;
-    if (remainder % childrenSize !== 0) {
-      throw new Error(
-        `The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
-      );
-    }
-    return [remainder / childrenSize, offset];
-  }
-  throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
-}
-function getArrayLikeCodecSizeDescription(size) {
-  return typeof size === "object" ? size.description : `${size}`;
+function getFixedSize(codec) {
+  return isFixedSize(codec) ? codec.fixedSize : null;
 }
-function
-
-  return null;
-  if (size === 0)
-    return 0;
-  const childrenSize = sumCodecSizes(childrenSizes);
-  return childrenSize === null ? null : childrenSize * size;
-}
-function getArrayLikeCodecSizePrefix(size, realSize) {
-  return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
-}
-
-// src/assertions.ts
-function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
-  if (expected !== actual) {
-    throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
-  }
+function getMaxSize(codec) {
+  return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
 }

 // src/array.ts
-function arrayCodecHelper(item, size, description) {
-  if (size === "remainder" && item.fixedSize === null) {
-    throw new Error('Codecs of "remainder" size must have fixed-size items.');
-  }
-  return {
-    description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
-    fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
-    maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
-  };
-}
 function getArrayEncoder(item, config = {}) {
   const size = config.size ?? getU32Encoder();
-
-
-
+  const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
+  const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
+  return createEncoder({
+    ...fixedSize !== null ? { fixedSize } : {
+      getSizeFromValue: (array) => {
+        const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
+        return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
+      },
+      maxSize
+    },
+    write: (array, bytes, offset) => {
       if (typeof size === "number") {
-        assertValidNumberOfItemsForCodec("array", size,
+        assertValidNumberOfItemsForCodec("array", size, array.length);
+      }
+      if (typeof size === "object") {
+        offset = size.write(array.length, bytes, offset);
       }
-
+      array.forEach((value) => {
+        offset = item.write(value, bytes, offset);
+      });
+      return offset;
     }
-  };
+  });
 }
 function getArrayDecoder(item, config = {}) {
   const size = config.size ?? getU32Decoder();
-
-
-
+  const itemSize = getFixedSize(item);
+  const fixedSize = computeArrayLikeCodecSize(size, itemSize);
+  const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
+  return createDecoder({
+    ...fixedSize !== null ? { fixedSize } : { maxSize },
+    read: (bytes, offset) => {
+      const array = [];
       if (typeof size === "object" && bytes.slice(offset).length === 0) {
-        return [
+        return [array, offset];
       }
-
+      if (size === "remainder") {
+        while (offset < bytes.length) {
+          const [value, newOffset2] = item.read(bytes, offset);
+          offset = newOffset2;
+          array.push(value);
+        }
+        return [array, offset];
+      }
+      const [resolvedSize, newOffset] = typeof size === "number" ? [size, offset] : size.read(bytes, offset);
       offset = newOffset;
-      const values = [];
       for (let i = 0; i < resolvedSize; i += 1) {
-        const [value, newOffset2] = item.
-        values.push(value);
+        const [value, newOffset2] = item.read(bytes, offset);
        offset = newOffset2;
+        array.push(value);
      }
-      return [
+      return [array, offset];
    }
-  };
+  });
 }
 function getArrayCodec(item, config = {}) {
   return combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config));
 }
-
+function computeArrayLikeCodecSize(size, itemSize) {
+  if (typeof size !== "number")
+    return null;
+  if (size === 0)
+    return 0;
+  return itemSize === null ? null : itemSize * size;
+}
+function getBitArrayEncoder(size, config = {}) {
   const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
   const backward = parsedConfig.backward ?? false;
-
-
-
-
-  const bytes = [];
+  return createEncoder({
+    fixedSize: size,
+    write(value, bytes, offset) {
+      const bytesToAdd = [];
      for (let i = 0; i < size; i += 1) {
        let byte = 0;
        for (let j = 0; j < 8; j += 1) {
@@ -120,23 +110,22 @@ var getBitArrayEncoder = (size, config = {}) => {
          byte |= feature << (backward ? j : 7 - j);
        }
        if (backward) {
-
+          bytesToAdd.unshift(byte);
        } else {
-
+          bytesToAdd.push(byte);
        }
      }
-
-
-
-
-
-}
-var getBitArrayDecoder = (size, config = {}) => {
+      bytes.set(bytesToAdd, offset);
+      return size;
+    }
+  });
+}
+function getBitArrayDecoder(size, config = {}) {
   const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
   const backward = parsedConfig.backward ?? false;
-
-
-
+  return createDecoder({
+    fixedSize: size,
+    read(bytes, offset) {
      assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);
      const booleans = [];
      let slice = bytes.slice(offset, offset + size);
@@ -153,468 +142,396 @@ var getBitArrayDecoder = (size, config = {}) => {
      }
      });
      return [booleans, offset + size];
-}
-
-
-
-
-}
-var getBitArrayCodec = (size, config = {}) => combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
+    }
+  });
+}
+function getBitArrayCodec(size, config = {}) {
+  return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
+}
 function getBooleanEncoder(config = {}) {
   const size = config.size ?? getU8Encoder();
-
-  return
-    description: config.description ?? `bool(${size.description})`,
-    encode: (value) => size.encode(value ? 1 : 0),
-    fixedSize: size.fixedSize,
-    maxSize: size.fixedSize
-  };
+  assertIsFixedSize(size);
+  return mapEncoder(size, (value) => value ? 1 : 0);
 }
 function getBooleanDecoder(config = {}) {
   const size = config.size ?? getU8Decoder();
-
-  return
-    decode: (bytes, offset = 0) => {
-      assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
-      const [value, vOffset] = size.decode(bytes, offset);
-      return [value === 1, vOffset];
-    },
-    description: config.description ?? `bool(${size.description})`,
-    fixedSize: size.fixedSize,
-    maxSize: size.fixedSize
-  };
+  assertIsFixedSize(size);
+  return mapDecoder(size, (value) => Number(value) === 1);
 }
 function getBooleanCodec(config = {}) {
   return combineCodec(getBooleanEncoder(config), getBooleanDecoder(config));
 }
 function getBytesEncoder(config = {}) {
   const size = config.size ?? "variable";
-  const
-
-
-
-
-
-
-  };
+  const byteEncoder = createEncoder({
+    getSizeFromValue: (value) => value.length,
+    write: (value, bytes, offset) => {
+      bytes.set(value, offset);
+      return offset + value.length;
+    }
+  });
   if (size === "variable") {
     return byteEncoder;
   }
   if (typeof size === "number") {
-    return fixEncoder(byteEncoder, size
+    return fixEncoder(byteEncoder, size);
   }
-  return {
-
-
-
-
-      return mergeBytes([lengthBytes, contentBytes]);
+  return createEncoder({
+    getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
+    write: (value, bytes, offset) => {
+      offset = size.write(value.length, bytes, offset);
+      return byteEncoder.write(value, bytes, offset);
    }
-  };
+  });
 }
 function getBytesDecoder(config = {}) {
   const size = config.size ?? "variable";
-  const
-
-  const byteDecoder = {
-    decode: (bytes, offset = 0) => {
+  const byteDecoder = createDecoder({
+    read: (bytes, offset) => {
      const slice = bytes.slice(offset);
      return [slice, offset + slice.length];
-    }
-
-    fixedSize: null,
-    maxSize: null
-  };
+    }
+  });
  if (size === "variable") {
    return byteDecoder;
  }
  if (typeof size === "number") {
-    return fixDecoder(byteDecoder, size
+    return fixDecoder(byteDecoder, size);
  }
-  return {
-
-    decode: (bytes, offset = 0) => {
+  return createDecoder({
+    read: (bytes, offset) => {
      assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
-      const [lengthBigInt, lengthOffset] = size.
+      const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
      const length = Number(lengthBigInt);
      offset = lengthOffset;
      const contentBytes = bytes.slice(offset, offset + length);
      assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
-      const [value, contentOffset] = byteDecoder.
+      const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
      offset += contentOffset;
      return [value, offset];
    }
-  };
+  });
 }
 function getBytesCodec(config = {}) {
   return combineCodec(getBytesEncoder(config), getBytesDecoder(config));
 }
-function dataEnumCodecHelper(variants, prefix, description) {
-  const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
-  const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
-  const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
-  const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
-  return {
-    description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
-    fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
-    maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
-  };
-}
 function getDataEnumEncoder(variants, config = {}) {
   const prefix = config.size ?? getU8Encoder();
-
-
-
-
-
-
-
-
-
-
-
-  const
-
+  const fixedSize = getDataEnumFixedSize(variants, prefix);
+  return createEncoder({
+    ...fixedSize !== null ? { fixedSize } : {
+      getSizeFromValue: (variant) => {
+        const discriminator = getVariantDiscriminator(variants, variant);
+        const variantEncoder = variants[discriminator][1];
+        return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
+      },
+      maxSize: getDataEnumMaxSize(variants, prefix)
+    },
+    write: (variant, bytes, offset) => {
+      const discriminator = getVariantDiscriminator(variants, variant);
+      offset = prefix.write(discriminator, bytes, offset);
+      const variantEncoder = variants[discriminator][1];
+      return variantEncoder.write(variant, bytes, offset);
    }
-  };
+  });
 }
 function getDataEnumDecoder(variants, config = {}) {
   const prefix = config.size ?? getU8Decoder();
-
-
-
+  const fixedSize = getDataEnumFixedSize(variants, prefix);
+  return createDecoder({
+    ...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
+    read: (bytes, offset) => {
      assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
-      const [discriminator, dOffset] = prefix.
+      const [discriminator, dOffset] = prefix.read(bytes, offset);
      offset = dOffset;
      const variantField = variants[Number(discriminator)] ?? null;
      if (!variantField) {
-        throw new
-
-
+        throw new SolanaError(SOLANA_ERROR__CODECS__ENUM_DISCRIMINATOR_OUT_OF_RANGE, {
+          discriminator,
+          maxRange: variants.length - 1,
+          minRange: 0
+        });
      }
-      const [variant, vOffset] = variantField[1].
+      const [variant, vOffset] = variantField[1].read(bytes, offset);
      offset = vOffset;
      return [{ __kind: variantField[0], ...variant ?? {} }, offset];
    }
-  };
+  });
 }
 function getDataEnumCodec(variants, config = {}) {
-  return combineCodec(
+  return combineCodec(
+    getDataEnumEncoder(variants, config),
+    getDataEnumDecoder(variants, config)
+  );
 }
-function
-  if (
-
+function getDataEnumFixedSize(variants, prefix) {
+  if (variants.length === 0)
+    return isFixedSize(prefix) ? prefix.fixedSize : null;
+  if (!isFixedSize(variants[0][1]))
+    return null;
+  const variantSize = variants[0][1].fixedSize;
+  const sameSizedVariants = variants.every(
+    (variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
+  );
+  if (!sameSizedVariants)
+    return null;
+  return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
+}
+function getDataEnumMaxSize(variants, prefix) {
+  const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
+  return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
+}
+function getVariantDiscriminator(variants, variant) {
+  const discriminator = variants.findIndex(([key]) => variant.__kind === key);
+  if (discriminator < 0) {
+    throw new SolanaError(SOLANA_ERROR__CODECS__INVALID_DATA_ENUM_VARIANT, {
+      value: variant.__kind,
+      variants: variants.map(([key]) => key)
+    });
  }
-  return
-
-
-
-
+  return discriminator;
+}
+function getTupleEncoder(items) {
+  const fixedSize = sumCodecSizes(items.map(getFixedSize));
+  const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
+  return createEncoder({
+    ...fixedSize === null ? {
+      getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
+      maxSize
+    } : { fixedSize },
+    write: (value, bytes, offset) => {
+      assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
+      items.forEach((item, index) => {
+        offset = item.write(value[index], bytes, offset);
+      });
+      return offset;
+    }
+  });
+}
+function getTupleDecoder(items) {
+  const fixedSize = sumCodecSizes(items.map(getFixedSize));
+  const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
+  return createDecoder({
+    ...fixedSize === null ? { maxSize } : { fixedSize },
+    read: (bytes, offset) => {
+      const values = [];
+      items.forEach((item) => {
+        const [newValue, newOffset] = item.read(bytes, offset);
+        values.push(newValue);
+        offset = newOffset;
+      });
+      return [values, offset];
+    }
+  });
 }
+function getTupleCodec(items) {
+  return combineCodec(
+    getTupleEncoder(items),
+    getTupleDecoder(items)
+  );
+}
+
+// src/map.ts
 function getMapEncoder(key, value, config = {}) {
-
-
-
-
-      if (typeof size === "number") {
-        assertValidNumberOfItemsForCodec("map", size, map.size);
-      }
-      const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)]));
-      return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]);
-    }
-  };
+  return mapEncoder(
+    getArrayEncoder(getTupleEncoder([key, value]), config),
+    (map) => [...map.entries()]
+  );
 }
 function getMapDecoder(key, value, config = {}) {
-
-
-
-
-      const map = /* @__PURE__ */ new Map();
-      if (typeof size === "object" && bytes.slice(offset).length === 0) {
-        return [map, offset];
-      }
-      const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(
-        size,
-        [key.fixedSize, value.fixedSize],
-        bytes,
-        offset
-      );
-      offset = newOffset;
-      for (let i = 0; i < resolvedSize; i += 1) {
-        const [decodedKey, kOffset] = key.decode(bytes, offset);
-        offset = kOffset;
-        const [decodedValue, vOffset] = value.decode(bytes, offset);
-        offset = vOffset;
-        map.set(decodedKey, decodedValue);
-      }
-      return [map, offset];
-    }
-  };
+  return mapDecoder(
+    getArrayDecoder(getTupleDecoder([key, value]), config),
+    (entries) => new Map(entries)
+  );
 }
 function getMapCodec(key, value, config = {}) {
   return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
 }
-function nullableCodecHelper(item, prefix, fixed, description) {
-  let descriptionSuffix = `; ${prefix.description}`;
-  let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
-  if (fixed) {
-    assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
-    assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
-    descriptionSuffix += "; fixed";
-    fixedSize = prefix.fixedSize + item.fixedSize;
-  }
-  return {
-    description: description ?? `nullable(${item.description + descriptionSuffix})`,
-    fixedSize,
-    maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
-  };
-}
 function getNullableEncoder(item, config = {}) {
   const prefix = config.prefix ?? getU8Encoder();
   const fixed = config.fixed ?? false;
-
-
-
-
-
-
-
+  const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
+  if (fixed || isZeroSizeItem) {
+    assertIsFixedSize(item);
+    assertIsFixedSize(prefix);
+    const fixedSize = prefix.fixedSize + item.fixedSize;
+    return createEncoder({
+      fixedSize,
+      write: (option, bytes, offset) => {
+        const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
+        if (option !== null) {
+          item.write(option, bytes, prefixOffset);
+        }
+        return offset + fixedSize;
+      }
+    });
+  }
+  return createEncoder({
+    getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0),
+    maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
+    write: (option, bytes, offset) => {
+      offset = prefix.write(Number(option !== null), bytes, offset);
+      if (option !== null) {
+        offset = item.write(option, bytes, offset);
+      }
+      return offset;
    }
-  };
+  });
 }
 function getNullableDecoder(item, config = {}) {
   const prefix = config.prefix ?? getU8Decoder();
   const fixed = config.fixed ?? false;
-
-
-
+  let fixedSize = null;
+  const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
+  if (fixed || isZeroSizeItem) {
+    assertIsFixedSize(item);
+    assertIsFixedSize(prefix);
+    fixedSize = prefix.fixedSize + item.fixedSize;
+  }
+  return createDecoder({
+    ...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
+    read: (bytes, offset) => {
      if (bytes.length - offset <= 0) {
        return [null, offset];
      }
-      const
-      const [isSome, prefixOffset] = prefix.decode(bytes, offset);
-      offset = prefixOffset;
+      const [isSome, prefixOffset] = prefix.read(bytes, offset);
      if (isSome === 0) {
-        return [null,
+        return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
      }
-      const [value, newOffset] = item.
-      offset
-      return [value, fixed ? fixedOffset : offset];
+      const [value, newOffset] = item.read(bytes, prefixOffset);
+      return [value, fixedSize !== null ? offset + fixedSize : newOffset];
    }
-  };
+  });
 }
 function getNullableCodec(item, config = {}) {
-
-
-function scalarEnumCoderHelper(constructor, prefix, description) {
-  const enumKeys = Object.keys(constructor);
-  const enumValues = Object.values(constructor);
-  const isNumericEnum = enumValues.some((v) => typeof v === "number");
-  const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", ");
-  const minRange = 0;
-  const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;
-  const stringValues = isNumericEnum ? [...enumKeys] : [.../* @__PURE__ */ new Set([...enumKeys, ...enumValues])];
-  return {
-    description: description ?? `enum(${valueDescriptions}; ${prefix.description})`,
-    enumKeys,
-    enumValues,
-    fixedSize: prefix.fixedSize,
-    isNumericEnum,
-    maxRange,
-    maxSize: prefix.maxSize,
-    minRange,
-    stringValues
-  };
+  const configCast = config;
+  return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
 }
 function getScalarEnumEncoder(constructor, config = {}) {
   const prefix = config.size ?? getU8Encoder();
-  const {
-  return {
-
-
-
-
-
-
-
-
-  }
-
-
-
-
-
-  return
-
-
-    maxSize
-  };
+  const { minRange, maxRange, allStringInputs, enumKeys, enumValues } = getScalarEnumStats(constructor);
+  return mapEncoder(prefix, (value) => {
+    const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
+    const isInvalidString = typeof value === "string" && !allStringInputs.includes(value);
+    if (isInvalidNumber || isInvalidString) {
+      throw new SolanaError(SOLANA_ERROR__CODECS__INVALID_SCALAR_ENUM_VARIANT, {
+        maxRange,
+        minRange,
+        value,
+        variants: allStringInputs
+      });
+    }
+    if (typeof value === "number")
+      return value;
+    const valueIndex = enumValues.indexOf(value);
+    if (valueIndex >= 0)
+      return valueIndex;
+    return enumKeys.indexOf(value);
+  });
 }
 function getScalarEnumDecoder(constructor, config = {}) {
   const prefix = config.size ?? getU8Decoder();
-  const {
-
-
-
-
-
-
-
-
-
-
-
-      throw new Error(
-        `Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
-      );
-    }
-    return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
-  },
-  description,
-  fixedSize,
-  maxSize
-  };
+  const { minRange, maxRange, enumKeys } = getScalarEnumStats(constructor);
+  return mapDecoder(prefix, (value) => {
+    const valueAsNumber = Number(value);
+    if (valueAsNumber < minRange || valueAsNumber > maxRange) {
+      throw new SolanaError(SOLANA_ERROR__CODECS__ENUM_DISCRIMINATOR_OUT_OF_RANGE, {
+        discriminator: valueAsNumber,
+        maxRange,
+        minRange
+      });
+    }
+    return constructor[enumKeys[valueAsNumber]];
+  });
 }
 function getScalarEnumCodec(constructor, config = {}) {
   return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
 }
-function
-
-
-
+function getScalarEnumStats(constructor) {
+  const numericValues = Object.values(constructor).filter((v) => typeof v === "number");
+  const deduplicatedConstructor = Object.fromEntries(
+    Object.entries(constructor).slice(numericValues.length)
+  );
+  const enumKeys = Object.keys(deduplicatedConstructor);
+  const enumValues = Object.values(deduplicatedConstructor);
+  const minRange = 0;
+  const maxRange = enumValues.length - 1;
+  const allStringInputs = [
+    .../* @__PURE__ */ new Set([...enumKeys, ...enumValues.filter((v) => typeof v === "string")])
+  ];
   return {
-
-
-
+    allStringInputs,
+    enumKeys,
+    enumValues,
+    maxRange,
+    minRange
  };
 }
 function getSetEncoder(item, config = {}) {
-
-  return {
-    ...setCodecHelper(item, size, config.description),
-    encode: (set) => {
-      if (typeof size === "number" && set.size !== size) {
-        assertValidNumberOfItemsForCodec("set", size, set.size);
-      }
-      const itemBytes = Array.from(set, (value) => item.encode(value));
-      return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
-    }
-  };
+  return mapEncoder(getArrayEncoder(item, config), (set) => [...set]);
 }
 function getSetDecoder(item, config = {}) {
-
-  return {
-    ...setCodecHelper(item, size, config.description),
-    decode: (bytes, offset = 0) => {
-      const set = /* @__PURE__ */ new Set();
-      if (typeof size === "object" && bytes.slice(offset).length === 0) {
-        return [set, offset];
-      }
-      const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
-      offset = newOffset;
-      for (let i = 0; i < resolvedSize; i += 1) {
-        const [value, newOffset2] = item.decode(bytes, offset);
-        offset = newOffset2;
-        set.add(value);
-      }
-      return [set, offset];
-    }
-  };
+  return mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries));
 }
 function getSetCodec(item, config = {}) {
   return combineCodec(getSetEncoder(item, config), getSetDecoder(item, config));
 }
-function
-  const
-
-
-
-
-
-
-
-
-
-
-
-  return
+function getStructEncoder(fields) {
+  const fieldCodecs = fields.map(([, codec]) => codec);
+  const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
+  const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
+  return createEncoder({
+    ...fixedSize === null ? {
+      getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0),
+      maxSize
+    } : { fixedSize },
+    write: (struct, bytes, offset) => {
+      fields.forEach(([key, codec]) => {
+        offset = codec.write(struct[key], bytes, offset);
+      });
+      return offset;
    }
-  };
-}
-function getStructDecoder(fields
-
-
-
+  });
+}
+function getStructDecoder(fields) {
+  const fieldCodecs = fields.map(([, codec]) => codec);
+  const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
+  const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
+  return createDecoder({
+    ...fixedSize === null ? { maxSize } : { fixedSize },
+    read: (bytes, offset) => {
      const struct = {};
      fields.forEach(([key, codec]) => {
-        const [value, newOffset] = codec.
+        const [value, newOffset] = codec.read(bytes, offset);
        offset = newOffset;
        struct[key] = value;
      });
      return [struct, offset];
    }
-  };
-}
-function getStructCodec(fields, config = {}) {
-  return combineCodec(getStructEncoder(fields, config), getStructDecoder(fields, config));
-}
-function tupleCodecHelper(items, description) {
-  const itemDescriptions = items.map((item) => item.description).join(", ");
-  return {
-    description: description ?? `tuple(${itemDescriptions})`,
-    fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)),
-    maxSize: sumCodecSizes(items.map((item) => item.maxSize))
-  };
-}
-function getTupleEncoder(items, config = {}) {
-  return {
-    ...tupleCodecHelper(items, config.description),
-    encode: (value) => {
-      assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
-      return mergeBytes(items.map((item, index) => item.encode(value[index])));
-    }
-  };
-}
-function getTupleDecoder(items, config = {}) {
-  return {
-    ...tupleCodecHelper(items, config.description),
-    decode: (bytes, offset = 0) => {
-      const values = [];
-      items.forEach((codec) => {
-        const [newValue, newOffset] = codec.decode(bytes, offset);
-        values.push(newValue);
-        offset = newOffset;
-      });
-      return [values, offset];
-    }
-  };
+  });
 }
-function
+function getStructCodec(fields) {
   return combineCodec(
-
-
+    getStructEncoder(fields),
+    getStructDecoder(fields)
  );
 }
-function getUnitEncoder(
-  return {
-    description: config.description ?? "unit",
-    encode: () => new Uint8Array(),
+function getUnitEncoder() {
+  return createEncoder({
    fixedSize: 0,
-
-  };
+    write: (_value, _bytes, offset) => offset
+  });
 }
-function getUnitDecoder(
-  return {
-    decode: (_bytes, offset = 0) => [void 0, offset],
-    description: config.description ?? "unit",
+function getUnitDecoder() {
+  return createDecoder({
    fixedSize: 0,
-
-  };
+    read: (_bytes, offset) => [void 0, offset]
+  });
 }
-function getUnitCodec(
-  return combineCodec(getUnitEncoder(
+function getUnitCodec() {
+  return combineCodec(getUnitEncoder(), getUnitDecoder());
 }

-export { assertValidNumberOfItemsForCodec,
+export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
 //# sourceMappingURL=out.js.map
 //# sourceMappingURL=index.browser.js.map