@2702rebels/wpidata 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +28 -0
- package/README.md +5 -0
- package/dist/abstractions.cjs +0 -0
- package/dist/abstractions.d.cts +246 -0
- package/dist/abstractions.d.cts.map +1 -0
- package/dist/abstractions.d.mts +246 -0
- package/dist/abstractions.d.mts.map +1 -0
- package/dist/abstractions.mjs +1 -0
- package/dist/formats/json.cjs +32 -0
- package/dist/formats/json.d.cts +14 -0
- package/dist/formats/json.d.cts.map +1 -0
- package/dist/formats/json.d.mts +14 -0
- package/dist/formats/json.d.mts.map +1 -0
- package/dist/formats/json.mjs +33 -0
- package/dist/formats/json.mjs.map +1 -0
- package/dist/formats/msgpack.cjs +30 -0
- package/dist/formats/msgpack.d.cts +14 -0
- package/dist/formats/msgpack.d.cts.map +1 -0
- package/dist/formats/msgpack.d.mts +14 -0
- package/dist/formats/msgpack.d.mts.map +1 -0
- package/dist/formats/msgpack.mjs +31 -0
- package/dist/formats/msgpack.mjs.map +1 -0
- package/dist/formats/protobuf.cjs +130 -0
- package/dist/formats/protobuf.d.cts +68 -0
- package/dist/formats/protobuf.d.cts.map +1 -0
- package/dist/formats/protobuf.d.mts +68 -0
- package/dist/formats/protobuf.d.mts.map +1 -0
- package/dist/formats/protobuf.mjs +128 -0
- package/dist/formats/protobuf.mjs.map +1 -0
- package/dist/formats/struct.cjs +593 -0
- package/dist/formats/struct.d.cts +134 -0
- package/dist/formats/struct.d.cts.map +1 -0
- package/dist/formats/struct.d.mts +134 -0
- package/dist/formats/struct.d.mts.map +1 -0
- package/dist/formats/struct.mjs +591 -0
- package/dist/formats/struct.mjs.map +1 -0
- package/dist/sink.cjs +360 -0
- package/dist/sink.d.cts +93 -0
- package/dist/sink.d.cts.map +1 -0
- package/dist/sink.d.mts +93 -0
- package/dist/sink.d.mts.map +1 -0
- package/dist/sink.mjs +361 -0
- package/dist/sink.mjs.map +1 -0
- package/dist/types/protobuf.cjs +0 -0
- package/dist/types/protobuf.d.cts +302 -0
- package/dist/types/protobuf.d.cts.map +1 -0
- package/dist/types/protobuf.d.mts +302 -0
- package/dist/types/protobuf.d.mts.map +1 -0
- package/dist/types/protobuf.mjs +1 -0
- package/dist/types/sendable.cjs +0 -0
- package/dist/types/sendable.d.cts +225 -0
- package/dist/types/sendable.d.cts.map +1 -0
- package/dist/types/sendable.d.mts +225 -0
- package/dist/types/sendable.d.mts.map +1 -0
- package/dist/types/sendable.mjs +1 -0
- package/dist/types/struct.cjs +0 -0
- package/dist/types/struct.d.cts +304 -0
- package/dist/types/struct.d.cts.map +1 -0
- package/dist/types/struct.d.mts +304 -0
- package/dist/types/struct.d.mts.map +1 -0
- package/dist/types/struct.mjs +1 -0
- package/dist/utils.cjs +140 -0
- package/dist/utils.d.cts +40 -0
- package/dist/utils.d.cts.map +1 -0
- package/dist/utils.d.mts +40 -0
- package/dist/utils.d.mts.map +1 -0
- package/dist/utils.mjs +135 -0
- package/dist/utils.mjs.map +1 -0
- package/package.json +51 -0
- package/src/abstractions.ts +308 -0
- package/src/formats/json.ts +53 -0
- package/src/formats/msgpack.ts +42 -0
- package/src/formats/protobuf.ts +213 -0
- package/src/formats/struct.test.ts +814 -0
- package/src/formats/struct.ts +992 -0
- package/src/sink.ts +611 -0
- package/src/types/protobuf.ts +334 -0
- package/src/types/sendable.ts +244 -0
- package/src/types/struct.ts +333 -0
- package/src/utils.ts +241 -0
|
@@ -0,0 +1,593 @@
|
|
|
1
|
+
const require_utils = require('../utils.cjs');
|
|
2
|
+
|
|
3
|
+
//#region src/formats/struct.ts
|
|
4
|
+
/** Builds an `Error` carrying the supplied message. */
const error = (message) => new Error(message);
|
|
7
|
+
// Shared UTF-8 codecs for string fields and schema text.
// `fatal: true` makes the decoder throw a TypeError on malformed byte
// sequences instead of silently substituting U+FFFD.
const utf8decoder = new TextDecoder("utf-8", { fatal: true });
const utf8encoder = new TextEncoder();
|
|
9
|
+
/**
 * Unpacks serialized struct data into a plain JSON object.
 *
 * Implementation of the WPILib packed struct serialization protocol
 * https://github.com/wpilibsuite/allwpilib/blob/main/wpiutil/doc/struct.adoc
 *
 * @param name struct type name
 * @param data serialized binary data
 * @param repository repository of available descriptors
 * @param options additional options (`useEnum` maps numeric values to enum names)
 * @returns object populated with the decoded fields
 * @throws Error when the type is unknown or not fully resolved
 */
function unpack(name, data, repository, options) {
	const descriptor = repository.descriptors.get(name);
	if (descriptor == null) {
		throw error(`Failed to unpack struct data: missing '${name}' type definition`);
	}
	if (descriptor.size === 0) {
		throw error(`Failed to unpack struct data: '${name}' type definition has unresolved dependencies`);
	}
	// Without `useEnum` values pass through untouched.
	const identity = (_, v) => v;
	const transformer = options?.useEnum ? transformEnums : identity;
	const sink = {};
	unpackStruct(sink, descriptor, require_utils.toDataView(data), 0, transformer);
	return sink;
}
|
|
28
|
+
/**
 * Packs a JSON object into serialized struct data.
 *
 * @param name struct type name
 * @param value JSON object to pack
 * @param repository repository of available descriptors
 * @returns Uint8Array containing the serialized data
 * @throws Error when the type is unknown or not fully resolved
 */
function pack(name, value, repository) {
	const descriptor = repository.descriptors.get(name);
	if (descriptor == null) {
		throw error(`Failed to pack struct data: missing '${name}' type definition`);
	}
	if (descriptor.size === 0) {
		throw error(`Failed to pack struct data: '${name}' type definition has unresolved dependencies`);
	}
	// ArrayBuffer is zero-initialized, so untouched padding stays 0.
	const buffer = new ArrayBuffer(descriptor.size);
	packStruct(value, descriptor, new DataView(buffer), 0, transformValue);
	return new Uint8Array(buffer);
}
|
|
44
|
+
/**
 * Transforms a field value according to the field descriptor.
 *
 * Converts an enum label (string) into its numeric representation when the
 * field declares an enum mapping; any other value is returned unchanged.
 */
function transformValue(field, value) {
	if (field.enum == null || typeof value !== "string") return value;
	for (const [numeric, label] of field.enum) {
		if (label === value) return numeric;
	}
	return value;
}
|
|
55
|
+
/** Maps a numeric value to its enum name for fields that declare enums; passes everything else through. */
function transformEnums(field, value) {
	if (field.enum == null || typeof value !== "number") return value;
	return field.enum.get(value) ?? value;
}
|
|
63
|
+
/**
 * Unpacks data per descriptor specification and populates `sink` placeholder instance.
 *
 * @param sink target object to populate with parsed data
 * @param descriptor struct type descriptor
 * @param view source buffer view
 * @param byteOffset offset in bytes within `view`
 * @param transformer primitive field value transformer
 * @throws Error when a referenced type is still unresolved
 */
function unpackStruct(sink, descriptor, view, byteOffset, transformer) {
	for (const field of descriptor.fields) {
		if (field.type === "ref") {
			// Nested struct: recurse into a fresh object.
			if (field.typeRef == null || typeof field.typeRef !== "object") throw error(`Failed to unpack struct data: field '${field.identifier}' references unresolved type`);
			const nested = {};
			unpackStruct(nested, field.typeRef, view, byteOffset + field.offset, transformer);
			sink[field.identifier] = nested;
		} else if (field.arraySize != null) {
			if (field.type === "char") {
				// char[] is a UTF-8 string, not an array of characters.
				sink[field.identifier] = transformer(field, decodeStringValue(view, byteOffset + field.offset, field.arraySize));
			} else {
				const result = [];
				for (let i = 0; i < field.arraySize; ++i) result.push(transformer(field, decodePrimitiveValue(field, view, byteOffset + field.offset + i * field.size)));
				// BUGFIX: the decoded array was previously discarded — it was
				// never assigned to the sink, so array fields went missing.
				sink[field.identifier] = result;
			}
		} else if (field.bitWidth != null) {
			sink[field.identifier] = transformer(field, decodeBitFieldValue(field, view, byteOffset + field.offset));
		} else {
			sink[field.identifier] = transformer(field, decodePrimitiveValue(field, view, byteOffset + field.offset));
		}
	}
}
|
|
86
|
+
/**
 * Packs data per descriptor specification.
 *
 * @param source source object to pack
 * @param descriptor struct type descriptor
 * @param view target buffer view
 * @param byteOffset offset in bytes within `view`
 * @param transformer primitive field value transformer
 * @throws Error when a referenced type is still unresolved
 */
function packStruct(source, descriptor, view, byteOffset, transformer) {
	for (const field of descriptor.fields) {
		const value = source[field.identifier];
		if (field.type === "ref") {
			if (field.typeRef == null || typeof field.typeRef !== "object") throw error(`Failed to pack struct data: field '${field.identifier}' references unresolved type`);
			// Missing nested object packs as all-zero fields.
			packStruct(value ?? {}, field.typeRef, view, byteOffset + field.offset, transformer);
		} else if (field.arraySize != null) {
			if (field.type === "char") {
				encodeStringValue(view, byteOffset + field.offset, field.arraySize, transformer(field, value) ?? "");
			} else {
				// BUGFIX: encode each array element (`value?.[i]`); previously the
				// whole array was passed for every slot, which Number()-coerces to
				// NaN/garbage for multi-element arrays. Missing elements encode as 0.
				for (let i = 0; i < field.arraySize; ++i) encodePrimitiveValue(field, view, byteOffset + field.offset + i * field.size, transformer(field, value?.[i]));
			}
		} else if (field.bitWidth != null) {
			encodeBitFieldValue(field, view, byteOffset + field.offset, transformer(field, value));
		} else {
			encodePrimitiveValue(field, view, byteOffset + field.offset, transformer(field, value));
		}
	}
}
|
|
107
|
+
/**
 * Decodes a string field value.
 *
 * Assumes UTF-8 encoding, handles zero-termination, continuation bytes.
 *
 * @param view source buffer view
 * @param byteOffset offset of the char array within `view`
 * @param byteLength declared size of the char array
 * @returns decoded string (possibly empty)
 */
function decodeStringValue(view, byteOffset, byteLength) {
	// Trim trailing NUL padding to find the effective string length.
	let length = byteLength;
	for (; length > 0; --length) if (view.getUint8(byteOffset + length - 1) !== 0) break;
	if (length === 0) return "";
	// If the last byte has the high bit set it belongs to a multi-byte
	// UTF-8 sequence that may have been truncated at the array boundary.
	if ((view.getUint8(byteOffset + length - 1) & 128) !== 0) {
		// Scan backwards for the lead byte (11xxxxxx — bit 6 set);
		// continuation bytes are 10xxxxxx.
		let start = length;
		for (; start > 0; --start) if ((view.getUint8(byteOffset + start - 1) & 64) != 0) break;
		if (start == 0) return "";
		start--;
		const b = view.getUint8(byteOffset + start);
		// Compare the sequence length implied by the lead byte with the
		// number of bytes actually present; drop an incomplete sequence.
		if ((b & 224) === 192) {
			// 110xxxxx — 2-byte sequence expected.
			if (start !== length - 2) length = start;
		} else if ((b & 240) === 224) {
			// 1110xxxx — 3-byte sequence expected.
			if (start !== length - 3) length = start;
		} else if ((b & 248) === 240) {
			// 11110xxx — 4-byte sequence expected.
			if (start !== length - 4) length = start;
		}
	}
	// `fatal` decoder: malformed interior bytes still raise a TypeError.
	return utf8decoder.decode(new DataView(view.buffer, view.byteOffset + byteOffset, length));
}
|
|
132
|
+
/**
 * Encodes a string field value.
 *
 * The implementation relies on the behavior of `Uint8Array` that is initialized to all zeros
 * and automatically clamps the size of the encoded data to the length of the array.
 *
 * @param view target buffer view
 * @param byteOffset offset of the char array within `view`
 * @param byteLength declared size of the char array (encoding is clamped to it)
 * @param value string to encode (UTF-8)
 */
function encodeStringValue(view, byteOffset, byteLength, value) {
	// encodeInto never splits a code point across the boundary: a character
	// that does not fully fit is simply not written.
	utf8encoder.encodeInto(value, new Uint8Array(view.buffer, view.byteOffset + byteOffset, byteLength));
}
|
|
141
|
+
/**
 * Decodes a primitive field value (little-endian).
 *
 * † JavaScript limits integer types to 53-bit safe representation.
 * Decoding 64-bit integers may lose precision: values outside the safe
 * integer range come back as double-precision approximations.
 *
 * ‡ Decoding a single `char` only makes sense for one-byte (ASCII)
 * characters; multi-byte UTF-8 text is handled by {@link decodeStringValue}
 * when a char array is decoded.
 *
 * @returns the decoded value, or `undefined` for an unknown field type
 */
function decodePrimitiveValue(field, view, byteOffset) {
	const littleEndian = true;
	switch (field.type) {
		case "bool":
			return view.getUint8(byteOffset) !== 0;
		case "char":
			// ‡ single-byte character
			return String.fromCharCode(view.getUint8(byteOffset));
		case "int8":
			return view.getInt8(byteOffset);
		case "uint8":
			return view.getUint8(byteOffset);
		case "int16":
			return view.getInt16(byteOffset, littleEndian);
		case "uint16":
			return view.getUint16(byteOffset, littleEndian);
		case "int32":
			return view.getInt32(byteOffset, littleEndian);
		case "uint32":
			return view.getUint32(byteOffset, littleEndian);
		case "int64":
			// † possible precision loss
			return Number(view.getBigInt64(byteOffset, littleEndian));
		case "uint64":
			// † possible precision loss
			return Number(view.getBigUint64(byteOffset, littleEndian));
		case "float":
			return view.getFloat32(byteOffset, littleEndian);
		case "double":
			return view.getFloat64(byteOffset, littleEndian);
	}
}
|
|
171
|
+
/**
 * Encodes a primitive field value (little-endian).
 *
 * `null`/`undefined` encodes as 0; a string contributes the char code of its
 * first character (empty string → 0); everything else is Number()-coerced.
 */
function encodePrimitiveValue(field, view, byteOffset, value) {
	let v;
	if (value == null) v = 0;
	else if (typeof value === "string") v = value.length === 0 ? 0 : value.charCodeAt(0);
	else v = Number(value);
	const littleEndian = true;
	switch (field.type) {
		// All three occupy one byte and use the same setter.
		case "bool":
		case "char":
		case "uint8":
			view.setUint8(byteOffset, v);
			break;
		case "int8":
			view.setInt8(byteOffset, v);
			break;
		case "int16":
			view.setInt16(byteOffset, v, littleEndian);
			break;
		case "uint16":
			view.setUint16(byteOffset, v, littleEndian);
			break;
		case "int32":
			view.setInt32(byteOffset, v, littleEndian);
			break;
		case "uint32":
			view.setUint32(byteOffset, v, littleEndian);
			break;
		case "int64":
			view.setBigInt64(byteOffset, BigInt(v), littleEndian);
			break;
		case "uint64":
			view.setBigUint64(byteOffset, BigInt(v), littleEndian);
			break;
		case "float":
			view.setFloat32(byteOffset, v, littleEndian);
			break;
		case "double":
			view.setFloat64(byteOffset, v, littleEndian);
			break;
	}
}
|
|
215
|
+
/**
 * Decodes a bit-field integer value.
 *
 * `field.size` is the byte size of the storage block holding the bit-field;
 * `field.bitShift`/`field.bitWidth` locate the value inside it (little-endian).
 * Signed integer types are sign-extended to `bitWidth`.
 */
function decodeBitFieldValue(field, view, byteOffset) {
	const width = field.bitWidth;
	const shift = field.bitShift;
	if (field.size === 8) {
		if (width <= 32) {
			// 8-byte block handled as two 32-bit words to stay in fast integer math.
			const h32 = view.getUint32(byteOffset + 4, true);
			const l32 = view.getUint32(byteOffset, true);
			// BUGFIX: a field fully contained in the low word must not touch the
			// high word — the straddling expression `h32 << (32 - shift)` is wrong
			// for shift 0, since JS shift counts are taken modulo 32 (`<< 32` is
			// `<< 0`), which ORed the high word into the result. This mirrors the
			// three-way split already used by encodeBitFieldValue.
			const v = (shift >= 32 ? h32 >>> shift - 32 : shift + width <= 32 ? l32 >>> shift : l32 >>> shift | h32 << 32 - shift) & bitmask(width);
			return field.type === "int64" ? v << 32 - width >> 32 - width : v;
		}
		// Wider than 32 bits: fall back to BigInt extraction.
		const data = view.getBigUint64(byteOffset, true);
		return Number(data >> BigInt(shift) & 2n ** BigInt(width) - 1n);
	}
	const data = field.size === 4 ? view.getUint32(byteOffset, true) : field.size === 2 ? view.getUint16(byteOffset, true) : view.getUint8(byteOffset);
	switch (field.type) {
		case "bool":
		case "uint8":
		case "uint16":
		case "uint32": {
			const v = data >>> shift & bitmask(width);
			return field.type === "bool" ? v !== 0 : v;
		}
	}
	// Signed types: shift the field to the top of the 32-bit lane, then
	// arithmetic-shift back down to sign-extend.
	return data << 32 - shift - width >> 32 - width;
}
|
|
244
|
+
/**
 * Encodes a bit-field integer value.
 *
 * Writes `value` into the `field.bitWidth` bits at `field.bitShift` within the
 * `field.size`-byte storage block (little-endian), preserving all surrounding bits.
 */
function encodeBitFieldValue(field, view, byteOffset, value) {
	const width = field.bitWidth;
	const shift = field.bitShift;
	// null/undefined encodes as 0; everything else is Number()-coerced.
	const n = value == null ? 0 : Number(value);
	// Merges `n$1` (masked to `mask`) into `v` at bit position `shift$1`,
	// clearing only the target bits.
	const overlay = (n$1, v, mask, shift$1) => v & ~(mask << shift$1) | (n$1 & mask) << shift$1;
	if (field.size === 8) if (width <= 32) {
		const mask = bitmask(width);
		// Entirely within the high 32-bit word.
		if (shift >= 32) view.setUint32(byteOffset + 4, overlay(n, view.getUint32(byteOffset + 4, true), mask, shift - 32), true);
		// Entirely within the low 32-bit word.
		else if (shift + width <= 32) view.setUint32(byteOffset, overlay(n, view.getUint32(byteOffset, true), mask, shift), true);
		else {
			// Straddles the word boundary (requires 0 < shift < 32 here):
			// split the value across both 32-bit words.
			const h32 = view.getUint32(byteOffset + 4, true);
			const l32 = view.getUint32(byteOffset, true);
			const nm = n & mask;
			view.setUint32(byteOffset + 4, h32 & ~bitmask(shift + width - 32) | nm >>> 32 - shift, true);
			view.setUint32(byteOffset, overlay(nm, l32, bitmask(32 - shift), shift), true);
		}
	} else {
		// Wider than 32 bits: use BigInt masking over the whole 64-bit block.
		const data = view.getBigUint64(byteOffset, true);
		const mask = 2n ** BigInt(width) - 1n;
		view.setBigUint64(byteOffset, data & ~(mask << BigInt(shift)) | (BigInt(n) & mask) << BigInt(shift), true);
	}
	else {
		// 1-, 2- or 4-byte storage blocks.
		const mask = bitmask(width);
		switch (field.size) {
			case 4:
				view.setUint32(byteOffset, overlay(n, view.getUint32(byteOffset, true), mask, shift), true);
				break;
			case 2:
				view.setUint16(byteOffset, overlay(n, view.getUint16(byteOffset, true), mask, shift), true);
				break;
			case 1:
				view.setUint8(byteOffset, overlay(n, view.getUint8(byteOffset), mask, shift));
				break;
		}
	}
}
|
|
283
|
+
/** Constructs bitmask of the specified `width` (max 32 bits). */
const bitmask = (width) => 0xffffffff >>> (32 - width);
|
|
285
|
+
/** Bytes size of the value type. */
const fieldTypeByteSize = {
	// single-byte types
	bool: 1,
	char: 1,
	int8: 1,
	uint8: 1,
	// multi-byte integers (little-endian on the wire)
	int16: 2,
	int32: 4,
	int64: 8,
	uint16: 2,
	uint32: 4,
	uint64: 8,
	// IEEE-754 floating point
	float: 4,
	double: 8
};
|
|
300
|
+
/**
 * Normalizes a schema type name: `float32`/`float64` become `float`/`double`,
 * known primitives map to themselves, anything else is a struct reference.
 */
const getFieldType = (type) => {
	if (type === "float32") return "float";
	if (type === "float64") return "double";
	const primitives = new Set([
		"bool",
		"char",
		"int8",
		"int16",
		"int32",
		"int64",
		"uint8",
		"uint16",
		"uint32",
		"uint64",
		"float",
		"double"
	]);
	return primitives.has(type) ? type : "ref";
};
|
|
319
|
+
/** Repository of struct descriptors. */
var StructRepository = class StructRepository {
	// Descriptors whose schemas reference struct types not seen yet.
	unresolved = [];
	/** Descriptors in the repository. */
	descriptors = /* @__PURE__ */ new Map();
	/**
	 * Computes field byte offsets and returns total packed size in bytes.
	 *
	 * This method assumes that all fields have resolved external dependencies
	 * and will compute byte offsets and bit shifts for bit-field packed fields.
	 */
	static computeFieldOffsets(fields) {
		let offset = 0;
		// Current bit-field allocation block: byte size and bits still free.
		let bitBlock = 0;
		let bitAvail = 0;
		for (const field of fields) if (field.bitWidth != null) {
			// Open a new allocation block when the storage size changes
			// (bool adopts the current block size) or the remaining bits
			// cannot hold this field.
			if (bitBlock !== field.size && field.type !== "bool" || field.bitWidth > bitAvail) {
				if (bitBlock > 0) offset += bitBlock;
				bitBlock = field.size;
				bitAvail = bitBlock << 3;
			}
			if (field.type === "bool") field.size = bitBlock;
			field.offset = offset;
			field.bitShift = (bitBlock << 3) - bitAvail;
			bitAvail -= field.bitWidth;
		} else {
			// A non-bit-field terminates any open bit-field block.
			if (bitBlock > 0) {
				offset += bitBlock;
				bitBlock = 0;
				bitAvail = 0;
			}
			field.offset = offset;
			offset += field.size * (field.arraySize ?? 1);
		}
		return offset + bitBlock;
	}
	/**
	 * Attempts to finalize any unresolved descriptors with the recently resolved one.
	 */
	resolve(descriptor) {
		const resolved = [];
		// Iterate backwards so entries can be spliced out while looping.
		for (let i = this.unresolved.length - 1; i >= 0; --i) {
			const d = this.unresolved[i];
			if (d.unresolved?.has(descriptor.name)) {
				d.unresolved?.delete(descriptor.name);
				d.fields.forEach((_) => {
					if (_.typeRef === descriptor.name) {
						_.typeRef = descriptor;
						_.size = descriptor.size;
					}
				});
				if (d.unresolved.size === 0) {
					d.unresolved = void 0;
					d.size = StructRepository.computeFieldOffsets(d.fields);
					this.unresolved.splice(i, 1);
					resolved.push(d);
				}
			}
		}
		// Newly completed descriptors may in turn unblock others.
		for (const d of resolved) this.resolve(d);
	}
	/**
	 * Determines whether type can be transformed, indicating that
	 * the parsed type descriptor is present.
	 *
	 * @param name struct type name
	 */
	canTransform(name) {
		const d = this.descriptors.get(name);
		return d != null && d.size > 0;
	}
	/**
	 * Gets the struct type serialized size in bytes.
	 *
	 * @param name struct type name
	 * @throws Error when the descriptor is missing or not fully resolved
	 */
	getSize(name) {
		const size = this.descriptors.get(name)?.size;
		if (size == null || size == 0) throw new Error(`Descriptor for type '${name}' does not exist or is not fully defined`);
		return size;
	}
	/**
	 * Unpacks serialized struct data into JSON object.
	 *
	 * @param name struct type name
	 * @param data serialized binary data
	 * @param options additional options
	 */
	unpack(name, data, options) {
		return unpack(name, data, this, options);
	}
	/**
	 * Packs JSON object into serialized struct data.
	 *
	 * @param name struct type name
	 * @param value JSON object to pack
	 */
	pack(name, value) {
		return pack(name, value, this);
	}
	/**
	 * Parses struct schema and adds the resulting descriptor to the repository.
	 *
	 * The descriptor may not be fully processed if it references other structs that
	 * we have not seen yet. Such pending descriptors will be processed automatically,
	 * once corresponding structs have been added. This code checks for circular
	 * dependencies and will fail when one is detected.
	 *
	 * @param name struct type name
	 * @param data struct schema in UTF-8 encoded binary representation
	 * @returns parsed descriptor
	 * @throws Error when the schema is malformed or circular
	 */
	add(name, data) {
		let decoded;
		try {
			decoded = utf8decoder.decode(data);
		} catch (exception) {
			throw error(exception instanceof TypeError ? `Failed to parse schema: ${exception.message}` : `Failed to parse schema: unknown error`);
		}
		const fields = [];
		// Declarations are ';'-separated; ignore empty trailing segments.
		const tokens = decoded.split(";").map((_) => _.trim()).filter((_) => _.length > 0);
		const unresolved = /* @__PURE__ */ new Set();
		for (const token of tokens) {
			// Matches: [enum {A=1, B=2}] <type> <id> [\[N\] | :bits]
			// BUGFIX: enum member names use `\w+` — the previous `\w` only
			// accepted single-character names, rejecting schemas such as
			// `enum {kOn=1, kOff=2} int8 value`.
			const m = /^(?:(?:enum)?\s*(?:{\s*}|{(?<enum>(?:\s*\w+\s*=\s*-?\d+\s*)(?:,\s*\w+\s*=\s*-?\d+\s*)*),?\s*}))?\s*(?<type>\w+)\s+(?<id>\w+)\s*(?:(?:\[\s*(?<array>\d+)\s*\])|:\s*(?<bits>[1-9]\d?))?$/i.exec(token);
			if (m == null || m.groups == null) throw error(`Failed to parse schema: invalid declaration '${token}'`);
			const id = m.groups["id"];
			const typeRaw = m.groups["type"];
			if (fields.some((_) => _.identifier === id)) throw error(`Failed to parse schema: duplicate '${id}' field declaration`);
			const field = {
				identifier: id,
				type: getFieldType(typeRaw),
				offset: -1,
				size: 0
			};
			if (field.type === "ref") {
				field.typeRef = this.descriptors.get(typeRaw);
				if (field.typeRef == null) {
					// Not seen yet: remember the name and defer resolution.
					field.typeRef = typeRaw;
					unresolved.add(typeRaw);
				} else if (field.typeRef.size === 0) throw error(`Failed to parse schema: circular dependency detected between '${name}' and '${field.typeRef.name}'`);
				else field.size = field.typeRef.size;
			} else field.size = fieldTypeByteSize[field.type];
			const bitWidthRaw = m.groups["bits"];
			if (bitWidthRaw != null) {
				field.bitWidth = parseInt(bitWidthRaw, 10);
				if (Number.isNaN(field.bitWidth)) throw error(`Failed to parse schema: non-numeric bit-field width in '${id}' field declaration`);
				switch (field.type) {
					case "bool":
						// bool bit-fields are always exactly one bit wide.
						if (field.bitWidth !== 1) throw error(`Failed to parse schema: invalid boolean bit-field width '${id}' field declaration`);
						break;
					case "int8":
					case "int16":
					case "int32":
					case "int64":
					case "uint8":
					case "uint16":
					case "uint32":
					case "uint64":
						// Width must fit in the declared integer storage size.
						if (field.bitWidth < 1 || field.bitWidth > fieldTypeByteSize[field.type] << 3) throw error(`Failed to parse schema: invalid integer bit-field width '${id}' field declaration`);
						break;
					default: throw error(`Failed to parse schema: bit-field in non-integer/boolean '${id}' field declaration`);
				}
			}
			const arraySizeRaw = m.groups["array"];
			if (arraySizeRaw != null) {
				field.arraySize = parseInt(arraySizeRaw, 10);
				if (Number.isNaN(field.arraySize) || field.arraySize <= 0) throw error(`Failed to parse schema: invalid array size in '${id}' field declaration`);
			}
			const enumBodyRaw = m.groups["enum"];
			if (enumBodyRaw) {
				switch (field.type) {
					case "int8":
					case "int16":
					case "int32":
					case "int64":
					case "uint8":
					case "uint16":
					case "uint32":
					case "uint64": break;
					default: throw error(`Failed to parse schema: enum declaration in non-integer '${id}' field declaration`);
				}
				field.enum = /* @__PURE__ */ new Map();
				for (const tuple of enumBodyRaw.split(",")) {
					const [enumName, valueRaw] = tuple.trim().split("=", 2);
					const enumValue = parseInt(valueRaw.trim(), 10);
					if (Number.isNaN(enumValue)) throw error(`Failed to parse schema: enum declaration contains non-integer value '${valueRaw}' in '${id}' field declaration`);
					// FIX: trim the name — the grammar allows whitespace around '='.
					field.enum.set(enumValue, enumName.trim());
				}
			}
			fields.push(field);
		}
		const descriptor = {
			name,
			fields,
			// Size stays 0 (unusable) until every referenced type resolves.
			size: unresolved.size > 0 ? 0 : StructRepository.computeFieldOffsets(fields),
			unresolved: unresolved.size > 0 ? unresolved : void 0
		};
		this.descriptors.set(name, descriptor);
		if (descriptor.size > 0) this.resolve(descriptor);
		else this.unresolved.push(descriptor);
		return descriptor;
	}
};
|
|
522
|
+
/** Implements {@link DataTransformer} interface for the `struct` serialization protocol. */
var StructDataTransformer = class {
	// Backing repository of parsed struct descriptors.
	repo = new StructRepository();
	/**
	 * Classifies an incoming topic: returns the bare type name for schema
	 * entries, a subscription record for `struct:`-typed topics, and
	 * `undefined` for everything else.
	 */
	inspect(source, name, type, metadata) {
		if (name.startsWith("/.schema/struct:")) {
			if (type !== "structschema") throw new Error(`Unexpected type '${type}' for struct schema entry`);
			return name.substring(16);
		}
		if (!type.startsWith("struct:")) return;
		const isArrayType = type.endsWith("[]");
		const typeName = isArrayType ? type.slice(7, -2) : type.slice(7);
		return {
			source,
			id: name,
			dataType: "json",
			publishedDataType: type,
			transformer: this,
			structuredType: {
				name: typeName,
				format: "struct",
				isArray: isArrayType
			},
			metadata
		};
	}
	/** Registers a struct schema under the given type name. */
	schema(typeName, value) {
		this.repo.add(typeName, require_utils.toUint8Array(value));
	}
	/**
	 * Deserializes binary struct data into a JSON object (or an array of
	 * objects for array-typed topics). Returns `undefined` when the type
	 * descriptor is not available yet.
	 */
	deserialize(value, type) {
		if (type == null) throw new Error(`Transformation requires type to be specified. This situation should not be possible if the transformer is wired correctly.`);
		if (!this.repo.canTransform(type.name)) return;
		const buffer = require_utils.toUint8Array(value);
		if (!type.isArray) return this.repo.unpack(type.name, buffer, { useEnum: true });
		const itemSize = this.repo.getSize(type.name);
		const items = [];
		// Unpack as many whole items as fit; a trailing partial item is ignored.
		for (let byteOffset = 0; byteOffset + itemSize <= buffer.byteLength; byteOffset += itemSize) {
			items.push(this.repo.unpack(type.name, new DataView(buffer.buffer, buffer.byteOffset + byteOffset, itemSize), { useEnum: true }));
		}
		return items;
	}
	/** Serializes a JSON object (or array of objects) into binary struct data. */
	serialize(value, type) {
		if (type == null) throw new Error(`Transformation requires type to be specified. This situation should not be possible if the transformer is wired correctly.`);
		if (value == null || typeof value !== "object") throw new Error("Only JSON objects can be serialized");
		if (!this.repo.canTransform(type.name)) throw new Error(`Struct serialization is not supported for '${type.name}'`);
		if (!Array.isArray(value)) return this.repo.pack(type.name, value);
		const itemSize = this.repo.getSize(type.name);
		const packed = new Uint8Array(itemSize * value.length);
		value.forEach((item, index) => packed.set(this.repo.pack(type.name, item), index * itemSize));
		return packed;
	}
	/** Indicates whether a fully resolved descriptor exists for `type`. */
	canTransform(type) {
		return this.repo.canTransform(type);
	}
};
|
|
588
|
+
|
|
589
|
+
//#endregion
// Public CommonJS surface of this module.
exports.StructDataTransformer = StructDataTransformer;
exports.StructRepository = StructRepository;
exports.pack = pack;
exports.unpack = unpack;
|