@2702rebels/wpidata 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. package/LICENSE +28 -0
  2. package/README.md +5 -0
  3. package/dist/abstractions.cjs +0 -0
  4. package/dist/abstractions.d.cts +246 -0
  5. package/dist/abstractions.d.cts.map +1 -0
  6. package/dist/abstractions.d.mts +246 -0
  7. package/dist/abstractions.d.mts.map +1 -0
  8. package/dist/abstractions.mjs +1 -0
  9. package/dist/formats/json.cjs +32 -0
  10. package/dist/formats/json.d.cts +14 -0
  11. package/dist/formats/json.d.cts.map +1 -0
  12. package/dist/formats/json.d.mts +14 -0
  13. package/dist/formats/json.d.mts.map +1 -0
  14. package/dist/formats/json.mjs +33 -0
  15. package/dist/formats/json.mjs.map +1 -0
  16. package/dist/formats/msgpack.cjs +30 -0
  17. package/dist/formats/msgpack.d.cts +14 -0
  18. package/dist/formats/msgpack.d.cts.map +1 -0
  19. package/dist/formats/msgpack.d.mts +14 -0
  20. package/dist/formats/msgpack.d.mts.map +1 -0
  21. package/dist/formats/msgpack.mjs +31 -0
  22. package/dist/formats/msgpack.mjs.map +1 -0
  23. package/dist/formats/protobuf.cjs +130 -0
  24. package/dist/formats/protobuf.d.cts +68 -0
  25. package/dist/formats/protobuf.d.cts.map +1 -0
  26. package/dist/formats/protobuf.d.mts +68 -0
  27. package/dist/formats/protobuf.d.mts.map +1 -0
  28. package/dist/formats/protobuf.mjs +128 -0
  29. package/dist/formats/protobuf.mjs.map +1 -0
  30. package/dist/formats/struct.cjs +593 -0
  31. package/dist/formats/struct.d.cts +134 -0
  32. package/dist/formats/struct.d.cts.map +1 -0
  33. package/dist/formats/struct.d.mts +134 -0
  34. package/dist/formats/struct.d.mts.map +1 -0
  35. package/dist/formats/struct.mjs +591 -0
  36. package/dist/formats/struct.mjs.map +1 -0
  37. package/dist/sink.cjs +360 -0
  38. package/dist/sink.d.cts +93 -0
  39. package/dist/sink.d.cts.map +1 -0
  40. package/dist/sink.d.mts +93 -0
  41. package/dist/sink.d.mts.map +1 -0
  42. package/dist/sink.mjs +361 -0
  43. package/dist/sink.mjs.map +1 -0
  44. package/dist/types/protobuf.cjs +0 -0
  45. package/dist/types/protobuf.d.cts +302 -0
  46. package/dist/types/protobuf.d.cts.map +1 -0
  47. package/dist/types/protobuf.d.mts +302 -0
  48. package/dist/types/protobuf.d.mts.map +1 -0
  49. package/dist/types/protobuf.mjs +1 -0
  50. package/dist/types/sendable.cjs +0 -0
  51. package/dist/types/sendable.d.cts +225 -0
  52. package/dist/types/sendable.d.cts.map +1 -0
  53. package/dist/types/sendable.d.mts +225 -0
  54. package/dist/types/sendable.d.mts.map +1 -0
  55. package/dist/types/sendable.mjs +1 -0
  56. package/dist/types/struct.cjs +0 -0
  57. package/dist/types/struct.d.cts +304 -0
  58. package/dist/types/struct.d.cts.map +1 -0
  59. package/dist/types/struct.d.mts +304 -0
  60. package/dist/types/struct.d.mts.map +1 -0
  61. package/dist/types/struct.mjs +1 -0
  62. package/dist/utils.cjs +140 -0
  63. package/dist/utils.d.cts +40 -0
  64. package/dist/utils.d.cts.map +1 -0
  65. package/dist/utils.d.mts +40 -0
  66. package/dist/utils.d.mts.map +1 -0
  67. package/dist/utils.mjs +135 -0
  68. package/dist/utils.mjs.map +1 -0
  69. package/package.json +51 -0
  70. package/src/abstractions.ts +308 -0
  71. package/src/formats/json.ts +53 -0
  72. package/src/formats/msgpack.ts +42 -0
  73. package/src/formats/protobuf.ts +213 -0
  74. package/src/formats/struct.test.ts +814 -0
  75. package/src/formats/struct.ts +992 -0
  76. package/src/sink.ts +611 -0
  77. package/src/types/protobuf.ts +334 -0
  78. package/src/types/sendable.ts +244 -0
  79. package/src/types/struct.ts +333 -0
  80. package/src/utils.ts +241 -0
package/src/formats/struct.ts
@@ -0,0 +1,992 @@
+ import { toDataView, toUint8Array } from "../utils";
+
+ import type { DataChannel, DataTransformer, DataTypeImpl, StructuredTypeDescriptor } from "../abstractions";
+
+ const error = (message: string) => {
+   return new Error(message);
+ };
+
+ export type StructFieldType =
+   | "ref"
+   | "bool"
+   | "char"
+   | "int8"
+   | "int16"
+   | "int32"
+   | "int64"
+   | "uint8"
+   | "uint16"
+   | "uint32"
+   | "uint64"
+   | "float"
+   | "double";
+
+ /** Describes individual field in {@link StructDescriptor}. */
+ export type StructFieldDescriptor = {
+   /** Field identifier */
+   identifier: string;
+   /** Field value type */
+   type: StructFieldType;
+   /** Reference for complex (nested) type */
+   typeRef?: StructDescriptor | string;
+   /** Offset to the packed field data in bytes */
+   offset: number;
+   /** Size of the packed field data in bytes */
+   size: number;
+   /** Fixed array size */
+   arraySize?: number;
+   /** Bit width of the field data (bit-field only) */
+   bitWidth?: number;
+   /** Bit shift for the field data (bit-field only) */
+   bitShift?: number;
+   /** Enum specification */
+   enum?: Map<number, string>;
+ };
+
+ /** Describes named struct type. */
+ export type StructDescriptor = {
+   /** Struct type name */
+   name: string;
+   /** Struct fields in packed order */
+   fields: ReadonlyArray<StructFieldDescriptor>;
+   /** Total packed size in bytes */
+   size: number;
+   /** Missing dependencies */
+   unresolved?: Set<string>;
+ };
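For orientation, a fully resolved descriptor for a hypothetical two-field struct might look like the sketch below; in practice, offsets and sizes are computed by the repository defined later in this file, not written by hand:

const exampleDescriptor: StructDescriptor = {
  name: "Point2d",                                   // hypothetical type name
  size: 16,                                          // two packed doubles
  fields: [
    { identifier: "x", type: "double", offset: 0, size: 8 },
    { identifier: "y", type: "double", offset: 8, size: 8 },
  ],
};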
+
+ const utf8decoder = new TextDecoder("utf-8", {
+   // throw TypeError on invalid data instead of silent substitution
+   fatal: true,
+ });
+
+ const utf8encoder = new TextEncoder();
+
+ export type UnpackStructOptions = {
+   /**
+    * Indicates that integer numeric values with enum specification should be converted
+    * to the corresponding enum string value if possible.
+    */
+   useEnum?: boolean;
+ };
+
+ /**
+  * Unpacks serialized struct data into JSON object.
+  *
+  * Implementation of the WPILib packed struct serialization protocol
+  * https://github.com/wpilibsuite/allwpilib/blob/main/wpiutil/doc/struct.adoc
+  *
+  * @param name struct type name
+  * @param data serialized binary data
+  * @param repository repository of available descriptors
+  * @param options additional options
+  */
+ export function unpack(
+   name: string,
+   data: DataView<ArrayBufferLike> | Uint8Array<ArrayBufferLike>,
+   repository: StructRepository,
+   options?: UnpackStructOptions
+ ) {
+   const descriptor = repository.descriptors.get(name);
+   if (descriptor == null) {
+     throw error(`Failed to unpack struct data: missing '${name}' type definition`);
+   }
+
+   // descriptor exists but is not mapped yet due to unresolved dependencies
+   if (descriptor.size === 0) {
+     throw error(`Failed to unpack struct data: '${name}' type definition has unresolved dependencies`);
+   }
+
+   const result: Record<string, unknown> = {};
+   unpackStruct(result, descriptor, toDataView(data), 0, options?.useEnum ? transformEnums : (_, v) => v);
+   return result;
+ }
+
+ /**
+  * Packs JSON object into serialized struct data.
+  *
+  * @param name struct type name
+  * @param value JSON object to pack
+  * @param repository repository of available descriptors
+  * @returns Uint8Array containing serialized data
+  */
+ export function pack(name: string, value: Record<string, unknown>, repository: StructRepository) {
+   const descriptor = repository.descriptors.get(name);
+   if (descriptor == null) {
+     throw error(`Failed to pack struct data: missing '${name}' type definition`);
+   }
+
+   // descriptor exists but is not mapped yet due to unresolved dependencies
+   if (descriptor.size === 0) {
+     throw error(`Failed to pack struct data: '${name}' type definition has unresolved dependencies`);
+   }
+
+   const buffer = new ArrayBuffer(descriptor.size);
+   const view = new DataView(buffer);
+
+   packStruct(value, descriptor, view, 0, transformValue);
+   return new Uint8Array(buffer);
+ }
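A minimal round-trip sketch using the repository type defined further below (the "Point2d" schema string here is hypothetical, not part of the package):

const repo = new StructRepository();
repo.add("Point2d", new TextEncoder().encode("double x; double y"));

const bytes = pack("Point2d", { x: 1.5, y: -2.0 }, repo);   // Uint8Array of 16 bytes
const value = unpack("Point2d", bytes, repo);               // { x: 1.5, y: -2 }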
+
+ /**
+  * Transforms field values according to the field descriptor.
+  *
+  * Handles conversion of enum strings into their numeric representation.
+  */
+ function transformValue(field: StructFieldDescriptor, value: unknown) {
+   if (field.enum != null && typeof value === "string") {
+     for (const [key, v] of field.enum) {
+       if (v === value) {
+         return key;
+       }
+     }
+   }
+
+   return value;
+ }
+
+ /** Transforms numeric values to enum names for fields that support enums. */
+ function transformEnums(field: StructFieldDescriptor, value: unknown) {
+   if (field.enum != null && typeof value === "number") {
+     const enumName = field.enum.get(value);
+     if (enumName != null) {
+       return enumName;
+     }
+   }
+
+   return value;
+ }
+
+ /**
+  * Unpacks data per descriptor specification and populates `sink` placeholder instance.
+  *
+  * @param sink target object to populate with parsed data
+  * @param descriptor struct type descriptor
+  * @param view source buffer view
+  * @param byteOffset offset in bytes within `view`
+  * @param transformer primitive field value transformer
+  */
+ function unpackStruct(
+   sink: Record<string, unknown>,
+   descriptor: StructDescriptor,
+   view: DataView,
+   byteOffset: number,
+   transformer: (field: StructFieldDescriptor, value: unknown) => unknown
+ ) {
+   for (const field of descriptor.fields) {
+     if (field.type === "ref") {
+       // nested structure
+       if (field.typeRef == null || typeof field.typeRef !== "object") {
+         throw error(`Failed to unpack struct data: field '${field.identifier}' references unresolved type`);
+       }
+
+       const result: Record<string, unknown> = {};
+       unpackStruct(result, field.typeRef, view, byteOffset + field.offset, transformer);
+       sink[field.identifier] = result;
+     } else if (field.arraySize != null) {
+       if (field.type === "char") {
+         // array of chars is UTF-8 encoded string
+         sink[field.identifier] = transformer(
+           field,
+           decodeStringValue(view, byteOffset + field.offset, field.arraySize)
+         );
+       } else {
+         // array of booleans or numeric values
+         const result: Array<unknown> = [];
+         for (let i = 0; i < field.arraySize; ++i) {
+           result.push(
+             transformer(field, decodePrimitiveValue(field, view, byteOffset + field.offset + i * field.size))
+           );
+         }
+         sink[field.identifier] = result;
+       }
+     } else if (field.bitWidth != null) {
+       sink[field.identifier] = transformer(field, decodeBitFieldValue(field, view, byteOffset + field.offset));
+     } else {
+       sink[field.identifier] = transformer(field, decodePrimitiveValue(field, view, byteOffset + field.offset));
+     }
+   }
+ }
209
+
210
+ /**
211
+ * Packs data per descriptor specification.
212
+ *
213
+ * @param source source object to pack
214
+ * @param descriptor struct type descriptor
215
+ * @param view target buffer view
216
+ * @param byteOffset offset in bytes within `view`
217
+ * @param transformer primitive field value transformer
218
+ */
219
+ function packStruct(
220
+ source: Record<string, unknown>,
221
+ descriptor: StructDescriptor,
222
+ view: DataView,
223
+ byteOffset: number,
224
+ transformer: (field: StructFieldDescriptor, value: unknown) => unknown
225
+ ) {
226
+ for (const field of descriptor.fields) {
227
+ const value = source[field.identifier];
228
+
229
+ if (field.type === "ref") {
230
+ // nested structure
231
+ if (field.typeRef == null || typeof field.typeRef !== "object") {
232
+ throw error(`Failed to pack struct data: field '${field.identifier}' references unresolved type`);
233
+ }
234
+
235
+ packStruct((value ?? {}) as Record<string, unknown>, field.typeRef, view, byteOffset + field.offset, transformer);
236
+ } else if (field.arraySize != null) {
237
+ if (field.type === "char") {
238
+ // array of chars is UTF-8 encoded string
239
+ encodeStringValue(
240
+ view,
241
+ byteOffset + field.offset,
242
+ field.arraySize,
243
+ (transformer(field, value) ?? "") as string
244
+ );
245
+ } else {
246
+ // array of booleans or numeric values
247
+ for (let i = 0; i < field.arraySize; ++i) {
248
+ encodePrimitiveValue(field, view, byteOffset + field.offset + i * field.size, transformer(field, value));
249
+ }
250
+ }
251
+ } else if (field.bitWidth != null) {
252
+ encodeBitFieldValue(field, view, byteOffset + field.offset, transformer(field, value));
253
+ } else {
254
+ encodePrimitiveValue(field, view, byteOffset + field.offset, transformer(field, value));
255
+ }
256
+ }
257
+ }
+
+ /**
+  * Decodes a string field value.
+  *
+  * Assumes UTF-8 encoding; handles zero-termination and incomplete trailing continuation bytes.
+  */
+ function decodeStringValue(view: DataView, byteOffset: number, byteLength: number) {
+   // the array can be zero-terminated, we need to find the last non-zero byte
+   let length = byteLength;
+   for (; length > 0; --length) {
+     if (view.getUint8(byteOffset + length - 1) !== 0) {
+       break;
+     }
+   }
+
+   if (length === 0) {
+     return "";
+   }
+
+   // UTF-8 continuation bytes (deal with garbage)
+   if ((view.getUint8(byteOffset + length - 1) & 0x80) !== 0) {
+     let start = length;
+     for (; start > 0; --start) {
+       if ((view.getUint8(byteOffset + start - 1) & 0x40) !== 0) {
+         break;
+       }
+     }
+
+     if (start === 0) {
+       return "";
+     }
+
+     start--;
+     const b = view.getUint8(byteOffset + start);
+     if ((b & 0xe0) === 0xc0) {
+       if (start !== length - 2) {
+         length = start;
+       }
+     } else if ((b & 0xf0) === 0xe0) {
+       if (start !== length - 3) {
+         length = start;
+       }
+     } else if ((b & 0xf8) === 0xf0) {
+       if (start !== length - 4) {
+         length = start;
+       }
+     }
+   }
+
+   // create restricted view
+   return utf8decoder.decode(new DataView(view.buffer, view.byteOffset + byteOffset, length));
+ }
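A worked sketch of the truncation behavior above, assuming a hypothetical char[6] field:

// bytes: 0x61 0x62 0xe2 0x82 0x00 0x00 -> "ab", the first two bytes of a three-byte
// UTF-8 sequence, and zero padding
const raw = new Uint8Array([0x61, 0x62, 0xe2, 0x82, 0x00, 0x00]);
// trailing zeros are stripped, the incomplete 0xe2 0x82 sequence is dropped,
// and only the complete prefix is decoded instead of letting the fatal decoder throw
decodeStringValue(new DataView(raw.buffer), 0, raw.length); // "ab"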
+
+ /**
+  * Encodes a string field value.
+  *
+  * The implementation relies on the target `Uint8Array` being zero-initialized and on
+  * `encodeInto` clamping the encoded data to the length of the target array.
+  */
+ function encodeStringValue(view: DataView, byteOffset: number, byteLength: number, value: string) {
+   utf8encoder.encodeInto(value, new Uint8Array(view.buffer, view.byteOffset + byteOffset, byteLength));
+ }
+
+ /**
+  * Decodes a primitive field value.
+  *
+  * † JavaScript limits integer types to 53-bit representation.
+  * Decoding 64-bit integers may result in loss of precision as values
+  * that do not fit within the safe integer limit will be represented by
+  * floating-point numbers with double precision.
+  *
+  * ‡ Decoding a character value only makes sense for reading fields that
+  * consist of exactly one character, where UTF-8 is essentially ASCII.
+  * In practice UTF-8 encoded strings use multiple bytes to represent
+  * non-ASCII characters and must be handled in a special way when an array
+  * of chars is decoded. @see {decodeStringValue}.
+  */
+ function decodePrimitiveValue(field: StructFieldDescriptor, view: DataView, byteOffset: number) {
+   switch (field.type) {
+     case "bool":
+       return view.getUint8(byteOffset) !== 0;
+     case "char":
+       return String.fromCharCode(view.getUint8(byteOffset)); // ‡
+     case "int8":
+       return view.getInt8(byteOffset);
+     case "int16":
+       return view.getInt16(byteOffset, true);
+     case "int32":
+       return view.getInt32(byteOffset, true);
+     case "int64":
+       return Number(view.getBigInt64(byteOffset, true)); // †
+     case "uint8":
+       return view.getUint8(byteOffset);
+     case "uint16":
+       return view.getUint16(byteOffset, true);
+     case "uint32":
+       return view.getUint32(byteOffset, true);
+     case "uint64":
+       return Number(view.getBigUint64(byteOffset, true)); // †
+     case "float":
+       return view.getFloat32(byteOffset, true);
+     case "double":
+       return view.getFloat64(byteOffset, true);
+   }
+ }
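A small illustration of the 64-bit precision caveat marked with † (values above Number.MAX_SAFE_INTEGER round when converted from BigInt to number):

const big = 9007199254740993n;        // 2^53 + 1
Number(big);                          // 9007199254740992 -- nearest representable double
Number(big) === Number(big - 1n);     // true: 2^53 and 2^53 + 1 collapse to the same number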
+
+ /**
+  * Encodes a primitive field value.
+  */
+ function encodePrimitiveValue(field: StructFieldDescriptor, view: DataView, byteOffset: number, value: unknown) {
+   // convert to numeric representation
+   const v =
+     value == null ? 0 : typeof value === "string" ? (value.length === 0 ? 0 : value.charCodeAt(0)) : Number(value);
+
+   switch (field.type) {
+     case "bool":
+       view.setUint8(byteOffset, v);
+       break;
+     case "char":
+       view.setUint8(byteOffset, v);
+       break;
+     case "int8":
+       view.setInt8(byteOffset, v);
+       break;
+     case "int16":
+       view.setInt16(byteOffset, v, true);
+       break;
+     case "int32":
+       view.setInt32(byteOffset, v, true);
+       break;
+     case "int64":
+       view.setBigInt64(byteOffset, BigInt(v), true);
+       break;
+     case "uint8":
+       view.setUint8(byteOffset, v);
+       break;
+     case "uint16":
+       view.setUint16(byteOffset, v, true);
+       break;
+     case "uint32":
+       view.setUint32(byteOffset, v, true);
+       break;
+     case "uint64":
+       view.setBigUint64(byteOffset, BigInt(v), true);
+       break;
+     case "float":
+       view.setFloat32(byteOffset, v, true);
+       break;
+     case "double":
+       view.setFloat64(byteOffset, v, true);
+       break;
+   }
+ }
+
+ /**
+  * Decodes a bit-field integer value.
+  */
+ function decodeBitFieldValue(field: StructFieldDescriptor, view: DataView, byteOffset: number) {
+   const width = field.bitWidth!;
+   const shift = field.bitShift!;
+
+   if (field.size === 8) {
+     if (width <= 32) {
+       // we can fit in a 32-bit integer, use hi/lo 32-bit blocks to reconstruct the value
+       // 64-bit is stored in LE, so high bits are in the block at a higher address
+       const h32 = view.getUint32(byteOffset + 4, true);
+       const l32 = view.getUint32(byteOffset, true);
+
+       // example: width = 13, shift = 22
+       // ........ ........ ........ .....xxx | xxxxxxxx xx...... ........ ........
+       // +-------------- h32 --------------+ +-------------- l32 --------------+
+       // unsigned
+       // l32 >>> shift       = 00000000 00000000 000000xx xxxxxxxx
+       // h32 << (32 - shift) = ........ ........ ...xxx00 00000000
+       // |                   = ........ ........ 000xxxxx xxxxxxxx
+       // & mask              = 00000000 00000000 000xxxxx xxxxxxxx
+       const v = (shift >= 32 ? h32 >>> (shift - 32) : (l32 >>> shift) | (h32 << (32 - shift))) & bitmask(width);
+       return field.type === "int64" ? (v << (32 - width)) >> (32 - width) : v;
+     } else {
+       // we have to resort to the unsigned case only due to JavaScript limitations
+       // that prevent us from performing bit manipulations on 64-bit integers
+       const data = view.getBigUint64(byteOffset, true);
+       return Number((data >> BigInt(shift)) & (2n ** BigInt(width) - 1n));
+     }
+   } else {
+     // read the block containing the field
+     const data =
+       field.size === 4
+         ? view.getUint32(byteOffset, true)
+         : field.size === 2
+           ? view.getUint16(byteOffset, true)
+           : view.getUint8(byteOffset);
+
+     // for unsigned (and boolean) we can just shift and mask
+     // note the use of the `>>>` unsigned shift operator here
+     switch (field.type) {
+       case "bool":
+       case "uint8":
+       case "uint16":
+       case "uint32": {
+         const v = (data >>> shift) & bitmask(width);
+         return field.type === "bool" ? v !== 0 : v;
+       }
+     }
+
+     // signed integer situation, first shift left to clear high bits and set the sign bit,
+     // then shift right, which also clears low bits so masking is not necessary
+     // note the use of `<<` and `>>` shift operators here
+     // these are overloaded for 32-bit integers
+     return (data << (32 - shift - width)) >> (32 - width);
+   }
+ }
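A concrete instance of the width = 13, shift = 22 example from the comments above (hypothetical field layout, value 0x1abc packed into a 64-bit block):

const block = new DataView(new ArrayBuffer(8));
block.setBigUint64(0, 0x1abcn << 22n, true);      // place the 13-bit value at bit offset 22

const field: StructFieldDescriptor = { identifier: "f", type: "uint64", offset: 0, size: 8, bitWidth: 13, bitShift: 22 };
decodeBitFieldValue(field, block, 0);             // 0x1abc (6844)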
+
+ /**
+  * Encodes a bit-field integer value.
+  */
+ function encodeBitFieldValue(field: StructFieldDescriptor, view: DataView, byteOffset: number, value: unknown) {
+   const width = field.bitWidth!;
+   const shift = field.bitShift!;
+
+   // convert to numeric representation
+   const n = value == null ? 0 : Number(value);
+   const overlay = (n: number, v: number, mask: number, shift: number) => (v & ~(mask << shift)) | ((n & mask) << shift);
+
+   if (field.size === 8) {
+     if (width <= 32) {
+       // we can fit in 32-bit integer, use hi/lo 32-bit blocks to overlay the value
+       const mask = bitmask(width);
+       if (shift >= 32) {
+         view.setUint32(byteOffset + 4, overlay(n, view.getUint32(byteOffset + 4, true), mask, shift - 32), true);
+       } else if (shift + width <= 32) {
+         view.setUint32(byteOffset, overlay(n, view.getUint32(byteOffset, true), mask, shift), true);
+       } else {
+         // 64-bit is stored in LE, so high bits are in the block at a higher address
+         const h32 = view.getUint32(byteOffset + 4, true);
+         const l32 = view.getUint32(byteOffset, true);
+         const nm = n & mask;
+         view.setUint32(byteOffset + 4, (h32 & ~bitmask(shift + width - 32)) | (nm >>> (32 - shift)), true);
+         view.setUint32(byteOffset, overlay(nm, l32, bitmask(32 - shift), shift), true);
+       }
+     } else {
+       const data = view.getBigUint64(byteOffset, true);
+       const mask = 2n ** BigInt(width) - 1n;
+       view.setBigUint64(byteOffset, (data & ~(mask << BigInt(shift))) | ((BigInt(n) & mask) << BigInt(shift)), true);
+     }
+   } else {
+     const mask = bitmask(width);
+     switch (field.size) {
+       case 4:
+         view.setUint32(byteOffset, overlay(n, view.getUint32(byteOffset, true), mask, shift), true);
+         break;
+       case 2:
+         view.setUint16(byteOffset, overlay(n, view.getUint16(byteOffset, true), mask, shift), true);
+         break;
+       case 1:
+         view.setUint8(byteOffset, overlay(n, view.getUint8(byteOffset), mask, shift));
+         break;
+     }
+   }
+ }
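The straddling branch above can be exercised directly; a field that crosses the 32-bit boundary (hypothetical width = 13, shift = 26) is split between the low and high words and round-trips through the decoder:

const buf = new DataView(new ArrayBuffer(8));
const f: StructFieldDescriptor = { identifier: "f", type: "uint64", offset: 0, size: 8, bitWidth: 13, bitShift: 26 };
encodeBitFieldValue(f, buf, 0, 0x1abc);           // low 6 bits land in the low word, high 7 bits in the high word
decodeBitFieldValue(f, buf, 0);                   // 0x1abc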
+
+ /** Constructs a bitmask of the specified `width` (max 32 bits). */
+ const bitmask = (width: number) => -1 >>> (32 - width);
+
+ /** Byte size of the value type. */
+ const fieldTypeByteSize = {
+   bool: 1,
+   char: 1,
+   int8: 1,
+   int16: 2,
+   int32: 4,
+   int64: 8,
+   uint8: 1,
+   uint16: 2,
+   uint32: 4,
+   uint64: 8,
+   float: 4,
+   double: 8,
+ } as const;
+
+ const getFieldType = (type: string): StructFieldType => {
+   switch (type) {
+     case "bool":
+     case "char":
+     case "int8":
+     case "int16":
+     case "int32":
+     case "int64":
+     case "uint8":
+     case "uint16":
+     case "uint32":
+     case "uint64":
+     case "float":
+     case "double":
+       return type;
+     case "float32":
+       return "float";
+     case "float64":
+       return "double";
+     default:
+       return "ref";
+   }
+ };
+
+ /** Repository of struct descriptors. */
+ export class StructRepository {
+   private readonly unresolved: Array<StructDescriptor> = [];
+
+   /** Descriptors in the repository. */
+   public readonly descriptors = new Map<string, StructDescriptor>();
+
+   /**
+    * Computes field byte offsets and returns the total packed size in bytes.
+    *
+    * This method assumes that all fields have resolved external dependencies
+    * and will compute byte offsets and bit shifts for bit-field packed fields.
+    */
+   private static computeFieldOffsets(fields: ReadonlyArray<StructFieldDescriptor>) {
+     let offset = 0; // offset in bytes
+     let bitBlock = 0; // size in bytes of the current bit-field block
+     let bitAvail = 0; // available bits in the current bit-field
+
+     for (const field of fields) {
+       if (field.bitWidth != null) {
+         // current bit-field must be of the same size (except for booleans)
+         // and should have sufficient bits remaining
+         if ((bitBlock !== field.size && field.type !== "bool") || field.bitWidth > bitAvail) {
+           // terminate current bit-field
+           if (bitBlock > 0) {
+             offset += bitBlock;
+           }
+
+           // start new bit-field block
+           bitBlock = field.size;
+           bitAvail = bitBlock << 3;
+         }
+
+         // booleans are "spliced" onto current integer block size
+         if (field.type === "bool") {
+           field.size = bitBlock;
+         }
+
+         field.offset = offset;
+         field.bitShift = (bitBlock << 3) - bitAvail;
+         bitAvail -= field.bitWidth;
+       } else {
+         // terminate current bit-field
+         if (bitBlock > 0) {
+           offset += bitBlock;
+
+           // reset bit-field block
+           bitBlock = 0;
+           bitAvail = 0;
+         }
+
+         field.offset = offset;
+         offset += field.size * (field.arraySize ?? 1);
+       }
+     }
+
+     // account for the terminal bit-field
+     return offset + bitBlock;
+   }
+
+   /**
+    * Attempts to finalize any unresolved descriptors with the recently resolved one.
+    */
+   private resolve(descriptor: StructDescriptor) {
+     const resolved: Array<StructDescriptor> = [];
+     for (let i = this.unresolved.length - 1; i >= 0; --i) {
+       const d = this.unresolved[i]!;
+       if (d.unresolved?.has(descriptor.name)) {
+         d.unresolved?.delete(descriptor.name);
+         d.fields.forEach((_) => {
+           if (_.typeRef === descriptor.name) {
+             _.typeRef = descriptor;
+             _.size = descriptor.size;
+           }
+         });
+
+         // no more unresolved references, we can finalize this descriptor
+         if (d.unresolved.size === 0) {
+           d.unresolved = undefined;
+           d.size = StructRepository.computeFieldOffsets(d.fields);
+           this.unresolved.splice(i, 1);
+           resolved.push(d);
+         }
+       }
+     }
+
+     // resolve recursively
+     for (const d of resolved) {
+       this.resolve(d);
+     }
+   }
+
+   /**
+    * Determines whether type can be transformed, indicating that
+    * the parsed type descriptor is present.
+    *
+    * @param name struct type name
+    */
+   public canTransform(name: string) {
+     const d = this.descriptors.get(name);
+     return d != null && d.size > 0;
+   }
+
+   /**
+    * Gets the struct type serialized size in bytes.
+    *
+    * @param name struct type name
+    */
+   public getSize(name: string) {
+     const size = this.descriptors.get(name)?.size;
+     if (size == null || size === 0) {
+       throw new Error(`Descriptor for type '${name}' does not exist or is not fully defined`);
+     }
+
+     return size;
+   }
+
+   /**
+    * Unpacks serialized struct data into JSON object.
+    *
+    * @param name struct type name
+    * @param data serialized binary data
+    * @param options additional options
+    */
+   public unpack(
+     name: string,
+     data: DataView<ArrayBufferLike> | Uint8Array<ArrayBufferLike>,
+     options?: UnpackStructOptions
+   ) {
+     return unpack(name, data, this, options);
+   }
+
+   /**
+    * Packs JSON object into serialized struct data.
+    *
+    * @param name struct type name
+    * @param value JSON object to pack
+    */
+   public pack(name: string, value: Record<string, unknown>) {
+     return pack(name, value, this);
+   }
+
+   /**
+    * Parses struct schema and adds the resulting descriptor to the repository.
+    *
+    * The descriptor may not be fully processed if it references other structs that
+    * we have not seen yet. Such pending descriptors will be processed automatically,
+    * once corresponding structs have been added. This code checks for circular
+    * dependencies and will fail when one is detected.
+    *
+    * @param name struct type name
+    * @param data struct schema in UTF-8 encoded binary representation
+    * @returns parsed descriptor or `null` if the operation failed
+    */
+   public add(name: string, data: DataView<ArrayBufferLike> | Uint8Array<ArrayBufferLike>): StructDescriptor | null {
+     let decoded: string | undefined;
+     try {
+       decoded = utf8decoder.decode(data);
+     } catch (exception) {
+       throw error(
+         exception instanceof TypeError
+           ? `Failed to parse schema: ${exception.message}`
+           : `Failed to parse schema: unknown error`
+       );
+     }
+
+     const fields: Array<StructFieldDescriptor> = [];
+     const tokens = decoded
+       .split(";")
+       .map((_) => _.trim())
+       .filter((_) => _.length > 0);
+
+     const unresolved = new Set<string>();
+     for (const token of tokens) {
+       // regular expression to parse individual declaration specification
+       // returns the following named capture groups:
+       // - `enum` -- entire body of non-empty enum specification
+       // - `type` -- type name
+       // - `id` -- identifier name
+       // - `array` -- if present, the length of the array
+       // - `bits` -- if present, the bit-field width
+       //
+       // if present, `array` and `bits` are mutually exclusive;
+       // no attempt is made here to restrict the `enum` specification to integer data types
+       const re =
+         /^(?:(?:enum)?\s*(?:{\s*}|{(?<enum>(?:\s*\w\s*=\s*-?\d+\s*)(?:,\s*\w\s*=\s*-?\d+\s*)*),?\s*}))?\s*(?<type>\w+)\s+(?<id>\w+)\s*(?:(?:\[\s*(?<array>\d+)\s*\])|:\s*(?<bits>[1-9]\d?))?$/i;
+
+       const m = re.exec(token);
+       if (m == null || m.groups == null) {
+         throw error(`Failed to parse schema: invalid declaration '${token}'`);
+       }
+
+       const id = m.groups["id"]!;
+       const typeRaw = m.groups["type"]!;
+
+       // check for duplicates
+       if (fields.some((_) => _.identifier === id)) {
+         throw error(`Failed to parse schema: duplicate '${id}' field declaration`);
+       }
+
+       const field: StructFieldDescriptor = {
+         identifier: id,
+         type: getFieldType(typeRaw),
+         offset: -1,
+         size: 0,
+       };
+
+       if (field.type === "ref") {
+         field.typeRef = this.descriptors.get(typeRaw);
+         if (field.typeRef == null) {
+           field.typeRef = typeRaw;
+           unresolved.add(typeRaw);
+         } else if (field.typeRef.size === 0) {
+           throw error(
+             `Failed to parse schema: circular dependency detected between '${name}' and '${field.typeRef.name}'`
+           );
+         } else {
+           field.size = field.typeRef.size;
+         }
+       } else {
+         field.size = fieldTypeByteSize[field.type];
+       }
+
+       // parse and validate bit-field specification
+       const bitWidthRaw = m.groups["bits"];
+       if (bitWidthRaw != null) {
+         field.bitWidth = parseInt(bitWidthRaw, 10);
+         if (Number.isNaN(field.bitWidth)) {
+           throw error(`Failed to parse schema: non-numeric bit-field width in '${id}' field declaration`);
+         }
+
+         switch (field.type) {
+           case "bool":
+             if (field.bitWidth !== 1) {
+               throw error(`Failed to parse schema: invalid boolean bit-field width in '${id}' field declaration`);
+             }
+             break;
+           case "int8":
+           case "int16":
+           case "int32":
+           case "int64":
+           case "uint8":
+           case "uint16":
+           case "uint32":
+           case "uint64":
+             if (field.bitWidth < 1 || field.bitWidth > fieldTypeByteSize[field.type] << 3) {
+               throw error(`Failed to parse schema: invalid integer bit-field width in '${id}' field declaration`);
+             }
+             break;
+           default:
+             throw error(`Failed to parse schema: bit-field in non-integer/boolean '${id}' field declaration`);
+         }
+       }
+
+       // parse and validate array size specification
+       const arraySizeRaw = m.groups["array"];
+       if (arraySizeRaw != null) {
+         field.arraySize = parseInt(arraySizeRaw, 10);
+         if (Number.isNaN(field.arraySize) || field.arraySize <= 0) {
+           throw error(`Failed to parse schema: invalid array size in '${id}' field declaration`);
+         }
+       }
+
+       const enumBodyRaw = m.groups["enum"];
+       if (enumBodyRaw) {
+         // enum is only allowed for integer declarations
+         switch (field.type) {
+           case "int8":
+           case "int16":
+           case "int32":
+           case "int64":
+           case "uint8":
+           case "uint16":
+           case "uint32":
+           case "uint64":
+             break;
+           default:
+             throw error(`Failed to parse schema: enum declaration in non-integer '${id}' field declaration`);
+         }
+
+         // parse enum values
+         field.enum = new Map();
+         for (const tuple of enumBodyRaw.split(",")) {
+           const [enumName, valueRaw] = tuple.trim().split("=", 2);
+           const enumValue = parseInt(valueRaw!.trim(), 10);
+           if (Number.isNaN(enumValue)) {
+             throw error(
+               `Failed to parse schema: enum declaration contains non-integer value '${valueRaw}' in '${id}' field declaration`
+             );
+           }
+           field.enum.set(enumValue, enumName!.trim());
+         }
+       }
+
+       fields.push(field);
+     }
+
+     // if this descriptor has external dependencies that we have not observed yet,
+     // we cannot map its fields and its packed size is set to zero for now
+     const descriptor = {
+       name,
+       fields,
+       size: unresolved.size > 0 ? 0 : StructRepository.computeFieldOffsets(fields),
+       unresolved: unresolved.size > 0 ? unresolved : undefined,
+     };
+
+     this.descriptors.set(name, descriptor);
+
+     // if this descriptor is final, see if it resolves any unresolved ones
+     if (descriptor.size > 0) {
+       this.resolve(descriptor);
+     } else {
+       this.unresolved.push(descriptor);
+     }
+
+     return descriptor;
+   }
+ }
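A short sketch of how descriptors flow through the repository (both schema strings are hypothetical; "Arm" stays unresolved until its "Joint" dependency arrives):

const repo = new StructRepository();
const encode = (s: string) => new TextEncoder().encode(s);

repo.add("Arm", encode("Joint shoulder; Joint elbow; enum {a=0, b=1} int8 mode"));
repo.canTransform("Arm");                 // false -- 'Joint' has not been seen yet

repo.add("Joint", encode("double angle; bool enabled"));
repo.canTransform("Arm");                 // true  -- dependency resolved, offsets computed
repo.getSize("Arm");                      // 19 bytes: 9 + 9 + 1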
+
+ /** Implements {@link DataTransformer} interface for the `struct` serialization protocol. */
+ export class StructDataTransformer implements DataTransformer {
+   private readonly repo = new StructRepository();
+
+   public inspect(
+     source: string,
+     name: string,
+     type: string,
+     metadata?: string | Record<string, unknown>
+   ): DataChannel | string | undefined {
+     if (name.startsWith("/.schema/struct:")) {
+       if (type !== "structschema") {
+         throw new Error(`Unexpected type '${type}' for struct schema entry`);
+       }
+
+       return name.substring(16);
+     }
+
+     if (type.startsWith("struct:")) {
+       // strip `[]` array suffix from the type name
+       const isArrayType = type.endsWith("[]");
+       return {
+         source,
+         id: name,
+         dataType: "json",
+         publishedDataType: type,
+         transformer: this,
+         structuredType: {
+           name: isArrayType ? type.slice(7, -2) : type.slice(7),
+           format: "struct",
+           isArray: isArrayType,
+         },
+         metadata,
+       };
+     }
+
+     return undefined;
+   }
+
+   public schema(typeName: string, value: unknown): void {
+     this.repo.add(typeName, toUint8Array(value));
+   }
+
+   public deserialize(value: unknown, type?: StructuredTypeDescriptor): DataTypeImpl | undefined {
+     if (type == null) {
+       throw new Error(
+         `Transformation requires type to be specified. This situation should not be possible if the transformer is wired correctly.`
+       );
+     }
+
+     if (this.repo.canTransform(type.name)) {
+       const buffer = toUint8Array(value);
+
+       if (type.isArray) {
+         // special case for a top-level array: buffer contains consecutive fixed-size items
+         const result: Array<Record<string, unknown>> = [];
+         const itemSize = this.repo.getSize(type.name);
+         let byteOffset = 0;
+
+         while (byteOffset + itemSize <= buffer.byteLength) {
+           result.push(
+             this.repo.unpack(type.name, new DataView(buffer.buffer, buffer.byteOffset + byteOffset, itemSize), {
+               useEnum: true,
+             })
+           );
+           byteOffset += itemSize;
+         }
+
+         return result;
+       }
+
+       return this.repo.unpack(type.name, buffer, { useEnum: true });
+     }
+
+     return undefined;
+   }
+
+   public serialize(value: unknown, type?: StructuredTypeDescriptor): Uint8Array {
+     if (type == null) {
+       throw new Error(
+         `Transformation requires type to be specified. This situation should not be possible if the transformer is wired correctly.`
+       );
+     }
+
+     if (value == null || typeof value !== "object") {
+       throw new Error("Only JSON objects can be serialized");
+     }
+
+     if (this.repo.canTransform(type.name)) {
+       if (Array.isArray(value)) {
+         // special case for a top-level array: buffer contains consecutive fixed-size items
+         const itemSize = this.repo.getSize(type.name);
+         const result = new Uint8Array(itemSize * value.length);
+
+         for (let i = 0; i < value.length; ++i) {
+           const part = this.repo.pack(type.name, value[i] as Record<string, unknown>);
+           result.set(part, i * itemSize);
+         }
+
+         return result;
+       }
+
+       return this.repo.pack(type.name, value as Record<string, unknown>);
+     }
+
+     throw new Error(`Struct serialization is not supported for '${type.name}'`);
+   }
+
+   public canTransform(type: string): boolean {
+     return this.repo.canTransform(type);
+   }
+ }
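A sketch of the transformer in use, mirroring how a sink might drive it (the channel names, the "Pose" schema, the source string "nt", and the byte payload are all hypothetical):

const transformer = new StructDataTransformer();

// schema topics announce type definitions; `inspect` returns the bare type name
const typeName = transformer.inspect("nt", "/.schema/struct:Pose", "structschema"); // "Pose"
transformer.schema("Pose", new TextEncoder().encode("double x; double y; double rot"));

// data topics typed `struct:<name>` are mapped to a JSON channel description
const channel = transformer.inspect("nt", "/robot/pose", "struct:Pose");

// with the schema registered, raw payloads can be decoded into plain objects
const payload = new Uint8Array(24);                 // zero-filled example payload (3 doubles)
transformer.deserialize(payload, { name: "Pose", format: "struct", isArray: false });
// -> { x: 0, y: 0, rot: 0 }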