@2702rebels/wpidata 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. package/LICENSE +28 -0
  2. package/README.md +5 -0
  3. package/dist/abstractions.cjs +0 -0
  4. package/dist/abstractions.d.cts +246 -0
  5. package/dist/abstractions.d.cts.map +1 -0
  6. package/dist/abstractions.d.mts +246 -0
  7. package/dist/abstractions.d.mts.map +1 -0
  8. package/dist/abstractions.mjs +1 -0
  9. package/dist/formats/json.cjs +32 -0
  10. package/dist/formats/json.d.cts +14 -0
  11. package/dist/formats/json.d.cts.map +1 -0
  12. package/dist/formats/json.d.mts +14 -0
  13. package/dist/formats/json.d.mts.map +1 -0
  14. package/dist/formats/json.mjs +33 -0
  15. package/dist/formats/json.mjs.map +1 -0
  16. package/dist/formats/msgpack.cjs +30 -0
  17. package/dist/formats/msgpack.d.cts +14 -0
  18. package/dist/formats/msgpack.d.cts.map +1 -0
  19. package/dist/formats/msgpack.d.mts +14 -0
  20. package/dist/formats/msgpack.d.mts.map +1 -0
  21. package/dist/formats/msgpack.mjs +31 -0
  22. package/dist/formats/msgpack.mjs.map +1 -0
  23. package/dist/formats/protobuf.cjs +130 -0
  24. package/dist/formats/protobuf.d.cts +68 -0
  25. package/dist/formats/protobuf.d.cts.map +1 -0
  26. package/dist/formats/protobuf.d.mts +68 -0
  27. package/dist/formats/protobuf.d.mts.map +1 -0
  28. package/dist/formats/protobuf.mjs +128 -0
  29. package/dist/formats/protobuf.mjs.map +1 -0
  30. package/dist/formats/struct.cjs +593 -0
  31. package/dist/formats/struct.d.cts +134 -0
  32. package/dist/formats/struct.d.cts.map +1 -0
  33. package/dist/formats/struct.d.mts +134 -0
  34. package/dist/formats/struct.d.mts.map +1 -0
  35. package/dist/formats/struct.mjs +591 -0
  36. package/dist/formats/struct.mjs.map +1 -0
  37. package/dist/sink.cjs +360 -0
  38. package/dist/sink.d.cts +93 -0
  39. package/dist/sink.d.cts.map +1 -0
  40. package/dist/sink.d.mts +93 -0
  41. package/dist/sink.d.mts.map +1 -0
  42. package/dist/sink.mjs +361 -0
  43. package/dist/sink.mjs.map +1 -0
  44. package/dist/types/protobuf.cjs +0 -0
  45. package/dist/types/protobuf.d.cts +302 -0
  46. package/dist/types/protobuf.d.cts.map +1 -0
  47. package/dist/types/protobuf.d.mts +302 -0
  48. package/dist/types/protobuf.d.mts.map +1 -0
  49. package/dist/types/protobuf.mjs +1 -0
  50. package/dist/types/sendable.cjs +0 -0
  51. package/dist/types/sendable.d.cts +225 -0
  52. package/dist/types/sendable.d.cts.map +1 -0
  53. package/dist/types/sendable.d.mts +225 -0
  54. package/dist/types/sendable.d.mts.map +1 -0
  55. package/dist/types/sendable.mjs +1 -0
  56. package/dist/types/struct.cjs +0 -0
  57. package/dist/types/struct.d.cts +304 -0
  58. package/dist/types/struct.d.cts.map +1 -0
  59. package/dist/types/struct.d.mts +304 -0
  60. package/dist/types/struct.d.mts.map +1 -0
  61. package/dist/types/struct.mjs +1 -0
  62. package/dist/utils.cjs +140 -0
  63. package/dist/utils.d.cts +40 -0
  64. package/dist/utils.d.cts.map +1 -0
  65. package/dist/utils.d.mts +40 -0
  66. package/dist/utils.d.mts.map +1 -0
  67. package/dist/utils.mjs +135 -0
  68. package/dist/utils.mjs.map +1 -0
  69. package/package.json +51 -0
  70. package/src/abstractions.ts +308 -0
  71. package/src/formats/json.ts +53 -0
  72. package/src/formats/msgpack.ts +42 -0
  73. package/src/formats/protobuf.ts +213 -0
  74. package/src/formats/struct.test.ts +814 -0
  75. package/src/formats/struct.ts +992 -0
  76. package/src/sink.ts +611 -0
  77. package/src/types/protobuf.ts +334 -0
  78. package/src/types/sendable.ts +244 -0
  79. package/src/types/struct.ts +333 -0
  80. package/src/utils.ts +241 -0
package/LICENSE ADDED
@@ -0,0 +1,28 @@
+ BSD 3-Clause License
+
+ Copyright (c) 2025, FRC Team 2702 (Rebels)
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package/README.md ADDED
@@ -0,0 +1,5 @@
+ # 🤖 2702 Rebels — Parsers and tools for FRC WPILIB data sources
+
+ Implements data transformers for parsing the FRC WPILIB-specific data encodings used with the NetworkTables and WPILOG
+ protocols. Provides a generic data sink for the receiving end of the data pipeline that supports
+ transformations and in-memory data retention.
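As a rough sketch of how these pieces fit together, a consumer might drive the JSON transformer directly. This is illustrative only: the `@2702rebels/wpidata/formats/json` subpath, the `nt` source string and the topic name are assumptions, not documented entry points.

```ts
// Hypothetical sketch — check package.json "exports" for the real entry points.
import { JsonDataTransformer } from "@2702rebels/wpidata/formats/json";

const transformer = new JsonDataTransformer();

// inspect() claims channels whose published type is "json" and returns a DataChannel.
const channel = transformer.inspect("nt", "/SmartDashboard/status", "json");

if (channel && typeof channel !== "string") {
  // deserialize() accepts a JSON string (or raw UTF-8 bytes) and yields an object.
  const value = channel.transformer?.deserialize('{"enabled":true}');
  console.log(channel.id, value); // "/SmartDashboard/status" { enabled: true }
}
```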
package/dist/abstractions.cjs ADDED
File without changes
package/dist/abstractions.d.cts ADDED
@@ -0,0 +1,246 @@
+ //#region src/abstractions.d.ts
+ type DataType = "boolean" | "string" | "number" | "booleanArray" | "stringArray" | "numberArray" | "json" | "binary" | "composite";
+ type DataTypeImpl = boolean | string | number | Array<boolean> | Array<string> | Array<number> | Record<string, unknown> | Array<Record<string, unknown>> | Uint8Array;
+ interface StructuredTypeDescriptor {
+ /**
+ * **Type name**
+ *
+ * Typically the name represents the original type name without any adorners.
+ * For example, `struct:Pose3d`, `struct:Pose3d[]` would be represented
+ * as `Pose3d`, and `proto:wpi.proto.ProtobufPose3d` as `wpi.proto.ProtobufPose3d`.
+ *
+ * To determine if the parsed value is an array, consult the `isArray` indicator and
+ * use the `Array.isArray` method.
+ */
+ readonly name: string;
+ /**
+ * **Type format**
+ *
+ * Represents the origin (protocol) of the deserialized data.
+ *
+ * Due to WPILIB idiosyncrasies, the deserialized JSON representation of the same
+ * structured type may differ across various serialization protocols.
+ */
+ readonly format: "protobuf" | "struct" | "composite";
+ /**
+ * **Array indicator**
+ *
+ * Indicates that the value contains a top-level array of the type.
+ */
+ readonly isArray?: boolean;
+ }
+ interface DataChannelRecord<T extends DataTypeImpl = DataTypeImpl> {
+ /**
+ * **Timestamp (microseconds)**
+ *
+ * Timestamp of the record in microseconds. Typically represents robot time.
+ */
+ readonly timestamp: number;
+ /**
+ * **Value**
+ *
+ * Record value. Depends on the {@link DataChannel.dataType}.
+ */
+ value: T;
+ }
+ interface DataChannelBase<T extends DataTypeImpl = DataTypeImpl> {
+ /**
+ * **Channel source**
+ *
+ * For example, `nt` or `wpilog`. This field is considered opaque.
+ */
+ readonly source: string;
+ /**
+ * **Channel identifier**
+ *
+ * Must be unique within the data source.
+ * NT4 — topic name
+ * WPILOG — data channel identifier
+ */
+ readonly id: string;
+ /**
+ * **Data representation type**
+ *
+ * This type indicates the processed data representation in the JavaScript world.
+ * For example, an NT4 source may differentiate between numeric types, such as
+ * 32-bit integer and 64-bit float, yet both are stored as `number`.
+ *
+ * Complex types are typically serialized using JSON, msgpack, struct or protobuf
+ * protocols and require a corresponding data transformer to be registered in
+ * the pipeline to unpack them. If no transformer is present, the raw `binary`
+ * representation is used and the data is stored in a byte array buffer.
+ */
+ readonly dataType: DataType;
+ /**
+ * **Raw data type**
+ *
+ * Raw data type string to be used for publishing. Retains original type specifics
+ * that cannot be represented in the JavaScript world, such as differences between
+ * various numeric types.
+ */
+ readonly publishedDataType: string;
+ /**
+ * **Protocol transformer**
+ *
+ * Unpacks raw binary data into a JSON object according to the serialization protocol.
+ */
+ readonly transformer?: DataTransformer;
+ /**
+ * **Structured type descriptor**
+ *
+ * Applicable to complex types serialized as struct or protobuf. Must be set
+ * by the corresponding transformer.
+ */
+ structuredType?: StructuredTypeDescriptor;
+ /**
+ * **Channel metadata**
+ *
+ * For NT4, metadata is extracted from topic properties beyond the default set.
+ * For WPILOG, metadata is read and parsed as a JSON object when possible,
+ * and stored as a string otherwise.
+ *
+ * This field represents the most recently seen values as published by the source.
+ */
+ metadata?: Record<string, unknown> | string;
+ /**
+ * **Timestamped records**
+ *
+ * Timestamps are provided by the data source. The array is sorted by the timestamp
+ * in ascending order; earlier records have smaller indices.
+ */
+ records?: Array<DataChannelRecord<T>>;
+ /**
+ * **Timestamped dead-letter queue**
+ *
+ * Records that cannot yet be unpacked due to missing type descriptors.
+ *
+ * Timestamps are provided by the data source. The array is sorted by the timestamp
+ * in ascending order; earlier records have smaller indices.
+ */
+ dlq?: Array<DataChannelRecord<Uint8Array>>;
+ /**
+ * **Composite channel container**
+ *
+ * Returns the containing composite channel for sub-channels that are part of it.
+ */
+ composite?: CompositeDataChannel;
+ /**
+ * **Publishes a value to the channel**
+ *
+ * @param value value to publish
+ * @param path optional path to the sub-channel
+ * @param options additional publisher options
+ */
+ publish?: (value: unknown, path?: ReadonlyArray<string>, options?: DataChannelPublisherOptions) => void;
+ }
+ interface BooleanDataChannel extends DataChannelBase<boolean> {
+ readonly dataType: "boolean";
+ }
+ interface StringDataChannel extends DataChannelBase<string> {
+ readonly dataType: "string";
+ }
+ interface NumberDataChannel extends DataChannelBase<number> {
+ readonly dataType: "number";
+ }
+ interface BooleanArrayDataChannel extends DataChannelBase<Array<boolean>> {
+ readonly dataType: "booleanArray";
+ }
+ interface StringArrayDataChannel extends DataChannelBase<Array<string>> {
+ readonly dataType: "stringArray";
+ }
+ interface NumberArrayDataChannel extends DataChannelBase<Array<number>> {
+ readonly dataType: "numberArray";
+ }
+ interface JsonDataChannel extends DataChannelBase<Record<string, unknown>> {
+ readonly dataType: "json";
+ }
+ interface BinaryDataChannel extends DataChannelBase<Uint8Array> {
+ readonly dataType: "binary";
+ }
+ interface CompositeDataChannel extends DataChannelBase<Record<string, unknown>> {
+ readonly dataType: "composite";
+ /**
+ * **Sub-channel identifiers and record paths**
+ *
+ * Identifiers of individual data channels that comprise this composite channel
+ * and their respective paths for the data to be written in the JSON record.
+ */
+ channels: Map<string, Array<string>>;
+ }
+ /**
+ * Represents an abstract data channel extracted from one of the supported data sources,
+ * such as NetworkTables or WPILOG.
+ *
+ * A channel tracks one or more data values as a set of timestamped records.
+ */
+ type DataChannel = BooleanDataChannel | StringDataChannel | NumberDataChannel | BooleanArrayDataChannel | StringArrayDataChannel | NumberArrayDataChannel | JsonDataChannel | BinaryDataChannel | CompositeDataChannel;
+ /**
+ * Represents a transformer in the data pipeline.
+ *
+ * Transformers enable conversion of raw binary data into one of the supported
+ * {@link DataType} formats. For example, unpacking `struct`, `protobuf` or `msgpack`
+ * binary data requires a corresponding transformer.
+ *
+ * Some transformers, e.g. `struct` or `protobuf`, may require a schema, transported
+ * separately from the data block entries.
+ */
+ interface DataTransformer {
+ /**
+ * Inspects a channel to determine whether it represents a schema or a data channel
+ * that must run through this transformer.
+ *
+ * This method must return either a {@link DataChannel} instance indicating a data
+ * channel, which must have the {@link DataChannel.transformer} field set, or a string
+ * containing the schema type name indicating a schema channel. Otherwise,
+ * a value of `undefined` must be returned.
+ *
+ * @param source source
+ * @param name channel name
+ * @param type channel type
+ * @param metadata channel metadata
+ * @returns data channel, schema type name or `undefined`
+ */
+ inspect(source: string, name: string, type: string, metadata?: Record<string, unknown> | string): DataChannel | string | undefined;
+ /**
+ * Ingests raw schema data for the specified type name. This method will be invoked
+ * when the data arrives on a schema channel previously identified by {@link inspect}.
+ *
+ * @param typeName schema type name previously returned by {@link inspect}
+ * @param value raw schema data
+ */
+ schema(typeName: string, value: unknown): void;
+ /**
+ * Deserializes raw data. If the data cannot be transformed yet due to missing
+ * schema dependencies, this method must return `undefined` and the data will be
+ * stored in the dead-letter queue to be retried automatically at a later time.
+ *
+ * @param value raw value to deserialize
+ * @param type an optional {@link DataChannel.structuredType} descriptor
+ * @returns a deserialized value on success or `undefined` otherwise
+ */
+ deserialize(value: unknown, type?: StructuredTypeDescriptor): DataTypeImpl | undefined;
+ /**
+ * Serializes data into a raw format for publishing.
+ *
+ * @param value value to serialize
+ * @param type an optional {@link DataChannel.structuredType} descriptor
+ * @returns a serialized representation of the value
+ */
+ serialize(value: unknown, type?: StructuredTypeDescriptor): string | Uint8Array;
+ /**
+ * Determines whether data with the specified `type` can be transformed.
+ *
+ * The transformer may not be able to transform certain types if it is missing
+ * corresponding type descriptors yet to arrive via a schema channel.
+ *
+ * @param type type name
+ */
+ canTransform(type: string): boolean;
+ }
+ interface DataChannelPublisherOptions {
+ /** Published structured type */
+ structuredType?: StructuredTypeDescriptor;
+ }
+ //#endregion
+ export { BinaryDataChannel, BooleanArrayDataChannel, BooleanDataChannel, CompositeDataChannel, DataChannel, DataChannelBase, DataChannelPublisherOptions, DataChannelRecord, DataTransformer, DataType, DataTypeImpl, JsonDataChannel, NumberArrayDataChannel, NumberDataChannel, StringArrayDataChannel, StringDataChannel, StructuredTypeDescriptor };
+ //# sourceMappingURL=abstractions.d.cts.map
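The declarations above fully describe the `DataTransformer` contract, so a custom format can be sketched against it. The sketch below is hypothetical: the `"utf8"` published type, the root-package import and the channel shape are illustrative assumptions, not part of the published API.

```ts
import type { DataChannel, DataTransformer, DataTypeImpl } from "@2702rebels/wpidata";

// Hypothetical transformer that exposes a made-up "utf8" published type as a
// plain string channel; it needs no schema, so schema() is a no-op and
// canTransform() is unconditionally true.
class Utf8DataTransformer implements DataTransformer {
  private readonly decoder = new TextDecoder("utf-8", { fatal: true });

  inspect(source: string, name: string, type: string): DataChannel | string | undefined {
    if (type === "utf8") {
      return {
        source,
        id: name,
        dataType: "string",
        publishedDataType: type,
        transformer: this,
      };
    }
    return undefined;
  }

  schema(): void {}

  deserialize(value: unknown): DataTypeImpl | undefined {
    if (typeof value === "string") return value;
    if (value instanceof Uint8Array) return this.decoder.decode(value);
    return undefined; // unknown payload shape: let the sink park it in the dead-letter queue
  }

  serialize(value: unknown): string | Uint8Array {
    return String(value);
  }

  canTransform(): boolean {
    return true;
  }
}
```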
package/dist/abstractions.d.cts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"abstractions.d.cts","names":[],"sources":["../src/abstractions.ts"],"sourcesContent":[],"mappings":";KAAY,QAAA;AAAA,KAWA,YAAA,GAXQ,OAAA,GAAA,MAAA,GAAA,MAAA,GAehB,KAfgB,CAAA,OAAA,CAAA,GAgBhB,KAhBgB,CAAA,MAAA,CAAA,GAiBhB,KAjBgB,CAAA,MAAA,CAAA,GAkBhB,MAlBgB,CAAA,MAAA,EAAA,OAAA,CAAA,GAmBhB,KAnBgB,CAmBV,MAnBU,CAAA,MAAA,EAAA,OAAA,CAAA,CAAA,GAoBhB,UApBgB;AAWR,UAWK,wBAAA,CAXO;EAIpB;;;;;;;;AAOJ;AA+BA;EAA6C,SAAA,IAAA,EAAA,MAAA;EAAe;;;AAgB5D;;;;;EAqDmB,SAAA,MAAA,EAAA,UAAA,GAAA,QAAA,GAAA,WAAA;EAWN;;;;;EAkBC,SAAA,OAAA,CAAA,EAAA,OAAA;;AAOA,UAzGG,iBAyGH,CAAA,UAzG+B,YAyG/B,GAzG8C,YAyG9C,CAAA,CAAA;EASsB;;;AAGpC;AAIA;EAIiB,SAAA,SAAA,EAAkB,MAAA;EAIlB;AAIjB;AAIA;AAIA;AAIA;EAIiB,KAAA,EAxIR,CAwIQ;;AASO,UA9IP,eA8IO,CAAA,UA9ImB,YA8InB,GA9IkC,YA8IlC,CAAA,CAAA;EAAZ;;;AASZ;;EAEI,SAAA,MAAA,EAAA,MAAA;EACA;;;;;;;EAMoB,SAAA,EAAA,EAAA,MAAA;EAYP;;;;;;;;AA+DjB;;;;qBA9MqB;;;;;;;;;;;;;;yBAgBI;;;;;;;mBAQN;;;;;;;;;;aAWN;;;;;;;YAQD,MAAM,kBAAkB;;;;;;;;;QAU5B,MAAM,kBAAkB;;;;;;cAOlB;;;;;;;;oCASsB,iCAAiC;;UAGpD,kBAAA,SAA2B;;;UAI3B,iBAAA,SAA0B;;;UAI1B,iBAAA,SAA0B;;;UAI1B,uBAAA,SAAgC,gBAAgB;;;UAIhD,sBAAA,SAA+B,gBAAgB;;;UAI/C,sBAAA,SAA+B,gBAAgB;;;UAI/C,eAAA,SAAwB,gBAAgB;;;UAIxC,iBAAA,SAA0B,gBAAgB;;;UAI1C,oBAAA,SAA6B,gBAAgB;;;;;;;;YASlD,YAAY;;;;;;;;KASZ,WAAA,GACR,qBACA,oBACA,oBACA,0BACA,yBACA,yBACA,kBACA,oBACA;;;;;;;;;;;UAYa,eAAA;;;;;;;;;;;;;;;;iEAoBF,mCACV;;;;;;;;;;;;;;;;;;qCAoBgC,2BAA2B;;;;;;;;mCAS7B,oCAAoC;;;;;;;;;;;UAatD,2BAAA;;mBAEE"}
package/dist/abstractions.d.mts ADDED
@@ -0,0 +1,246 @@
+ //#region src/abstractions.d.ts
+ type DataType = "boolean" | "string" | "number" | "booleanArray" | "stringArray" | "numberArray" | "json" | "binary" | "composite";
+ type DataTypeImpl = boolean | string | number | Array<boolean> | Array<string> | Array<number> | Record<string, unknown> | Array<Record<string, unknown>> | Uint8Array;
+ interface StructuredTypeDescriptor {
+ /**
+ * **Type name**
+ *
+ * Typically the name represents the original type name without any adorners.
+ * For example, `struct:Pose3d`, `struct:Pose3d[]` would be represented
+ * as `Pose3d`, and `proto:wpi.proto.ProtobufPose3d` as `wpi.proto.ProtobufPose3d`.
+ *
+ * To determine if the parsed value is an array, consult the `isArray` indicator and
+ * use the `Array.isArray` method.
+ */
+ readonly name: string;
+ /**
+ * **Type format**
+ *
+ * Represents the origin (protocol) of the deserialized data.
+ *
+ * Due to WPILIB idiosyncrasies, the deserialized JSON representation of the same
+ * structured type may differ across various serialization protocols.
+ */
+ readonly format: "protobuf" | "struct" | "composite";
+ /**
+ * **Array indicator**
+ *
+ * Indicates that the value contains a top-level array of the type.
+ */
+ readonly isArray?: boolean;
+ }
+ interface DataChannelRecord<T extends DataTypeImpl = DataTypeImpl> {
+ /**
+ * **Timestamp (microseconds)**
+ *
+ * Timestamp of the record in microseconds. Typically represents robot time.
+ */
+ readonly timestamp: number;
+ /**
+ * **Value**
+ *
+ * Record value. Depends on the {@link DataChannel.dataType}.
+ */
+ value: T;
+ }
+ interface DataChannelBase<T extends DataTypeImpl = DataTypeImpl> {
+ /**
+ * **Channel source**
+ *
+ * For example, `nt` or `wpilog`. This field is considered opaque.
+ */
+ readonly source: string;
+ /**
+ * **Channel identifier**
+ *
+ * Must be unique within the data source.
+ * NT4 — topic name
+ * WPILOG — data channel identifier
+ */
+ readonly id: string;
+ /**
+ * **Data representation type**
+ *
+ * This type indicates the processed data representation in the JavaScript world.
+ * For example, an NT4 source may differentiate between numeric types, such as
+ * 32-bit integer and 64-bit float, yet both are stored as `number`.
+ *
+ * Complex types are typically serialized using JSON, msgpack, struct or protobuf
+ * protocols and require a corresponding data transformer to be registered in
+ * the pipeline to unpack them. If no transformer is present, the raw `binary`
+ * representation is used and the data is stored in a byte array buffer.
+ */
+ readonly dataType: DataType;
+ /**
+ * **Raw data type**
+ *
+ * Raw data type string to be used for publishing. Retains original type specifics
+ * that cannot be represented in the JavaScript world, such as differences between
+ * various numeric types.
+ */
+ readonly publishedDataType: string;
+ /**
+ * **Protocol transformer**
+ *
+ * Unpacks raw binary data into a JSON object according to the serialization protocol.
+ */
+ readonly transformer?: DataTransformer;
+ /**
+ * **Structured type descriptor**
+ *
+ * Applicable to complex types serialized as struct or protobuf. Must be set
+ * by the corresponding transformer.
+ */
+ structuredType?: StructuredTypeDescriptor;
+ /**
+ * **Channel metadata**
+ *
+ * For NT4, metadata is extracted from topic properties beyond the default set.
+ * For WPILOG, metadata is read and parsed as a JSON object when possible,
+ * and stored as a string otherwise.
+ *
+ * This field represents the most recently seen values as published by the source.
+ */
+ metadata?: Record<string, unknown> | string;
+ /**
+ * **Timestamped records**
+ *
+ * Timestamps are provided by the data source. The array is sorted by the timestamp
+ * in ascending order; earlier records have smaller indices.
+ */
+ records?: Array<DataChannelRecord<T>>;
+ /**
+ * **Timestamped dead-letter queue**
+ *
+ * Records that cannot yet be unpacked due to missing type descriptors.
+ *
+ * Timestamps are provided by the data source. The array is sorted by the timestamp
+ * in ascending order; earlier records have smaller indices.
+ */
+ dlq?: Array<DataChannelRecord<Uint8Array>>;
+ /**
+ * **Composite channel container**
+ *
+ * Returns the containing composite channel for sub-channels that are part of it.
+ */
+ composite?: CompositeDataChannel;
+ /**
+ * **Publishes a value to the channel**
+ *
+ * @param value value to publish
+ * @param path optional path to the sub-channel
+ * @param options additional publisher options
+ */
+ publish?: (value: unknown, path?: ReadonlyArray<string>, options?: DataChannelPublisherOptions) => void;
+ }
+ interface BooleanDataChannel extends DataChannelBase<boolean> {
+ readonly dataType: "boolean";
+ }
+ interface StringDataChannel extends DataChannelBase<string> {
+ readonly dataType: "string";
+ }
+ interface NumberDataChannel extends DataChannelBase<number> {
+ readonly dataType: "number";
+ }
+ interface BooleanArrayDataChannel extends DataChannelBase<Array<boolean>> {
+ readonly dataType: "booleanArray";
+ }
+ interface StringArrayDataChannel extends DataChannelBase<Array<string>> {
+ readonly dataType: "stringArray";
+ }
+ interface NumberArrayDataChannel extends DataChannelBase<Array<number>> {
+ readonly dataType: "numberArray";
+ }
+ interface JsonDataChannel extends DataChannelBase<Record<string, unknown>> {
+ readonly dataType: "json";
+ }
+ interface BinaryDataChannel extends DataChannelBase<Uint8Array> {
+ readonly dataType: "binary";
+ }
+ interface CompositeDataChannel extends DataChannelBase<Record<string, unknown>> {
+ readonly dataType: "composite";
+ /**
+ * **Sub-channel identifiers and record paths**
+ *
+ * Identifiers of individual data channels that comprise this composite channel
+ * and their respective paths for the data to be written in the JSON record.
+ */
+ channels: Map<string, Array<string>>;
+ }
+ /**
+ * Represents an abstract data channel extracted from one of the supported data sources,
+ * such as NetworkTables or WPILOG.
+ *
+ * A channel tracks one or more data values as a set of timestamped records.
+ */
+ type DataChannel = BooleanDataChannel | StringDataChannel | NumberDataChannel | BooleanArrayDataChannel | StringArrayDataChannel | NumberArrayDataChannel | JsonDataChannel | BinaryDataChannel | CompositeDataChannel;
+ /**
+ * Represents a transformer in the data pipeline.
+ *
+ * Transformers enable conversion of raw binary data into one of the supported
+ * {@link DataType} formats. For example, unpacking `struct`, `protobuf` or `msgpack`
+ * binary data requires a corresponding transformer.
+ *
+ * Some transformers, e.g. `struct` or `protobuf`, may require a schema, transported
+ * separately from the data block entries.
+ */
+ interface DataTransformer {
+ /**
+ * Inspects a channel to determine whether it represents a schema or a data channel
+ * that must run through this transformer.
+ *
+ * This method must return either a {@link DataChannel} instance indicating a data
+ * channel, which must have the {@link DataChannel.transformer} field set, or a string
+ * containing the schema type name indicating a schema channel. Otherwise,
+ * a value of `undefined` must be returned.
+ *
+ * @param source source
+ * @param name channel name
+ * @param type channel type
+ * @param metadata channel metadata
+ * @returns data channel, schema type name or `undefined`
+ */
+ inspect(source: string, name: string, type: string, metadata?: Record<string, unknown> | string): DataChannel | string | undefined;
+ /**
+ * Ingests raw schema data for the specified type name. This method will be invoked
+ * when the data arrives on a schema channel previously identified by {@link inspect}.
+ *
+ * @param typeName schema type name previously returned by {@link inspect}
+ * @param value raw schema data
+ */
+ schema(typeName: string, value: unknown): void;
+ /**
+ * Deserializes raw data. If the data cannot be transformed yet due to missing
+ * schema dependencies, this method must return `undefined` and the data will be
+ * stored in the dead-letter queue to be retried automatically at a later time.
+ *
+ * @param value raw value to deserialize
+ * @param type an optional {@link DataChannel.structuredType} descriptor
+ * @returns a deserialized value on success or `undefined` otherwise
+ */
+ deserialize(value: unknown, type?: StructuredTypeDescriptor): DataTypeImpl | undefined;
+ /**
+ * Serializes data into a raw format for publishing.
+ *
+ * @param value value to serialize
+ * @param type an optional {@link DataChannel.structuredType} descriptor
+ * @returns a serialized representation of the value
+ */
+ serialize(value: unknown, type?: StructuredTypeDescriptor): string | Uint8Array;
+ /**
+ * Determines whether data with the specified `type` can be transformed.
+ *
+ * The transformer may not be able to transform certain types if it is missing
+ * corresponding type descriptors yet to arrive via a schema channel.
+ *
+ * @param type type name
+ */
+ canTransform(type: string): boolean;
+ }
+ interface DataChannelPublisherOptions {
+ /** Published structured type */
+ structuredType?: StructuredTypeDescriptor;
+ }
+ //#endregion
+ export { BinaryDataChannel, BooleanArrayDataChannel, BooleanDataChannel, CompositeDataChannel, DataChannel, DataChannelBase, DataChannelPublisherOptions, DataChannelRecord, DataTransformer, DataType, DataTypeImpl, JsonDataChannel, NumberArrayDataChannel, NumberDataChannel, StringArrayDataChannel, StringDataChannel, StructuredTypeDescriptor };
+ //# sourceMappingURL=abstractions.d.mts.map
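The `.d.mts` declarations mirror the `.d.cts` ones above. For illustration only, a populated channel object conforming to these declarations might look like the following sketch; the field values, the `"double"` published type and the root-package import are invented.

```ts
import type { NumberDataChannel } from "@2702rebels/wpidata"; // assumed root export

// Illustrative only: a numeric channel with two timestamped records, shaped as
// the DataChannelBase/DataChannelRecord declarations above describe.
const batteryVoltage: NumberDataChannel = {
  source: "nt",
  id: "/SmartDashboard/batteryVoltage",
  dataType: "number",
  publishedDataType: "double",
  records: [
    { timestamp: 1_000_000, value: 12.6 }, // microseconds of robot time
    { timestamp: 1_020_000, value: 12.4 },
  ],
};
```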
package/dist/abstractions.d.mts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"abstractions.d.mts","names":[],"sources":["../src/abstractions.ts"],"sourcesContent":[],"mappings":";KAAY,QAAA;AAAA,KAWA,YAAA,GAXQ,OAAA,GAAA,MAAA,GAAA,MAAA,GAehB,KAfgB,CAAA,OAAA,CAAA,GAgBhB,KAhBgB,CAAA,MAAA,CAAA,GAiBhB,KAjBgB,CAAA,MAAA,CAAA,GAkBhB,MAlBgB,CAAA,MAAA,EAAA,OAAA,CAAA,GAmBhB,KAnBgB,CAmBV,MAnBU,CAAA,MAAA,EAAA,OAAA,CAAA,CAAA,GAoBhB,UApBgB;AAWR,UAWK,wBAAA,CAXO;EAIpB;;;;;;;;AAOJ;AA+BA;EAA6C,SAAA,IAAA,EAAA,MAAA;EAAe;;;AAgB5D;;;;;EAqDmB,SAAA,MAAA,EAAA,UAAA,GAAA,QAAA,GAAA,WAAA;EAWN;;;;;EAkBC,SAAA,OAAA,CAAA,EAAA,OAAA;;AAOA,UAzGG,iBAyGH,CAAA,UAzG+B,YAyG/B,GAzG8C,YAyG9C,CAAA,CAAA;EASsB;;;AAGpC;AAIA;EAIiB,SAAA,SAAA,EAAkB,MAAA;EAIlB;AAIjB;AAIA;AAIA;AAIA;EAIiB,KAAA,EAxIR,CAwIQ;;AASO,UA9IP,eA8IO,CAAA,UA9ImB,YA8InB,GA9IkC,YA8IlC,CAAA,CAAA;EAAZ;;;AASZ;;EAEI,SAAA,MAAA,EAAA,MAAA;EACA;;;;;;;EAMoB,SAAA,EAAA,EAAA,MAAA;EAYP;;;;;;;;AA+DjB;;;;qBA9MqB;;;;;;;;;;;;;;yBAgBI;;;;;;;mBAQN;;;;;;;;;;aAWN;;;;;;;YAQD,MAAM,kBAAkB;;;;;;;;;QAU5B,MAAM,kBAAkB;;;;;;cAOlB;;;;;;;;oCASsB,iCAAiC;;UAGpD,kBAAA,SAA2B;;;UAI3B,iBAAA,SAA0B;;;UAI1B,iBAAA,SAA0B;;;UAI1B,uBAAA,SAAgC,gBAAgB;;;UAIhD,sBAAA,SAA+B,gBAAgB;;;UAI/C,sBAAA,SAA+B,gBAAgB;;;UAI/C,eAAA,SAAwB,gBAAgB;;;UAIxC,iBAAA,SAA0B,gBAAgB;;;UAI1C,oBAAA,SAA6B,gBAAgB;;;;;;;;YASlD,YAAY;;;;;;;;KASZ,WAAA,GACR,qBACA,oBACA,oBACA,0BACA,yBACA,yBACA,kBACA,oBACA;;;;;;;;;;;UAYa,eAAA;;;;;;;;;;;;;;;;iEAoBF,mCACV;;;;;;;;;;;;;;;;;;qCAoBgC,2BAA2B;;;;;;;;mCAS7B,oCAAoC;;;;;;;;;;;UAatD,2BAAA;;mBAEE"}
package/dist/abstractions.mjs ADDED
@@ -0,0 +1 @@
+ export { };
package/dist/formats/json.cjs ADDED
@@ -0,0 +1,32 @@
+ const require_utils = require('../utils.cjs');
+
+ //#region src/formats/json.ts
+ const utf8decoder = new TextDecoder("utf-8", { fatal: true });
+ /** Implements {@link DataTransformer} interface for the `json` serialization protocol. */
+ var JsonDataTransformer = class {
+ inspect(source, name, type, metadata) {
+ if (type === "json") return {
+ source,
+ id: name,
+ dataType: "json",
+ publishedDataType: type,
+ transformer: this,
+ metadata
+ };
+ }
+ schema() {}
+ deserialize(value) {
+ if (typeof value === "string") return value ? JSON.parse(value) : {};
+ return JSON.parse(utf8decoder.decode(require_utils.toUint8Array(value)));
+ }
+ serialize(value) {
+ if (value == null || typeof value !== "object") throw new Error("Only JSON objects can be serialized");
+ return JSON.stringify(value);
+ }
+ canTransform() {
+ return true;
+ }
+ };
+
+ //#endregion
+ exports.JsonDataTransformer = JsonDataTransformer;
package/dist/formats/json.d.cts ADDED
@@ -0,0 +1,14 @@
+ import { DataChannel, DataTransformer, DataTypeImpl } from "../abstractions.cjs";
+
+ //#region src/formats/json.d.ts
+ /** Implements {@link DataTransformer} interface for the `json` serialization protocol. */
+ declare class JsonDataTransformer implements DataTransformer {
+ inspect(source: string, name: string, type: string, metadata?: string | Record<string, unknown>): DataChannel | string | undefined;
+ schema(): void;
+ deserialize(value: unknown): DataTypeImpl | undefined;
+ serialize(value: unknown): string | Uint8Array;
+ canTransform(): boolean;
+ }
+ //#endregion
+ export { JsonDataTransformer };
+ //# sourceMappingURL=json.d.cts.map
package/dist/formats/json.d.cts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"json.d.cts","names":[],"sources":["../../src/formats/json.ts"],"sourcesContent":[],"mappings":";;;;cAUa,mBAAA,YAA+B;EAA/B,OAAA,CAAA,MAAA,EAAA,MAAoB,EAAA,IAAA,EAAA,MAAA,EAAA,IAAA,EAAA,MAAA,EAAA,QAAA,CAAA,EAAA,MAAA,GAKT,MALS,CAAA,MAAA,EAAA,OAAA,CAAA,CAAA,EAM5B,WAN4B,GAAA,MAAA,GAAA,SAAA;EAKT,MAAA,CAAA,CAAA,EAAA,IAAA;EACnB,WAAA,CAAA,KAAA,EAAA,OAAA,CAAA,EAiBiC,YAjBjC,GAAA,SAAA;EAiBiC,SAAA,CAAA,KAAA,EAAA,OAAA,CAAA,EAAA,MAAA,GAQO,UARP;EAQO,YAAA,CAAA,CAAA,EAAA,OAAA"}
package/dist/formats/json.d.mts ADDED
@@ -0,0 +1,14 @@
+ import { DataChannel, DataTransformer, DataTypeImpl } from "../abstractions.mjs";
+
+ //#region src/formats/json.d.ts
+ /** Implements {@link DataTransformer} interface for the `json` serialization protocol. */
+ declare class JsonDataTransformer implements DataTransformer {
+ inspect(source: string, name: string, type: string, metadata?: string | Record<string, unknown>): DataChannel | string | undefined;
+ schema(): void;
+ deserialize(value: unknown): DataTypeImpl | undefined;
+ serialize(value: unknown): string | Uint8Array;
+ canTransform(): boolean;
+ }
+ //#endregion
+ export { JsonDataTransformer };
+ //# sourceMappingURL=json.d.mts.map
package/dist/formats/json.d.mts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"json.d.mts","names":[],"sources":["../../src/formats/json.ts"],"sourcesContent":[],"mappings":";;;;cAUa,mBAAA,YAA+B;EAA/B,OAAA,CAAA,MAAA,EAAA,MAAoB,EAAA,IAAA,EAAA,MAAA,EAAA,IAAA,EAAA,MAAA,EAAA,QAAA,CAAA,EAAA,MAAA,GAKT,MALS,CAAA,MAAA,EAAA,OAAA,CAAA,CAAA,EAM5B,WAN4B,GAAA,MAAA,GAAA,SAAA;EAKT,MAAA,CAAA,CAAA,EAAA,IAAA;EACnB,WAAA,CAAA,KAAA,EAAA,OAAA,CAAA,EAiBiC,YAjBjC,GAAA,SAAA;EAiBiC,SAAA,CAAA,KAAA,EAAA,OAAA,CAAA,EAAA,MAAA,GAQO,UARP;EAQO,YAAA,CAAA,CAAA,EAAA,OAAA"}
package/dist/formats/json.mjs ADDED
@@ -0,0 +1,33 @@
+ import { toUint8Array } from "../utils.mjs";
+
+ //#region src/formats/json.ts
+ const utf8decoder = new TextDecoder("utf-8", { fatal: true });
+ /** Implements {@link DataTransformer} interface for the `json` serialization protocol. */
+ var JsonDataTransformer = class {
+ inspect(source, name, type, metadata) {
+ if (type === "json") return {
+ source,
+ id: name,
+ dataType: "json",
+ publishedDataType: type,
+ transformer: this,
+ metadata
+ };
+ }
+ schema() {}
+ deserialize(value) {
+ if (typeof value === "string") return value ? JSON.parse(value) : {};
+ return JSON.parse(utf8decoder.decode(toUint8Array(value)));
+ }
+ serialize(value) {
+ if (value == null || typeof value !== "object") throw new Error("Only JSON objects can be serialized");
+ return JSON.stringify(value);
+ }
+ canTransform() {
+ return true;
+ }
+ };
+
+ //#endregion
+ export { JsonDataTransformer };
+ //# sourceMappingURL=json.mjs.map
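A short, hypothetical round trip through `JsonDataTransformer` as implemented above; the deep-import path is an assumption based on the `dist/` layout rather than a documented export.

```ts
// Hypothetical example; the deep-import path below is an assumption.
import { JsonDataTransformer } from "@2702rebels/wpidata/formats/json";

const json = new JsonDataTransformer();

// serialize() only accepts objects; primitives, null and undefined throw.
const wire = json.serialize({ pose: { x: 1.2, y: 3.4 } }); // '{"pose":{"x":1.2,"y":3.4}}'

// deserialize() takes a JSON string or raw UTF-8 bytes.
console.log(json.deserialize(wire)); // { pose: { x: 1.2, y: 3.4 } }
console.log(json.deserialize(""));   // {} — empty payloads decode to an empty object
```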
package/dist/formats/json.mjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"json.mjs","names":[],"sources":["../../src/formats/json.ts"],"sourcesContent":["import { toUint8Array } from \"../utils\";\n\nimport type { DataChannel, DataTransformer, DataTypeImpl } from \"../abstractions\";\n\nconst utf8decoder = new TextDecoder(\"utf-8\", {\n // throw TypeError on invalid data instead of silent substitution\n fatal: true,\n});\n\n/** Implements {@link DataTransformer} interface for the `json` serialization protocol. */\nexport class JsonDataTransformer implements DataTransformer {\n public inspect(\n source: string,\n name: string,\n type: string,\n metadata?: string | Record<string, unknown>\n ): DataChannel | string | undefined {\n if (type === \"json\") {\n return {\n source,\n id: name,\n dataType: \"json\",\n publishedDataType: type,\n transformer: this,\n metadata,\n };\n }\n\n return undefined;\n }\n\n public schema() {}\n\n public deserialize(value: unknown): DataTypeImpl | undefined {\n if (typeof value === \"string\") {\n return value ? JSON.parse(value) : {};\n }\n\n return JSON.parse(utf8decoder.decode(toUint8Array(value)));\n }\n\n public serialize(value: unknown): string | Uint8Array {\n if (value == null || typeof value !== \"object\") {\n throw new Error(\"Only JSON objects can be serialized\");\n }\n\n return JSON.stringify(value);\n }\n\n public canTransform(): boolean {\n return true;\n }\n}\n"],"mappings":";;;AAIA,MAAM,cAAc,IAAI,YAAY,SAAS,EAE3C,OAAO,MACR,CAAC;;AAGF,IAAa,sBAAb,MAA4D;CAC1D,AAAO,QACL,QACA,MACA,MACA,UACkC;AAClC,MAAI,SAAS,OACX,QAAO;GACL;GACA,IAAI;GACJ,UAAU;GACV,mBAAmB;GACnB,aAAa;GACb;GACD;;CAML,AAAO,SAAS;CAEhB,AAAO,YAAY,OAA0C;AAC3D,MAAI,OAAO,UAAU,SACnB,QAAO,QAAQ,KAAK,MAAM,MAAM,GAAG,EAAE;AAGvC,SAAO,KAAK,MAAM,YAAY,OAAO,aAAa,MAAM,CAAC,CAAC;;CAG5D,AAAO,UAAU,OAAqC;AACpD,MAAI,SAAS,QAAQ,OAAO,UAAU,SACpC,OAAM,IAAI,MAAM,sCAAsC;AAGxD,SAAO,KAAK,UAAU,MAAM;;CAG9B,AAAO,eAAwB;AAC7B,SAAO"}