@2702rebels/wpidata 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +28 -0
- package/README.md +5 -0
- package/dist/abstractions.cjs +0 -0
- package/dist/abstractions.d.cts +246 -0
- package/dist/abstractions.d.cts.map +1 -0
- package/dist/abstractions.d.mts +246 -0
- package/dist/abstractions.d.mts.map +1 -0
- package/dist/abstractions.mjs +1 -0
- package/dist/formats/json.cjs +32 -0
- package/dist/formats/json.d.cts +14 -0
- package/dist/formats/json.d.cts.map +1 -0
- package/dist/formats/json.d.mts +14 -0
- package/dist/formats/json.d.mts.map +1 -0
- package/dist/formats/json.mjs +33 -0
- package/dist/formats/json.mjs.map +1 -0
- package/dist/formats/msgpack.cjs +30 -0
- package/dist/formats/msgpack.d.cts +14 -0
- package/dist/formats/msgpack.d.cts.map +1 -0
- package/dist/formats/msgpack.d.mts +14 -0
- package/dist/formats/msgpack.d.mts.map +1 -0
- package/dist/formats/msgpack.mjs +31 -0
- package/dist/formats/msgpack.mjs.map +1 -0
- package/dist/formats/protobuf.cjs +130 -0
- package/dist/formats/protobuf.d.cts +68 -0
- package/dist/formats/protobuf.d.cts.map +1 -0
- package/dist/formats/protobuf.d.mts +68 -0
- package/dist/formats/protobuf.d.mts.map +1 -0
- package/dist/formats/protobuf.mjs +128 -0
- package/dist/formats/protobuf.mjs.map +1 -0
- package/dist/formats/struct.cjs +593 -0
- package/dist/formats/struct.d.cts +134 -0
- package/dist/formats/struct.d.cts.map +1 -0
- package/dist/formats/struct.d.mts +134 -0
- package/dist/formats/struct.d.mts.map +1 -0
- package/dist/formats/struct.mjs +591 -0
- package/dist/formats/struct.mjs.map +1 -0
- package/dist/sink.cjs +360 -0
- package/dist/sink.d.cts +93 -0
- package/dist/sink.d.cts.map +1 -0
- package/dist/sink.d.mts +93 -0
- package/dist/sink.d.mts.map +1 -0
- package/dist/sink.mjs +361 -0
- package/dist/sink.mjs.map +1 -0
- package/dist/types/protobuf.cjs +0 -0
- package/dist/types/protobuf.d.cts +302 -0
- package/dist/types/protobuf.d.cts.map +1 -0
- package/dist/types/protobuf.d.mts +302 -0
- package/dist/types/protobuf.d.mts.map +1 -0
- package/dist/types/protobuf.mjs +1 -0
- package/dist/types/sendable.cjs +0 -0
- package/dist/types/sendable.d.cts +225 -0
- package/dist/types/sendable.d.cts.map +1 -0
- package/dist/types/sendable.d.mts +225 -0
- package/dist/types/sendable.d.mts.map +1 -0
- package/dist/types/sendable.mjs +1 -0
- package/dist/types/struct.cjs +0 -0
- package/dist/types/struct.d.cts +304 -0
- package/dist/types/struct.d.cts.map +1 -0
- package/dist/types/struct.d.mts +304 -0
- package/dist/types/struct.d.mts.map +1 -0
- package/dist/types/struct.mjs +1 -0
- package/dist/utils.cjs +140 -0
- package/dist/utils.d.cts +40 -0
- package/dist/utils.d.cts.map +1 -0
- package/dist/utils.d.mts +40 -0
- package/dist/utils.d.mts.map +1 -0
- package/dist/utils.mjs +135 -0
- package/dist/utils.mjs.map +1 -0
- package/package.json +51 -0
- package/src/abstractions.ts +308 -0
- package/src/formats/json.ts +53 -0
- package/src/formats/msgpack.ts +42 -0
- package/src/formats/protobuf.ts +213 -0
- package/src/formats/struct.test.ts +814 -0
- package/src/formats/struct.ts +992 -0
- package/src/sink.ts +611 -0
- package/src/types/protobuf.ts +334 -0
- package/src/types/sendable.ts +244 -0
- package/src/types/struct.ts +333 -0
- package/src/utils.ts +241 -0
package/dist/sink.mjs
ADDED
@@ -0,0 +1,361 @@
import { addTimestampedRecord, getTimestampedRecord, pruneTimestampedRecords, setValueByPath, toUint8Array } from "./utils.mjs";
import { JsonDataTransformer } from "./formats/json.mjs";
import { MsgpackDataTransformer } from "./formats/msgpack.mjs";
import { ProtobufDataTransformer } from "./formats/protobuf.mjs";
import { StructDataTransformer } from "./formats/struct.mjs";

//#region src/sink.ts
/** Converts raw data type name to {@link DataType}. */
function getDataType(v) {
	switch (v.toLocaleLowerCase()) {
		case "boolean": return "boolean";
		case "string": return "string";
		case "double":
		case "int":
		case "float": return "number";
		case "boolean[]": return "booleanArray";
		case "string[]": return "stringArray";
		case "double[]":
		case "int[]":
		case "float[]": return "numberArray";
		case "json":
		case "msgpack": return "json";
		default: return "binary";
	}
}
/** Gets the raw data type of the value to publish. */
function getRawType(v) {
	if (Array.isArray(v)) {
		if (v.length > 0) {
			const itemType = getRawType(v[0]);
			return itemType ? `${itemType}[]` : void 0;
		}
	} else switch (typeof v) {
		case "boolean": return "boolean";
		case "string": return "string";
		case "number": return "double";
		case "object": return "json";
	}
}
/** Enqueues data into the composite channel at the specified path. */
function enqueueDataWithPath(channel, timestamp, data, path) {
	channel.records ??= [];
	const [record, recordIndex] = getTimestampedRecord(channel.records, timestamp);
	let index = recordIndex;
	if (record != null) if (record.timestamp === timestamp) setValueByPath(record.value, path, data);
	else index = addTimestampedRecord(channel.records, timestamp, setValueByPath(structuredClone(record.value), path, data));
	else index = addTimestampedRecord(channel.records, timestamp, setValueByPath({}, path, data));
	for (let i = index + 1; i < channel.records.length; ++i) setValueByPath(channel.records[i].value, path, data, true);
}
/** Enqueues data into the channel. */
function enqueueData(channel, timestamp, data, dlq) {
	if (dlq) {
		channel.dlq ??= [];
		addTimestampedRecord(channel.dlq, timestamp, toUint8Array(data));
	} else if (channel.composite != null) {
		const path = channel.composite.channels.get(channel.id);
		if (path == null) throw new Error(`Invariant violation: ${channel.id} must be a sub-channel of ${channel.composite.id}`);
		enqueueDataWithPath(channel.composite, timestamp, data, path);
	} else {
		channel.records ??= [];
		addTimestampedRecord(channel.records, timestamp, data);
	}
}
/** Converts channel into a sub-channel of a composite channel. */
function convertToSubchannel(channel, composite) {
	const path = channel.id.substring(composite.id.length + 1).split(/\//).map((_) => _.trim()).filter((_) => _.length > 0);
	channel.composite = composite;
	composite.channels.set(channel.id, path);
}
/** Compares two arrays for value equality. */
function arraysEquals(a, b) {
	if (a === b) return true;
	if (a == null || b == null) return false;
	if (a.length !== b.length) return false;
	for (let i = 0; i < a.length; ++i) if (a[i] !== b[i]) return false;
	return true;
}
/** Default console logger for error messages. */
function logError(message, error) {
	console.error(`💣 [DataSink] ${message}`, error);
}
/** Default {@link DataTransformer} used by {@link DataSink} that can be reused across instances. */
const DefaultDataSinkTransformers = [new JsonDataTransformer(), new MsgpackDataTransformer()];
/** A data sink in the data pipeline with support for protocol transformers, data retention, etc. */
var DataSink = class DataSink {
	channels;
	composites;
	schemas;
	transformers;
	retention;
	disableCompositeChannels;
	onDataChannelAdded;
	onDataChannelRemoved;
	logger;
	timestamp;
	structTransformer = new StructDataTransformer();
	protobufTransformer = new ProtobufDataTransformer();
	constructor(options) {
		this.channels = /* @__PURE__ */ new Map();
		this.composites = /* @__PURE__ */ new Map();
		this.schemas = /* @__PURE__ */ new Map();
		this.retention = options?.retention;
		this.onDataChannelAdded = options?.onDataChannelAdded;
		this.onDataChannelRemoved = options?.onDataChannelRemoved;
		this.disableCompositeChannels = options?.disableCompositeChannels ?? false;
		this.logger = options?.logger ?? logError;
		this.timestamp = 0;
		this.transformers = [
			...DefaultDataSinkTransformers,
			this.structTransformer,
			this.protobufTransformer
		];
		if (options?.transformers) this.transformers.push(...options.transformers);
	}
	/** Constructs channel identifier. */
	static createId(source, name) {
		return `${source}:${name}`;
	}
	/** Returns parent composite data channel if one exists. */
	getCompositeParent(id) {
		for (const composite of this.composites.values()) if (id.startsWith(`${composite.id}/`)) return composite;
	}
	/** Registers new data channel. */
	registerChannel(channel, publish) {
		const id = DataSink.createId(channel.source, channel.id);
		this.channels.set(id, channel);
		channel.publish = publish ? this.createChannelPublisher(channel, publish) : void 0;
		let silent = false;
		if (!this.disableCompositeChannels) {
			const composite = this.getCompositeParent(channel.id);
			if (composite != null) {
				convertToSubchannel(channel, composite);
				silent = true;
			}
		}
		if (!silent && this.onDataChannelAdded != null) this.onDataChannelAdded(channel);
	}
	/** Gets sub-channel by its path. */
	getSubChannel(channel, path) {
		for (const [id, p] of channel.channels) if (arraysEquals(path, p)) return this.channels.get(id);
	}
	/** Creates channel publisher. */
	createChannelPublisher(channel, publish) {
		return (value, path, options) => {
			if (path != null && channel.dataType === "composite") {
				const subchannel = this.getSubChannel(channel, path);
				if (subchannel) {
					if (subchannel.transformer != null) value = subchannel.transformer.serialize(value, channel.structuredType);
					publish(subchannel.id, subchannel.publishedDataType, value);
				} else {
					const topic = `${channel.id}/${path.join("/")}`;
					let dataType = getRawType(value);
					if (options?.structuredType) switch (options.structuredType.format) {
						case "composite": return;
						case "struct":
							value = this.structTransformer.serialize(value, options.structuredType);
							dataType = `struct:${options.structuredType.name}`;
							break;
						case "protobuf":
							value = this.protobufTransformer.serialize(value, options.structuredType);
							dataType = `proto:${options.structuredType.name}`;
							break;
					}
					if (dataType) publish(topic, dataType, value);
				}
			} else {
				if (channel.transformer != null) value = channel.transformer.serialize(value, channel.structuredType);
				publish(channel.id, channel.publishedDataType, value);
			}
		};
	}
	/** Returns most recent timestamp in microseconds observed by this instance. */
	get recentTimestamp() {
		return this.timestamp;
	}
	/**
	* Gets a channel descriptor.
	*
	* @param source source, e.g. `nt` or `wpilog`
	* @param name channel name
	*/
	get(source, name) {
		const channel = this.channels.get(DataSink.createId(source, name));
		return channel && channel.composite == null ? channel : void 0;
	}
	/**
	* Adds a channel descriptor.
	*
	* @param source source, e.g. `nt` or `wpilog`
	* @param name channel name
	* @param type channel type
	* @param properties channel properties
	* @param publish channel publisher
	*/
	add(source, name, type, properties, publish) {
		let metadata;
		if (properties != null) {
			if (typeof properties === "string") try {
				metadata = JSON.parse(properties);
			} catch {
				metadata = properties;
			}
			else if (typeof properties === "object") {
				const { persistent, retained, cached, ...other } = properties;
				if (Object.keys(other).length > 0) metadata = other;
			}
		}
		const dataType = getDataType(type);
		if (!this.disableCompositeChannels && name.endsWith("/.type")) {
			const composite = this.getCompositeParent(name);
			if (composite) {
				const channel = {
					source,
					id: name,
					dataType,
					publishedDataType: type,
					metadata
				};
				channel.publish = publish ? this.createChannelPublisher(channel, publish) : void 0;
				const id = DataSink.createId(channel.source, channel.id);
				this.channels.set(id, channel);
				convertToSubchannel(channel, composite);
			} else {
				const channel = {
					source,
					id: name.slice(0, -6),
					dataType: "composite",
					publishedDataType: "",
					metadata,
					channels: /* @__PURE__ */ new Map()
				};
				channel.publish = publish ? this.createChannelPublisher(channel, publish) : void 0;
				const prefix = `${channel.id}/`;
				this.channels.forEach((value) => {
					if (value.id.startsWith(prefix)) {
						convertToSubchannel(value, channel);
						if (this.onDataChannelRemoved != null) this.onDataChannelRemoved(value);
					}
				});
				const id = DataSink.createId(channel.source, channel.id);
				this.composites.set(id, channel);
				this.channels.set(id, channel);
				if (this.onDataChannelAdded != null) this.onDataChannelAdded(channel);
			}
		} else {
			for (const transformer of this.transformers) try {
				const result = transformer.inspect(source, name, type, metadata);
				if (result != null) {
					if (typeof result === "string") this.schemas.set(DataSink.createId(source, name), [transformer, result]);
					else this.registerChannel(result, publish);
					return;
				}
			} catch (exception) {
				this.logger(`Transformer '${typeof transformer}' inspection failed`, exception);
			}
			this.registerChannel({
				source,
				id: name,
				dataType,
				publishedDataType: type,
				metadata
			}, publish);
		}
	}
	/**
	* Enqueues a timestamped value for a named data channel.
	*
	* @param source source, e.g. `nt` or `wpilog`
	* @param name channel name
	* @param timestamp timestamp in microseconds
	* @param value raw value
	*/
	enqueue(source, name, timestamp, value) {
		this.timestamp = Math.max(this.timestamp, timestamp);
		const id = DataSink.createId(source, name);
		const schema = this.schemas.get(id);
		if (schema) {
			const [transformer, typeName] = schema;
			try {
				transformer.schema(typeName, value);
			} catch (exception) {
				this.logger(`Transformer '${typeof transformer}' failed to ingest schema data`, exception);
				return false;
			}
			this.channels.forEach((channel$1) => {
				if (channel$1.transformer === transformer && channel$1.dlq != null && channel$1.dlq.length > 0 && channel$1.structuredType != null && channel$1.transformer.canTransform(channel$1.structuredType.name)) {
					channel$1.records ??= [];
					for (const record of channel$1.dlq) try {
						const v$1 = transformer.deserialize(record.value, channel$1.structuredType);
						if (v$1 != null) enqueueData(channel$1, record.timestamp, v$1, false);
					} catch (exception) {
						this.logger(`Transformer '${typeof transformer}' failed to transform data in channel '${id}'`, exception);
					}
					channel$1.dlq = void 0;
				}
			});
			return true;
		}
		if (!this.disableCompositeChannels && name.endsWith("/.type")) {
			const channel$1 = this.composites.get(id.slice(0, -6));
			if (channel$1 != null) {
				if (typeof value === "string") {
					channel$1.structuredType = {
						name: value,
						format: "composite"
					};
					enqueueDataWithPath(channel$1, timestamp, value, [".type"]);
				}
				return true;
			}
		}
		const channel = this.channels.get(id);
		if (channel == null) return false;
		let v = value;
		try {
			if (channel.transformer != null) {
				v = channel.transformer.deserialize(v, channel.structuredType);
				if (v == null) {
					enqueueData(channel, timestamp, value, true);
					return true;
				}
			}
		} catch (exception) {
			this.logger(`Transformer '${typeof channel.transformer}' failed to transform data in channel '${id}'`, exception);
			return false;
		}
		enqueueData(channel, timestamp, v, false);
		return true;
	}
	/**
	* Prunes old records based on the retention policy if configured.
	*
	* The `currentTimestamp` represents time in the robot clock, not
	* wall clock, typically you want to supply the most recent timestamp
	* reported by live connection protocol, such as NetworkTables.
	* Defaults to {@link recentTimestamp} field.
	*
	* @param currentTimestamp timestamp in microseconds representing current time
	*/
	enforceRetention(currentTimestamp) {
		if (this.retention == null) return;
		const timestamp = currentTimestamp ?? this.timestamp;
		const maxSize = this.retention.maxSize;
		const cutoff = this.retention.maxTimeSeconds != null ? Math.max(0, timestamp - this.retention.maxTimeSeconds * 1e6) : void 0;
		if (maxSize != null && maxSize > 0 || cutoff != null) this.channels.forEach((channel) => {
			if (channel.records != null) pruneTimestampedRecords(channel.records, maxSize, cutoff);
			if (channel.dlq != null) pruneTimestampedRecords(channel.dlq, maxSize, cutoff);
		});
	}
	/** Purges this sink and all its records. */
	purge() {
		this.channels.clear();
		this.composites.clear();
		this.schemas.clear();
		this.timestamp = 0;
	}
};

//#endregion
export { DataSink };
//# sourceMappingURL=sink.mjs.map
package/dist/sink.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"sink.mjs","names":["channel","v"],"sources":["../src/sink.ts"],"sourcesContent":[…],"mappings":"…"} (single-line generated source map with embedded TypeScript source; abridged)
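For orientation, here is a minimal usage sketch of the `DataSink` class added in `dist/sink.mjs` above. It is not taken from the package's documentation: the import path, channel names, sources, and timestamps are illustrative assumptions (the real entry points are defined in `package/package.json`, not shown here), and it assumes the default JSON/msgpack transformers pass over plain `double` topics so they fall through to ordinary channel registration.

// Hypothetical usage sketch — import path and channel names are assumptions, not from the package docs.
import { DataSink } from "@2702rebels/wpidata/dist/sink.mjs";

const sink = new DataSink({
	retention: { maxSize: 10000, maxTimeSeconds: 300 },
	onDataChannelAdded: (channel) => console.log("channel added:", channel.id)
});

// Plain numeric channel: add() registers the descriptor, enqueue() stores timestamped values (microseconds).
sink.add("nt", "/SmartDashboard/Speed", "double");
sink.enqueue("nt", "/SmartDashboard/Speed", 1000000, 1.25);

// A "/.type" topic creates a composite channel; sibling topics are rolled in as sub-channels
// and their values are merged into a single JSON-like record per timestamp.
sink.add("nt", "/SmartDashboard/Field/.type", "string");
sink.add("nt", "/SmartDashboard/Field/x", "double");
sink.enqueue("nt", "/SmartDashboard/Field/.type", 2000000, "Field2d");
sink.enqueue("nt", "/SmartDashboard/Field/x", 2000000, 3.5);

// get() returns only top-level channels; sub-channels are hidden behind their composite parent.
const speed = sink.get("nt", "/SmartDashboard/Speed");
console.log(speed?.records); // e.g. [{ timestamp: 1000000, value: 1.25 }]

// Drop records outside the configured retention window, or reset the sink entirely.
sink.enforceRetention();
sink.purge();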