oscura 0.0.1__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oscura/__init__.py +813 -8
- oscura/__main__.py +392 -0
- oscura/analyzers/__init__.py +37 -0
- oscura/analyzers/digital/__init__.py +177 -0
- oscura/analyzers/digital/bus.py +691 -0
- oscura/analyzers/digital/clock.py +805 -0
- oscura/analyzers/digital/correlation.py +720 -0
- oscura/analyzers/digital/edges.py +632 -0
- oscura/analyzers/digital/extraction.py +413 -0
- oscura/analyzers/digital/quality.py +878 -0
- oscura/analyzers/digital/signal_quality.py +877 -0
- oscura/analyzers/digital/thresholds.py +708 -0
- oscura/analyzers/digital/timing.py +1104 -0
- oscura/analyzers/eye/__init__.py +46 -0
- oscura/analyzers/eye/diagram.py +434 -0
- oscura/analyzers/eye/metrics.py +555 -0
- oscura/analyzers/jitter/__init__.py +83 -0
- oscura/analyzers/jitter/ber.py +333 -0
- oscura/analyzers/jitter/decomposition.py +759 -0
- oscura/analyzers/jitter/measurements.py +413 -0
- oscura/analyzers/jitter/spectrum.py +220 -0
- oscura/analyzers/measurements.py +40 -0
- oscura/analyzers/packet/__init__.py +171 -0
- oscura/analyzers/packet/daq.py +1077 -0
- oscura/analyzers/packet/metrics.py +437 -0
- oscura/analyzers/packet/parser.py +327 -0
- oscura/analyzers/packet/payload.py +2156 -0
- oscura/analyzers/packet/payload_analysis.py +1312 -0
- oscura/analyzers/packet/payload_extraction.py +236 -0
- oscura/analyzers/packet/payload_patterns.py +670 -0
- oscura/analyzers/packet/stream.py +359 -0
- oscura/analyzers/patterns/__init__.py +266 -0
- oscura/analyzers/patterns/clustering.py +1036 -0
- oscura/analyzers/patterns/discovery.py +539 -0
- oscura/analyzers/patterns/learning.py +797 -0
- oscura/analyzers/patterns/matching.py +1091 -0
- oscura/analyzers/patterns/periodic.py +650 -0
- oscura/analyzers/patterns/sequences.py +767 -0
- oscura/analyzers/power/__init__.py +116 -0
- oscura/analyzers/power/ac_power.py +391 -0
- oscura/analyzers/power/basic.py +383 -0
- oscura/analyzers/power/conduction.py +314 -0
- oscura/analyzers/power/efficiency.py +297 -0
- oscura/analyzers/power/ripple.py +356 -0
- oscura/analyzers/power/soa.py +372 -0
- oscura/analyzers/power/switching.py +479 -0
- oscura/analyzers/protocol/__init__.py +150 -0
- oscura/analyzers/protocols/__init__.py +150 -0
- oscura/analyzers/protocols/base.py +500 -0
- oscura/analyzers/protocols/can.py +620 -0
- oscura/analyzers/protocols/can_fd.py +448 -0
- oscura/analyzers/protocols/flexray.py +405 -0
- oscura/analyzers/protocols/hdlc.py +399 -0
- oscura/analyzers/protocols/i2c.py +368 -0
- oscura/analyzers/protocols/i2s.py +296 -0
- oscura/analyzers/protocols/jtag.py +393 -0
- oscura/analyzers/protocols/lin.py +445 -0
- oscura/analyzers/protocols/manchester.py +333 -0
- oscura/analyzers/protocols/onewire.py +501 -0
- oscura/analyzers/protocols/spi.py +334 -0
- oscura/analyzers/protocols/swd.py +325 -0
- oscura/analyzers/protocols/uart.py +393 -0
- oscura/analyzers/protocols/usb.py +495 -0
- oscura/analyzers/signal_integrity/__init__.py +63 -0
- oscura/analyzers/signal_integrity/embedding.py +294 -0
- oscura/analyzers/signal_integrity/equalization.py +370 -0
- oscura/analyzers/signal_integrity/sparams.py +484 -0
- oscura/analyzers/spectral/__init__.py +53 -0
- oscura/analyzers/spectral/chunked.py +273 -0
- oscura/analyzers/spectral/chunked_fft.py +571 -0
- oscura/analyzers/spectral/chunked_wavelet.py +391 -0
- oscura/analyzers/spectral/fft.py +92 -0
- oscura/analyzers/statistical/__init__.py +250 -0
- oscura/analyzers/statistical/checksum.py +923 -0
- oscura/analyzers/statistical/chunked_corr.py +228 -0
- oscura/analyzers/statistical/classification.py +778 -0
- oscura/analyzers/statistical/entropy.py +1113 -0
- oscura/analyzers/statistical/ngrams.py +614 -0
- oscura/analyzers/statistics/__init__.py +119 -0
- oscura/analyzers/statistics/advanced.py +885 -0
- oscura/analyzers/statistics/basic.py +263 -0
- oscura/analyzers/statistics/correlation.py +630 -0
- oscura/analyzers/statistics/distribution.py +298 -0
- oscura/analyzers/statistics/outliers.py +463 -0
- oscura/analyzers/statistics/streaming.py +93 -0
- oscura/analyzers/statistics/trend.py +520 -0
- oscura/analyzers/validation.py +598 -0
- oscura/analyzers/waveform/__init__.py +36 -0
- oscura/analyzers/waveform/measurements.py +943 -0
- oscura/analyzers/waveform/measurements_with_uncertainty.py +371 -0
- oscura/analyzers/waveform/spectral.py +1689 -0
- oscura/analyzers/waveform/wavelets.py +298 -0
- oscura/api/__init__.py +62 -0
- oscura/api/dsl.py +538 -0
- oscura/api/fluent.py +571 -0
- oscura/api/operators.py +498 -0
- oscura/api/optimization.py +392 -0
- oscura/api/profiling.py +396 -0
- oscura/automotive/__init__.py +73 -0
- oscura/automotive/can/__init__.py +52 -0
- oscura/automotive/can/analysis.py +356 -0
- oscura/automotive/can/checksum.py +250 -0
- oscura/automotive/can/correlation.py +212 -0
- oscura/automotive/can/discovery.py +355 -0
- oscura/automotive/can/message_wrapper.py +375 -0
- oscura/automotive/can/models.py +385 -0
- oscura/automotive/can/patterns.py +381 -0
- oscura/automotive/can/session.py +452 -0
- oscura/automotive/can/state_machine.py +300 -0
- oscura/automotive/can/stimulus_response.py +461 -0
- oscura/automotive/dbc/__init__.py +15 -0
- oscura/automotive/dbc/generator.py +156 -0
- oscura/automotive/dbc/parser.py +146 -0
- oscura/automotive/dtc/__init__.py +30 -0
- oscura/automotive/dtc/database.py +3036 -0
- oscura/automotive/j1939/__init__.py +14 -0
- oscura/automotive/j1939/decoder.py +745 -0
- oscura/automotive/loaders/__init__.py +35 -0
- oscura/automotive/loaders/asc.py +98 -0
- oscura/automotive/loaders/blf.py +77 -0
- oscura/automotive/loaders/csv_can.py +136 -0
- oscura/automotive/loaders/dispatcher.py +136 -0
- oscura/automotive/loaders/mdf.py +331 -0
- oscura/automotive/loaders/pcap.py +132 -0
- oscura/automotive/obd/__init__.py +14 -0
- oscura/automotive/obd/decoder.py +707 -0
- oscura/automotive/uds/__init__.py +48 -0
- oscura/automotive/uds/decoder.py +265 -0
- oscura/automotive/uds/models.py +64 -0
- oscura/automotive/visualization.py +369 -0
- oscura/batch/__init__.py +55 -0
- oscura/batch/advanced.py +627 -0
- oscura/batch/aggregate.py +300 -0
- oscura/batch/analyze.py +139 -0
- oscura/batch/logging.py +487 -0
- oscura/batch/metrics.py +556 -0
- oscura/builders/__init__.py +41 -0
- oscura/builders/signal_builder.py +1131 -0
- oscura/cli/__init__.py +14 -0
- oscura/cli/batch.py +339 -0
- oscura/cli/characterize.py +273 -0
- oscura/cli/compare.py +775 -0
- oscura/cli/decode.py +551 -0
- oscura/cli/main.py +247 -0
- oscura/cli/shell.py +350 -0
- oscura/comparison/__init__.py +66 -0
- oscura/comparison/compare.py +397 -0
- oscura/comparison/golden.py +487 -0
- oscura/comparison/limits.py +391 -0
- oscura/comparison/mask.py +434 -0
- oscura/comparison/trace_diff.py +30 -0
- oscura/comparison/visualization.py +481 -0
- oscura/compliance/__init__.py +70 -0
- oscura/compliance/advanced.py +756 -0
- oscura/compliance/masks.py +363 -0
- oscura/compliance/reporting.py +483 -0
- oscura/compliance/testing.py +298 -0
- oscura/component/__init__.py +38 -0
- oscura/component/impedance.py +365 -0
- oscura/component/reactive.py +598 -0
- oscura/component/transmission_line.py +312 -0
- oscura/config/__init__.py +191 -0
- oscura/config/defaults.py +254 -0
- oscura/config/loader.py +348 -0
- oscura/config/memory.py +271 -0
- oscura/config/migration.py +458 -0
- oscura/config/pipeline.py +1077 -0
- oscura/config/preferences.py +530 -0
- oscura/config/protocol.py +875 -0
- oscura/config/schema.py +713 -0
- oscura/config/settings.py +420 -0
- oscura/config/thresholds.py +599 -0
- oscura/convenience.py +457 -0
- oscura/core/__init__.py +299 -0
- oscura/core/audit.py +457 -0
- oscura/core/backend_selector.py +405 -0
- oscura/core/cache.py +590 -0
- oscura/core/cancellation.py +439 -0
- oscura/core/confidence.py +225 -0
- oscura/core/config.py +506 -0
- oscura/core/correlation.py +216 -0
- oscura/core/cross_domain.py +422 -0
- oscura/core/debug.py +301 -0
- oscura/core/edge_cases.py +541 -0
- oscura/core/exceptions.py +535 -0
- oscura/core/gpu_backend.py +523 -0
- oscura/core/lazy.py +832 -0
- oscura/core/log_query.py +540 -0
- oscura/core/logging.py +931 -0
- oscura/core/logging_advanced.py +952 -0
- oscura/core/memoize.py +171 -0
- oscura/core/memory_check.py +274 -0
- oscura/core/memory_guard.py +290 -0
- oscura/core/memory_limits.py +336 -0
- oscura/core/memory_monitor.py +453 -0
- oscura/core/memory_progress.py +465 -0
- oscura/core/memory_warnings.py +315 -0
- oscura/core/numba_backend.py +362 -0
- oscura/core/performance.py +352 -0
- oscura/core/progress.py +524 -0
- oscura/core/provenance.py +358 -0
- oscura/core/results.py +331 -0
- oscura/core/types.py +504 -0
- oscura/core/uncertainty.py +383 -0
- oscura/discovery/__init__.py +52 -0
- oscura/discovery/anomaly_detector.py +672 -0
- oscura/discovery/auto_decoder.py +415 -0
- oscura/discovery/comparison.py +497 -0
- oscura/discovery/quality_validator.py +528 -0
- oscura/discovery/signal_detector.py +769 -0
- oscura/dsl/__init__.py +73 -0
- oscura/dsl/commands.py +246 -0
- oscura/dsl/interpreter.py +455 -0
- oscura/dsl/parser.py +689 -0
- oscura/dsl/repl.py +172 -0
- oscura/exceptions.py +59 -0
- oscura/exploratory/__init__.py +111 -0
- oscura/exploratory/error_recovery.py +642 -0
- oscura/exploratory/fuzzy.py +513 -0
- oscura/exploratory/fuzzy_advanced.py +786 -0
- oscura/exploratory/legacy.py +831 -0
- oscura/exploratory/parse.py +358 -0
- oscura/exploratory/recovery.py +275 -0
- oscura/exploratory/sync.py +382 -0
- oscura/exploratory/unknown.py +707 -0
- oscura/export/__init__.py +25 -0
- oscura/export/wireshark/README.md +265 -0
- oscura/export/wireshark/__init__.py +47 -0
- oscura/export/wireshark/generator.py +312 -0
- oscura/export/wireshark/lua_builder.py +159 -0
- oscura/export/wireshark/templates/dissector.lua.j2 +92 -0
- oscura/export/wireshark/type_mapping.py +165 -0
- oscura/export/wireshark/validator.py +105 -0
- oscura/exporters/__init__.py +94 -0
- oscura/exporters/csv.py +303 -0
- oscura/exporters/exporters.py +44 -0
- oscura/exporters/hdf5.py +219 -0
- oscura/exporters/html_export.py +701 -0
- oscura/exporters/json_export.py +291 -0
- oscura/exporters/markdown_export.py +367 -0
- oscura/exporters/matlab_export.py +354 -0
- oscura/exporters/npz_export.py +219 -0
- oscura/exporters/spice_export.py +210 -0
- oscura/extensibility/__init__.py +131 -0
- oscura/extensibility/docs.py +752 -0
- oscura/extensibility/extensions.py +1125 -0
- oscura/extensibility/logging.py +259 -0
- oscura/extensibility/measurements.py +485 -0
- oscura/extensibility/plugins.py +414 -0
- oscura/extensibility/registry.py +346 -0
- oscura/extensibility/templates.py +913 -0
- oscura/extensibility/validation.py +651 -0
- oscura/filtering/__init__.py +89 -0
- oscura/filtering/base.py +563 -0
- oscura/filtering/convenience.py +564 -0
- oscura/filtering/design.py +725 -0
- oscura/filtering/filters.py +32 -0
- oscura/filtering/introspection.py +605 -0
- oscura/guidance/__init__.py +24 -0
- oscura/guidance/recommender.py +429 -0
- oscura/guidance/wizard.py +518 -0
- oscura/inference/__init__.py +251 -0
- oscura/inference/active_learning/README.md +153 -0
- oscura/inference/active_learning/__init__.py +38 -0
- oscura/inference/active_learning/lstar.py +257 -0
- oscura/inference/active_learning/observation_table.py +230 -0
- oscura/inference/active_learning/oracle.py +78 -0
- oscura/inference/active_learning/teachers/__init__.py +15 -0
- oscura/inference/active_learning/teachers/simulator.py +192 -0
- oscura/inference/adaptive_tuning.py +453 -0
- oscura/inference/alignment.py +653 -0
- oscura/inference/bayesian.py +943 -0
- oscura/inference/binary.py +1016 -0
- oscura/inference/crc_reverse.py +711 -0
- oscura/inference/logic.py +288 -0
- oscura/inference/message_format.py +1305 -0
- oscura/inference/protocol.py +417 -0
- oscura/inference/protocol_dsl.py +1084 -0
- oscura/inference/protocol_library.py +1230 -0
- oscura/inference/sequences.py +809 -0
- oscura/inference/signal_intelligence.py +1509 -0
- oscura/inference/spectral.py +215 -0
- oscura/inference/state_machine.py +634 -0
- oscura/inference/stream.py +918 -0
- oscura/integrations/__init__.py +59 -0
- oscura/integrations/llm.py +1827 -0
- oscura/jupyter/__init__.py +32 -0
- oscura/jupyter/display.py +268 -0
- oscura/jupyter/magic.py +334 -0
- oscura/loaders/__init__.py +526 -0
- oscura/loaders/binary.py +69 -0
- oscura/loaders/configurable.py +1255 -0
- oscura/loaders/csv.py +26 -0
- oscura/loaders/csv_loader.py +473 -0
- oscura/loaders/hdf5.py +9 -0
- oscura/loaders/hdf5_loader.py +510 -0
- oscura/loaders/lazy.py +370 -0
- oscura/loaders/mmap_loader.py +583 -0
- oscura/loaders/numpy_loader.py +436 -0
- oscura/loaders/pcap.py +432 -0
- oscura/loaders/preprocessing.py +368 -0
- oscura/loaders/rigol.py +287 -0
- oscura/loaders/sigrok.py +321 -0
- oscura/loaders/tdms.py +367 -0
- oscura/loaders/tektronix.py +711 -0
- oscura/loaders/validation.py +584 -0
- oscura/loaders/vcd.py +464 -0
- oscura/loaders/wav.py +233 -0
- oscura/math/__init__.py +45 -0
- oscura/math/arithmetic.py +824 -0
- oscura/math/interpolation.py +413 -0
- oscura/onboarding/__init__.py +39 -0
- oscura/onboarding/help.py +498 -0
- oscura/onboarding/tutorials.py +405 -0
- oscura/onboarding/wizard.py +466 -0
- oscura/optimization/__init__.py +19 -0
- oscura/optimization/parallel.py +440 -0
- oscura/optimization/search.py +532 -0
- oscura/pipeline/__init__.py +43 -0
- oscura/pipeline/base.py +338 -0
- oscura/pipeline/composition.py +242 -0
- oscura/pipeline/parallel.py +448 -0
- oscura/pipeline/pipeline.py +375 -0
- oscura/pipeline/reverse_engineering.py +1119 -0
- oscura/plugins/__init__.py +122 -0
- oscura/plugins/base.py +272 -0
- oscura/plugins/cli.py +497 -0
- oscura/plugins/discovery.py +411 -0
- oscura/plugins/isolation.py +418 -0
- oscura/plugins/lifecycle.py +959 -0
- oscura/plugins/manager.py +493 -0
- oscura/plugins/registry.py +421 -0
- oscura/plugins/versioning.py +372 -0
- oscura/py.typed +0 -0
- oscura/quality/__init__.py +65 -0
- oscura/quality/ensemble.py +740 -0
- oscura/quality/explainer.py +338 -0
- oscura/quality/scoring.py +616 -0
- oscura/quality/warnings.py +456 -0
- oscura/reporting/__init__.py +248 -0
- oscura/reporting/advanced.py +1234 -0
- oscura/reporting/analyze.py +448 -0
- oscura/reporting/argument_preparer.py +596 -0
- oscura/reporting/auto_report.py +507 -0
- oscura/reporting/batch.py +615 -0
- oscura/reporting/chart_selection.py +223 -0
- oscura/reporting/comparison.py +330 -0
- oscura/reporting/config.py +615 -0
- oscura/reporting/content/__init__.py +39 -0
- oscura/reporting/content/executive.py +127 -0
- oscura/reporting/content/filtering.py +191 -0
- oscura/reporting/content/minimal.py +257 -0
- oscura/reporting/content/verbosity.py +162 -0
- oscura/reporting/core.py +508 -0
- oscura/reporting/core_formats/__init__.py +17 -0
- oscura/reporting/core_formats/multi_format.py +210 -0
- oscura/reporting/engine.py +836 -0
- oscura/reporting/export.py +366 -0
- oscura/reporting/formatting/__init__.py +129 -0
- oscura/reporting/formatting/emphasis.py +81 -0
- oscura/reporting/formatting/numbers.py +403 -0
- oscura/reporting/formatting/standards.py +55 -0
- oscura/reporting/formatting.py +466 -0
- oscura/reporting/html.py +578 -0
- oscura/reporting/index.py +590 -0
- oscura/reporting/multichannel.py +296 -0
- oscura/reporting/output.py +379 -0
- oscura/reporting/pdf.py +373 -0
- oscura/reporting/plots.py +731 -0
- oscura/reporting/pptx_export.py +360 -0
- oscura/reporting/renderers/__init__.py +11 -0
- oscura/reporting/renderers/pdf.py +94 -0
- oscura/reporting/sections.py +471 -0
- oscura/reporting/standards.py +680 -0
- oscura/reporting/summary_generator.py +368 -0
- oscura/reporting/tables.py +397 -0
- oscura/reporting/template_system.py +724 -0
- oscura/reporting/templates/__init__.py +15 -0
- oscura/reporting/templates/definition.py +205 -0
- oscura/reporting/templates/index.html +649 -0
- oscura/reporting/templates/index.md +173 -0
- oscura/schemas/__init__.py +158 -0
- oscura/schemas/bus_configuration.json +322 -0
- oscura/schemas/device_mapping.json +182 -0
- oscura/schemas/packet_format.json +418 -0
- oscura/schemas/protocol_definition.json +363 -0
- oscura/search/__init__.py +16 -0
- oscura/search/anomaly.py +292 -0
- oscura/search/context.py +149 -0
- oscura/search/pattern.py +160 -0
- oscura/session/__init__.py +34 -0
- oscura/session/annotations.py +289 -0
- oscura/session/history.py +313 -0
- oscura/session/session.py +445 -0
- oscura/streaming/__init__.py +43 -0
- oscura/streaming/chunked.py +611 -0
- oscura/streaming/progressive.py +393 -0
- oscura/streaming/realtime.py +622 -0
- oscura/testing/__init__.py +54 -0
- oscura/testing/synthetic.py +808 -0
- oscura/triggering/__init__.py +68 -0
- oscura/triggering/base.py +229 -0
- oscura/triggering/edge.py +353 -0
- oscura/triggering/pattern.py +344 -0
- oscura/triggering/pulse.py +581 -0
- oscura/triggering/window.py +453 -0
- oscura/ui/__init__.py +48 -0
- oscura/ui/formatters.py +526 -0
- oscura/ui/progressive_display.py +340 -0
- oscura/utils/__init__.py +99 -0
- oscura/utils/autodetect.py +338 -0
- oscura/utils/buffer.py +389 -0
- oscura/utils/lazy.py +407 -0
- oscura/utils/lazy_imports.py +147 -0
- oscura/utils/memory.py +836 -0
- oscura/utils/memory_advanced.py +1326 -0
- oscura/utils/memory_extensions.py +465 -0
- oscura/utils/progressive.py +352 -0
- oscura/utils/windowing.py +362 -0
- oscura/visualization/__init__.py +321 -0
- oscura/visualization/accessibility.py +526 -0
- oscura/visualization/annotations.py +374 -0
- oscura/visualization/axis_scaling.py +305 -0
- oscura/visualization/colors.py +453 -0
- oscura/visualization/digital.py +337 -0
- oscura/visualization/eye.py +420 -0
- oscura/visualization/histogram.py +281 -0
- oscura/visualization/interactive.py +858 -0
- oscura/visualization/jitter.py +702 -0
- oscura/visualization/keyboard.py +394 -0
- oscura/visualization/layout.py +365 -0
- oscura/visualization/optimization.py +1028 -0
- oscura/visualization/palettes.py +446 -0
- oscura/visualization/plot.py +92 -0
- oscura/visualization/power.py +290 -0
- oscura/visualization/power_extended.py +626 -0
- oscura/visualization/presets.py +467 -0
- oscura/visualization/protocols.py +932 -0
- oscura/visualization/render.py +207 -0
- oscura/visualization/rendering.py +444 -0
- oscura/visualization/reverse_engineering.py +791 -0
- oscura/visualization/signal_integrity.py +808 -0
- oscura/visualization/specialized.py +553 -0
- oscura/visualization/spectral.py +811 -0
- oscura/visualization/styles.py +381 -0
- oscura/visualization/thumbnails.py +311 -0
- oscura/visualization/time_axis.py +351 -0
- oscura/visualization/waveform.py +367 -0
- oscura/workflow/__init__.py +13 -0
- oscura/workflow/dag.py +377 -0
- oscura/workflows/__init__.py +58 -0
- oscura/workflows/compliance.py +280 -0
- oscura/workflows/digital.py +272 -0
- oscura/workflows/multi_trace.py +502 -0
- oscura/workflows/power.py +178 -0
- oscura/workflows/protocol.py +492 -0
- oscura/workflows/reverse_engineering.py +639 -0
- oscura/workflows/signal_integrity.py +227 -0
- oscura-0.1.0.dist-info/METADATA +300 -0
- oscura-0.1.0.dist-info/RECORD +463 -0
- oscura-0.1.0.dist-info/entry_points.txt +2 -0
- {oscura-0.0.1.dist-info → oscura-0.1.0.dist-info}/licenses/LICENSE +1 -1
- oscura-0.0.1.dist-info/METADATA +0 -63
- oscura-0.0.1.dist-info/RECORD +0 -5
- {oscura-0.0.1.dist-info → oscura-0.1.0.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,1084 @@
|
|
|
1
|
+
"""Protocol Definition Language parser and decoder generator.
|
|
2
|
+
|
|
3
|
+
Requirements addressed: PSI-004
|
|
4
|
+
|
|
5
|
+
This module provides a declarative DSL for defining custom protocol formats
|
|
6
|
+
that can be used to generate decoders and encoders automatically.
|
|
7
|
+
|
|
8
|
+
Key capabilities:
|
|
9
|
+
- Parse YAML-based protocol definitions
|
|
10
|
+
- Support all common field types
|
|
11
|
+
- Conditional fields and length-prefixed data
|
|
12
|
+
- Generate efficient decoders and encoders
|
|
13
|
+
- Comprehensive error reporting
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
import ast
|
|
17
|
+
import operator
|
|
18
|
+
import struct
|
|
19
|
+
from collections.abc import Iterator
|
|
20
|
+
from dataclasses import dataclass, field
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
from typing import Any, Literal
|
|
23
|
+
|
|
24
|
+
import yaml
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass
|
|
28
|
+
class FieldDefinition:
|
|
29
|
+
"""Protocol field definition.
|
|
30
|
+
|
|
31
|
+
: Field specification.
|
|
32
|
+
|
|
33
|
+
Attributes:
|
|
34
|
+
name: Field name
|
|
35
|
+
field_type: Field type (uint8, uint16, int32, float32, bytes, string, bitfield, array, struct)
|
|
36
|
+
Also accessible as 'type' for compatibility.
|
|
37
|
+
size: Field size (literal or reference to length field)
|
|
38
|
+
offset: Field offset (optional, auto-calculated if not provided)
|
|
39
|
+
endian: Byte order ('big' or 'little')
|
|
40
|
+
condition: Conditional expression for optional fields
|
|
41
|
+
enum: Enumeration mapping for integer fields
|
|
42
|
+
validation: Validation rules
|
|
43
|
+
default: Default value
|
|
44
|
+
description: Human-readable description
|
|
45
|
+
value: Expected value for constant fields
|
|
46
|
+
size_ref: Reference to length field (alias for size when string)
|
|
47
|
+
element: Element definition for array types (contains type and optionally fields for struct)
|
|
48
|
+
count_field: Field name that contains the array count
|
|
49
|
+
count: Fixed array count
|
|
50
|
+
fields: List of nested field definitions for struct types
|
|
51
|
+
"""
|
|
52
|
+
|
|
53
|
+
name: str
|
|
54
|
+
field_type: str = (
|
|
55
|
+
"uint8" # uint8, uint16, int32, float32, bytes, string, bitfield, array, struct
|
|
56
|
+
)
|
|
57
|
+
size: int | str | None = None # Can be literal or reference to length field
|
|
58
|
+
offset: int | None = None
|
|
59
|
+
endian: Literal["big", "little"] = "big"
|
|
60
|
+
condition: str | None = None # Conditional field
|
|
61
|
+
enum: dict[int, str] | dict[str, Any] | None = None
|
|
62
|
+
validation: dict[str, Any] | None = None
|
|
63
|
+
default: Any = None
|
|
64
|
+
description: str = ""
|
|
65
|
+
value: Any = None # Expected constant value
|
|
66
|
+
size_ref: str | None = None # Alias for size reference
|
|
67
|
+
# Array/struct specific fields
|
|
68
|
+
element: dict[str, Any] | None = None # Element definition for arrays
|
|
69
|
+
count_field: str | None = None # Field containing array count
|
|
70
|
+
count: int | None = None # Fixed array count
|
|
71
|
+
fields: list["FieldDefinition"] | None = None # Nested fields for struct type
|
|
72
|
+
|
|
73
|
+
def __post_init__(self) -> None:
|
|
74
|
+
"""Handle size_ref as alias for size."""
|
|
75
|
+
if self.size_ref is not None and self.size is None:
|
|
76
|
+
self.size = self.size_ref
|
|
77
|
+
|
|
78
|
+
@property
|
|
79
|
+
def type(self) -> str:
|
|
80
|
+
"""Alias for field_type for backward compatibility."""
|
|
81
|
+
return self.field_type
|
|
82
|
+
|
|
83
|
+
@type.setter
|
|
84
|
+
def type(self, value: str) -> None:
|
|
85
|
+
"""Set field_type via type property."""
|
|
86
|
+
self.field_type = value
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
@dataclass
class ProtocolDefinition:
    """Complete protocol definition.

    Attributes:
        name: Protocol name.
        description: Protocol description.
        version: Protocol version.
        endian: Default endianness applied to all fields.
        fields: List of field definitions.
        settings: Global settings (endianness, etc.).
        framing: Framing/sync configuration.
        computed_fields: Computed/derived fields.
        decoding: Decoding settings.
        encoding: Encoding settings.
    """

    name: str
    description: str = ""
    version: str = "1.0"
    endian: Literal["big", "little"] = "big"
    fields: list[FieldDefinition] = field(default_factory=list)
    settings: dict[str, Any] = field(default_factory=dict)
    framing: dict[str, Any] = field(default_factory=dict)
    computed_fields: list[dict[str, Any]] = field(default_factory=list)
    decoding: dict[str, Any] = field(default_factory=dict)
    encoding: dict[str, Any] = field(default_factory=dict)

    @classmethod
    def from_yaml(cls, path: str | Path) -> "ProtocolDefinition":
        """Load a protocol definition from a YAML file.

        Args:
            path: Path to the YAML file.

        Returns:
            ProtocolDefinition instance.
        """
        # Explicit encoding: YAML protocol files are UTF-8 regardless of
        # the platform's locale default (the old bare open() could fail or
        # misdecode on e.g. Windows cp1252).
        with open(path, encoding="utf-8") as f:
            config = yaml.safe_load(f)

        return cls.from_dict(config)

    @classmethod
    def from_dict(cls, config: dict[str, Any]) -> "ProtocolDefinition":
        """Create a definition from a configuration dictionary.

        Args:
            config: Configuration dictionary.

        Returns:
            ProtocolDefinition instance.
        """
        # Parse field definitions, propagating the protocol-level default
        # endianness to any field that does not override it.
        default_endian = config.get("endian", "big")
        field_defs = [
            cls._parse_field_definition(field_dict, default_endian)
            for field_dict in config.get("fields", [])
        ]

        return cls(
            name=config.get("name", "unknown"),
            version=config.get("version", "1.0"),
            description=config.get("description", ""),
            endian=default_endian,
            settings=config.get("settings", {}),
            framing=config.get("framing", {}),
            fields=field_defs,
            computed_fields=config.get("computed_fields", []),
            decoding=config.get("decoding", {}),
            encoding=config.get("encoding", {}),
        )

    @classmethod
    def _parse_field_definition(
        cls, field_dict: dict[str, Any], default_endian: str
    ) -> "FieldDefinition":
        """Parse a single field definition from a dictionary.

        Args:
            field_dict: Field configuration dictionary.
            default_endian: Default endianness for fields without their own.

        Returns:
            FieldDefinition instance.
        """
        # Support both 'type' and 'field_type' attribute names.
        field_type = field_dict.get("type") or field_dict.get("field_type", "uint8")

        # Recursively parse nested fields for struct types.
        nested_fields: list[FieldDefinition] | None = None
        if field_dict.get("fields"):
            nested_fields = [
                cls._parse_field_definition(f, default_endian) for f in field_dict["fields"]
            ]

        return FieldDefinition(
            name=field_dict["name"],
            field_type=field_type,
            size=field_dict.get("size"),
            offset=field_dict.get("offset"),
            endian=field_dict.get("endian", default_endian),
            condition=field_dict.get("condition"),
            enum=field_dict.get("enum"),
            validation=field_dict.get("validation"),
            default=field_dict.get("default"),
            description=field_dict.get("description", ""),
            value=field_dict.get("value"),
            size_ref=field_dict.get("size_ref"),
            element=field_dict.get("element"),
            count_field=field_dict.get("count_field"),
            count=field_dict.get("count"),
            fields=nested_fields,
        )
@dataclass
class DecodedMessage:
    """A decoded protocol message.

    Behaves like a read-only mapping over its decoded fields, so callers
    may write ``"field_name" in message`` and ``message["field_name"]``.

    Attributes:
        fields: Mapping of field name to decoded value.
        raw_data: Original binary payload.
        size: Message size in bytes.
        valid: Whether the message passed validation.
        errors: Validation error messages.
    """

    fields: dict[str, Any]
    raw_data: bytes
    size: int
    valid: bool
    errors: list[str]

    def __contains__(self, key: str) -> bool:
        """Return True when *key* names a decoded field."""
        return key in self.fields

    def __getitem__(self, key: str) -> Any:
        """Look up a decoded field by name."""
        return self.fields[key]

    def __iter__(self) -> Iterator[str]:
        """Yield the decoded field names."""
        return iter(self.fields)

    def get(self, key: str, default: Any = None) -> Any:
        """Return the field's value, or *default* when it is absent."""
        return self.fields.get(key, default)

    def keys(self) -> Any:
        """Return a view of the field names."""
        return self.fields.keys()

    def values(self) -> Any:
        """Return a view of the field values."""
        return self.fields.values()

    def items(self) -> Any:
        """Return a view of the (name, value) pairs."""
        return self.fields.items()
class _SafeConditionEvaluator(ast.NodeVisitor):
|
|
264
|
+
"""Safe evaluator for protocol field conditions.
|
|
265
|
+
|
|
266
|
+
Only allows:
|
|
267
|
+
- Comparisons: ==, !=, <, <=, >, >=
|
|
268
|
+
- Logical operations: and, or, not
|
|
269
|
+
- Constants: numbers, strings, booleans
|
|
270
|
+
- Variable names from context
|
|
271
|
+
|
|
272
|
+
Security:
|
|
273
|
+
Uses AST parsing to safely evaluate conditions without eval().
|
|
274
|
+
"""
|
|
275
|
+
|
|
276
|
+
def __init__(self, context: dict[str, Any]):
|
|
277
|
+
"""Initialize with field context.
|
|
278
|
+
|
|
279
|
+
Args:
|
|
280
|
+
context: Dictionary of field names to values
|
|
281
|
+
"""
|
|
282
|
+
self.context = context
|
|
283
|
+
self.compare_ops = {
|
|
284
|
+
ast.Eq: operator.eq,
|
|
285
|
+
ast.NotEq: operator.ne,
|
|
286
|
+
ast.Lt: operator.lt,
|
|
287
|
+
ast.LtE: operator.le,
|
|
288
|
+
ast.Gt: operator.gt,
|
|
289
|
+
ast.GtE: operator.ge,
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
def eval(self, expression: str) -> bool:
|
|
293
|
+
"""Evaluate condition expression.
|
|
294
|
+
|
|
295
|
+
Args:
|
|
296
|
+
expression: Condition string
|
|
297
|
+
|
|
298
|
+
Returns:
|
|
299
|
+
Boolean result
|
|
300
|
+
"""
|
|
301
|
+
try:
|
|
302
|
+
tree = ast.parse(expression, mode="eval")
|
|
303
|
+
result = self.visit(tree.body)
|
|
304
|
+
return bool(result)
|
|
305
|
+
except Exception:
|
|
306
|
+
# If evaluation fails, condition is false
|
|
307
|
+
return False
|
|
308
|
+
|
|
309
|
+
def visit_Compare(self, node: ast.Compare) -> Any:
|
|
310
|
+
"""Visit comparison operation."""
|
|
311
|
+
left = self.visit(node.left)
|
|
312
|
+
for op, comparator in zip(node.ops, node.comparators, strict=True):
|
|
313
|
+
if type(op) not in self.compare_ops:
|
|
314
|
+
return False
|
|
315
|
+
right = self.visit(comparator)
|
|
316
|
+
if not self.compare_ops[type(op)](left, right):
|
|
317
|
+
return False
|
|
318
|
+
left = right
|
|
319
|
+
return True
|
|
320
|
+
|
|
321
|
+
def visit_BoolOp(self, node: ast.BoolOp) -> Any:
|
|
322
|
+
"""Visit boolean operation (and, or)."""
|
|
323
|
+
if isinstance(node.op, ast.And):
|
|
324
|
+
return all(self.visit(value) for value in node.values)
|
|
325
|
+
elif isinstance(node.op, ast.Or):
|
|
326
|
+
return any(self.visit(value) for value in node.values)
|
|
327
|
+
return False
|
|
328
|
+
|
|
329
|
+
def visit_UnaryOp(self, node: ast.UnaryOp) -> Any:
|
|
330
|
+
"""Visit unary operation (not)."""
|
|
331
|
+
if isinstance(node.op, ast.Not):
|
|
332
|
+
return not self.visit(node.operand)
|
|
333
|
+
return False
|
|
334
|
+
|
|
335
|
+
def visit_Name(self, node: ast.Name) -> Any:
|
|
336
|
+
"""Visit variable name."""
|
|
337
|
+
return self.context.get(node.id)
|
|
338
|
+
|
|
339
|
+
def visit_Constant(self, node: ast.Constant) -> Any:
|
|
340
|
+
"""Visit constant value.
|
|
341
|
+
|
|
342
|
+
In Python 3.8+, ast.Constant replaces ast.Num, ast.Str, and ast.NameConstant.
|
|
343
|
+
Since this project requires Python 3.12+, we only need visit_Constant.
|
|
344
|
+
"""
|
|
345
|
+
return node.value
|
|
346
|
+
|
|
347
|
+
def generic_visit(self, node: ast.AST) -> Any:
|
|
348
|
+
"""Disallow other node types."""
|
|
349
|
+
return False
|
|
350
|
+
|
|
351
|
+
|
|
352
|
+
class ProtocolDecoder:
    """Decode binary data into structured messages using a protocol definition.

    Supports integer, float, bytes, string, bitfield, array, and nested
    struct field types, conditional fields, and per-field validation rules.
    """

    def __init__(self, definition: ProtocolDefinition):
        """Initialize decoder with protocol definition.

        Args:
            definition: Protocol definition
        """
        self.definition = definition
        # Maps the definition's endian names to struct-format prefixes.
        self._endian_map: dict[str, str] = {"big": ">", "little": "<"}

    @classmethod
    def load(cls, path: str | Path) -> "ProtocolDecoder":
        """Load decoder from a YAML protocol definition file.

        Args:
            path: Path to YAML file

        Returns:
            ProtocolDecoder instance
        """
        definition = ProtocolDefinition.from_yaml(path)
        return cls(definition)

    def decode(self, data: bytes, offset: int = 0) -> DecodedMessage:
        """Decode a single message from binary data.

        Fields are decoded in definition order; conditional fields are
        skipped when their condition is not satisfied by the fields decoded
        so far.  Decoding stops at the first field that raises, and the
        message is marked invalid.

        Args:
            data: Binary data
            offset: Starting offset in data

        Returns:
            DecodedMessage instance (``valid`` is True only when no field
            raised and no validation rule failed)
        """
        fields: dict[str, Any] = {}
        errors: list[str] = []
        current_offset = offset
        valid = True

        # Check minimum length
        if len(data) - offset < 1:
            return DecodedMessage(
                fields={}, raw_data=data[offset:], size=0, valid=False, errors=["Insufficient data"]
            )

        # Decode each field
        for field_def in self.definition.fields:
            # Check condition
            if field_def.condition:
                if not self._evaluate_condition(field_def.condition, fields):
                    continue  # Skip this field

            try:
                value, bytes_consumed = self._decode_field(data[current_offset:], field_def, fields)

                # Validate
                if field_def.validation:
                    validation_error = self._validate_field(value, field_def.validation)
                    if validation_error:
                        errors.append(f"{field_def.name}: {validation_error}")
                        valid = False

                # Store value (validation failures still record the value so
                # later conditions/sizes can reference it)
                fields[field_def.name] = value
                current_offset += bytes_consumed

            except Exception as e:
                errors.append(f"{field_def.name}: {e!s}")
                valid = False
                break

        total_size = current_offset - offset

        return DecodedMessage(
            fields=fields,
            raw_data=data[offset:current_offset],
            size=total_size,
            valid=valid and len(errors) == 0,
            errors=errors,
        )

    def decode_stream(self, data: bytes) -> list[DecodedMessage]:
        """Decode multiple messages from a data stream.

        When the protocol's framing defines a ``sync_pattern``, decoding
        re-synchronizes on that pattern and skips forward one byte to
        recover from invalid messages; without a sync pattern, decoding
        stops at the first invalid message.

        Args:
            data: Binary data stream

        Returns:
            List of valid DecodedMessage instances
        """
        messages = []
        offset = 0

        # Check for sync pattern
        sync_pattern = self.definition.framing.get("sync_pattern")

        while offset < len(data):
            # Find sync if configured
            if sync_pattern:
                sync_offset = self.find_sync(data, offset)
                if sync_offset is None:
                    break  # No more sync patterns
                offset = sync_offset

            # Decode message
            msg = self.decode(data, offset)

            if msg.valid:
                messages.append(msg)
                offset += msg.size
            else:
                # Try to recover by finding next sync
                if sync_pattern:
                    offset += 1
                else:
                    break  # Can't recover without sync

        return messages

    def find_sync(self, data: bytes, start: int = 0) -> int | None:
        """Find the framing sync pattern in data.

        Args:
            data: Binary data
            start: Starting offset

        Returns:
            Offset of sync pattern, ``start`` when no pattern is configured,
            or None when the pattern is not found
        """
        sync_pattern = self.definition.framing.get("sync_pattern")
        if not sync_pattern:
            return start  # No sync pattern, start from beginning

        # Convert sync pattern (hex string or bytes)
        if isinstance(sync_pattern, str):
            if sync_pattern.startswith("0x"):
                # Hex string like "0xAA55"
                sync_bytes = bytes.fromhex(sync_pattern[2:])
            else:
                sync_bytes = sync_pattern.encode()
        else:
            # Assumes an iterable of ints (e.g. a YAML list) — TODO confirm
            sync_bytes = bytes(sync_pattern)

        # Search for pattern
        idx = data.find(sync_bytes, start)
        if idx == -1:
            return None
        return idx

    def _decode_field(
        self, data: bytes, field: FieldDefinition, context: dict[str, Any]
    ) -> tuple[Any, int]:
        """Decode a single field, dispatching on its declared type.

        Args:
            data: Binary data
            field: Field definition
            context: Previously decoded fields

        Returns:
            Tuple of (value, bytes_consumed)

        Raises:
            ValueError: If bitfield size is unsupported or field type is unknown
        """
        endian = self._endian_map.get(field.endian, ">")
        field_type = field.field_type

        # Integer types
        if field_type in ["uint8", "int8", "uint16", "int16", "uint32", "int32", "uint64", "int64"]:
            return self._decode_integer(data, field_type, endian)

        # Float types
        elif field_type in ["float32", "float64"]:
            return self._decode_float(data, field_type, endian)

        # Bytes
        elif field_type == "bytes":
            size = self._resolve_size(field.size, context, data)
            if size > len(data):
                size = len(data)  # Use remaining data
            return bytes(data[:size]), size

        # String
        elif field_type == "string":
            size = self._resolve_size(field.size, context, data)
            if size > len(data):
                size = len(data)  # Use remaining data
            string_bytes = data[:size]
            # Try to decode as UTF-8, fall back to latin-1
            try:
                value = string_bytes.decode("utf-8").rstrip("\x00")
            except UnicodeDecodeError:
                value = string_bytes.decode("latin-1").rstrip("\x00")
            return value, size

        # Bitfield
        elif field_type == "bitfield":
            # Decode as uint and extract bits
            field_size = field.size if isinstance(field.size, int) else 1
            if field_size == 1:
                bitfield_value = int(data[0])
            elif field_size == 2:
                bitfield_value = struct.unpack(f"{endian}H", data[:2])[0]
            elif field_size == 4:
                bitfield_value = struct.unpack(f"{endian}I", data[:4])[0]
            else:
                raise ValueError(f"Unsupported bitfield size: {field_size}")

            # Return as-is, caller can extract specific bits
            return bitfield_value, field_size

        # Array
        elif field_type == "array":
            return self._decode_array(data, field, context)

        # Struct (nested)
        elif field_type == "struct":
            return self._decode_struct(data, field, context)

        else:
            raise ValueError(f"Unknown field type: {field_type}")

    def _decode_array(
        self, data: bytes, field: FieldDefinition, context: dict[str, Any]
    ) -> tuple[list[Any], int]:
        """Decode array field.

        Element count comes from ``field.count`` (literal), ``field.count_field``
        (reference to a previously decoded field), or — when neither is set —
        elements are decoded until the data is exhausted.

        Args:
            data: Binary data
            field: Field definition with element spec
            context: Previously decoded fields

        Returns:
            Tuple of (list of values, bytes_consumed)

        Raises:
            ValueError: If array field is missing element definition
        """
        elements = []
        total_consumed = 0

        # Determine element count
        count = None
        if field.count is not None:
            count = field.count
        elif field.count_field is not None and field.count_field in context:
            count = int(context[field.count_field])

        # Get element definition
        element_def = field.element
        if element_def is None:
            raise ValueError(f"Array field '{field.name}' missing element definition")

        element_type = element_def.get("type", "uint8")
        element_endian = element_def.get("endian", field.endian)

        # If no count, try to decode until data exhausted
        idx = 0
        while len(data) - total_consumed > 0:
            if count is not None and idx >= count:
                break

            # Create a temporary field definition for the element
            if element_type == "struct":
                # Nested struct in array
                nested_fields = element_def.get("fields", [])
                parsed_fields = [
                    ProtocolDefinition._parse_field_definition(f, element_endian)
                    for f in nested_fields
                ]
                elem_field = FieldDefinition(
                    name=f"{field.name}[{idx}]",
                    field_type="struct",
                    endian=element_endian,
                    fields=parsed_fields,
                )
                value, consumed = self._decode_struct(data[total_consumed:], elem_field, context)
            else:
                # Simple element type
                elem_field = FieldDefinition(
                    name=f"{field.name}[{idx}]",
                    field_type=element_type,
                    endian=element_endian,
                    size=element_def.get("size"),
                )
                value, consumed = self._decode_field(data[total_consumed:], elem_field, context)

            if consumed == 0:
                break  # Prevent infinite loop

            elements.append(value)
            total_consumed += consumed
            idx += 1

        return elements, total_consumed

    def _decode_struct(
        self, data: bytes, field: FieldDefinition, context: dict[str, Any]
    ) -> tuple[dict[str, Any], int]:
        """Decode a nested struct field.

        Args:
            data: Binary data
            field: Field definition with nested fields
            context: Previously decoded fields

        Returns:
            Tuple of (dict of field values, bytes_consumed)

        Raises:
            ValueError: If struct field is missing fields definition
        """
        struct_fields: dict[str, Any] = {}
        total_consumed = 0

        # Get nested field definitions
        nested_fields = field.fields
        if nested_fields is None:
            raise ValueError(f"Struct field '{field.name}' missing fields definition")

        # Decode each nested field
        for nested_field in nested_fields:
            if len(data) - total_consumed < 1:
                break  # Not enough data

            # Check condition if present
            if nested_field.condition:
                # Use combined context (parent context + struct fields decoded so far)
                combined_context = {**context, **struct_fields}
                if not self._evaluate_condition(nested_field.condition, combined_context):
                    continue

            value, consumed = self._decode_field(
                data[total_consumed:], nested_field, {**context, **struct_fields}
            )
            struct_fields[nested_field.name] = value
            total_consumed += consumed

        return struct_fields, total_consumed

    def _decode_integer(self, data: bytes, type_name: str, endian: str) -> tuple[int, int]:
        """Decode integer field.

        Args:
            data: Binary data
            type_name: Type name (uint8, int16, etc.)
            endian: Endian marker

        Returns:
            Tuple of (value, bytes_consumed)

        Raises:
            ValueError: If insufficient data for the integer type
        """
        # struct format character and byte width per type name
        format_map = {
            "uint8": ("B", 1),
            "int8": ("b", 1),
            "uint16": ("H", 2),
            "int16": ("h", 2),
            "uint32": ("I", 4),
            "int32": ("i", 4),
            "uint64": ("Q", 8),
            "int64": ("q", 8),
        }

        fmt_char, size = format_map[type_name]

        if len(data) < size:
            raise ValueError(f"Insufficient data for {type_name} (need {size}, have {len(data)})")

        # uint8/int8 don't use endianness
        if size == 1:
            value = struct.unpack(fmt_char, data[:size])[0]
        else:
            value = struct.unpack(f"{endian}{fmt_char}", data[:size])[0]

        return value, size

    def _decode_float(self, data: bytes, type_name: str, endian: str) -> tuple[float, int]:
        """Decode float field.

        Args:
            data: Binary data
            type_name: Type name (float32 or float64)
            endian: Endian marker

        Returns:
            Tuple of (value, bytes_consumed)

        Raises:
            ValueError: If insufficient data for the float type
        """
        if type_name == "float32":
            size = 4
            fmt = f"{endian}f"
        else:  # float64
            size = 8
            fmt = f"{endian}d"

        if len(data) < size:
            raise ValueError(f"Insufficient data for {type_name} (need {size}, have {len(data)})")

        value = struct.unpack(fmt, data[:size])[0]
        return value, size

    def _resolve_size(
        self, size_spec: int | str | None, context: dict[str, Any], data: bytes
    ) -> int:
        """Resolve field size (literal or reference).

        Args:
            size_spec: Size specification (int, field name, or 'remaining')
            context: Decoded fields
            data: Current data buffer (for 'remaining' size)

        Returns:
            Resolved size

        Raises:
            ValueError: If size field not found in context or size specification is invalid
        """
        if size_spec is None:
            # No size specified, return 0 (caller should handle)
            return 0
        elif isinstance(size_spec, int):
            return size_spec
        elif isinstance(size_spec, str):
            # Special case: 'remaining' means use all remaining data
            if size_spec == "remaining":
                return len(data)
            # Reference to another field
            if size_spec in context:
                return int(context[size_spec])
            else:
                raise ValueError(f"Size field '{size_spec}' not found in context")
        else:
            raise ValueError(f"Invalid size specification: {size_spec}")

    def _evaluate_condition(self, condition: str, context: dict[str, Any]) -> bool:
        """Evaluate field condition against decoded context.

        Args:
            condition: Condition expression (e.g., "msg_type == 0x02")
            context: Decoded fields

        Returns:
            True if condition is satisfied

        Security:
            Uses AST-based safe evaluation. Only comparisons and logical
            operations are permitted.
        """
        evaluator = _SafeConditionEvaluator(context)
        return evaluator.eval(condition)

    def _validate_field(self, value: Any, validation: dict[str, Any]) -> str | None:
        """Validate field value against min/max/exact-value rules.

        Args:
            value: Field value
            validation: Validation rules

        Returns:
            Error message or None if valid
        """
        # Min/max validation
        if "min" in validation:
            if value < validation["min"]:
                return f"Value {value} below minimum {validation['min']}"

        if "max" in validation:
            if value > validation["max"]:
                return f"Value {value} above maximum {validation['max']}"

        # Value validation
        if "value" in validation:
            if value != validation["value"]:
                return f"Expected {validation['value']}, got {value}"

        return None
|
|
853
|
+
|
|
854
|
+
|
|
855
|
+
class ProtocolEncoder:
    """Encode field values into binary messages using a protocol definition.

    Mirror of ProtocolDecoder: supports integer, float, bytes, string,
    bitfield, array, and nested struct field types, plus conditional fields.
    """

    def __init__(self, definition: ProtocolDefinition):
        """Initialize encoder.

        Args:
            definition: Protocol definition
        """
        self.definition = definition
        # Maps the definition's endian names to struct-format prefixes.
        self._endian_map: dict[str, str] = {"big": ">", "little": "<"}

    def encode(self, fields: dict[str, Any]) -> bytes:
        """Encode field values to a binary message.

        Fields are encoded in definition order; conditional fields are
        skipped when their condition is not satisfied by ``fields``.
        Missing fields fall back to the definition's default value.

        Args:
            fields: Dictionary of field name -> value

        Returns:
            Encoded binary message

        Raises:
            ValueError: If a required field is missing (no value and no default)
        """
        result = bytearray()

        for field_def in self.definition.fields:
            # Check condition
            if field_def.condition:
                # Use safe evaluator instead of eval()
                evaluator = _SafeConditionEvaluator(fields)
                if not evaluator.eval(field_def.condition):
                    continue

            # Get value
            if field_def.name in fields:
                value = fields[field_def.name]
            elif field_def.default is not None:
                value = field_def.default
            else:
                raise ValueError(f"Missing required field: {field_def.name}")

            # Encode field
            encoded = self._encode_field(value, field_def)
            result.extend(encoded)

        return bytes(result)

    def _encode_field(self, value: Any, field: FieldDefinition) -> bytes:
        """Encode a single field value, dispatching on its declared type.

        Args:
            value: Field value
            field: Field definition

        Returns:
            Encoded bytes

        Raises:
            ValueError: If bytes value is invalid, the bitfield size is
                unsupported, or the field type is unknown for encoding
        """
        endian = self._endian_map.get(field.endian, ">")
        field_type = field.field_type

        # Integer types
        if field_type == "uint8":
            return struct.pack("B", int(value))
        elif field_type == "int8":
            return struct.pack("b", int(value))
        elif field_type == "uint16":
            return struct.pack(f"{endian}H", int(value))
        elif field_type == "int16":
            return struct.pack(f"{endian}h", int(value))
        elif field_type == "uint32":
            return struct.pack(f"{endian}I", int(value))
        elif field_type == "int32":
            return struct.pack(f"{endian}i", int(value))
        elif field_type == "uint64":
            return struct.pack(f"{endian}Q", int(value))
        elif field_type == "int64":
            return struct.pack(f"{endian}q", int(value))

        # Float types
        elif field_type == "float32":
            return struct.pack(f"{endian}f", float(value))
        elif field_type == "float64":
            return struct.pack(f"{endian}d", float(value))

        # Bitfield: packed as an unsigned integer of 1, 2, or 4 bytes,
        # mirroring ProtocolDecoder._decode_field.  (Previously bitfield
        # fields could be decoded but not re-encoded, breaking round-trips.)
        elif field_type == "bitfield":
            field_size = field.size if isinstance(field.size, int) else 1
            if field_size == 1:
                return struct.pack("B", int(value))
            elif field_size == 2:
                return struct.pack(f"{endian}H", int(value))
            elif field_size == 4:
                return struct.pack(f"{endian}I", int(value))
            else:
                raise ValueError(f"Unsupported bitfield size: {field_size}")

        # Bytes
        elif field_type == "bytes":
            if isinstance(value, bytes):
                return value
            elif isinstance(value, list | tuple):
                return bytes(value)
            else:
                raise ValueError(f"Invalid bytes value: {value}")

        # String (no padding/truncation to a declared size — the caller is
        # responsible for supplying a value of the expected length)
        elif field_type == "string":
            if isinstance(value, str):
                return value.encode("utf-8")
            else:
                return bytes(value)

        # Array
        elif field_type == "array":
            return self._encode_array(value, field)

        # Struct
        elif field_type == "struct":
            return self._encode_struct(value, field)

        else:
            raise ValueError(f"Unknown field type for encoding: {field_type}")

    def _encode_array(self, value: list[Any], field: FieldDefinition) -> bytes:
        """Encode array field.

        Args:
            value: List of values
            field: Field definition

        Returns:
            Encoded bytes

        Raises:
            ValueError: If array field is missing element definition
        """
        result = bytearray()

        element_def = field.element
        if element_def is None:
            raise ValueError(f"Array field '{field.name}' missing element definition")

        element_type = element_def.get("type", "uint8")
        element_endian = element_def.get("endian", field.endian)

        for i, elem in enumerate(value):
            if element_type == "struct":
                # Nested struct
                nested_fields = element_def.get("fields", [])
                parsed_fields = [
                    ProtocolDefinition._parse_field_definition(f, element_endian)
                    for f in nested_fields
                ]
                elem_field = FieldDefinition(
                    name=f"{field.name}[{i}]",
                    field_type="struct",
                    endian=element_endian,
                    fields=parsed_fields,
                )
                result.extend(self._encode_struct(elem, elem_field))
            else:
                elem_field = FieldDefinition(
                    name=f"{field.name}[{i}]",
                    field_type=element_type,
                    endian=element_endian,
                    size=element_def.get("size"),
                )
                result.extend(self._encode_field(elem, elem_field))

        return bytes(result)

    def _encode_struct(self, value: dict[str, Any], field: FieldDefinition) -> bytes:
        """Encode struct field.

        Nested fields with neither a supplied value nor a default are
        silently skipped (matching the decoder's tolerance of short data).

        Args:
            value: Dictionary of field values
            field: Field definition

        Returns:
            Encoded bytes

        Raises:
            ValueError: If struct field is missing fields definition
        """
        result = bytearray()

        nested_fields = field.fields
        if nested_fields is None:
            raise ValueError(f"Struct field '{field.name}' missing fields definition")

        for nested_field in nested_fields:
            if nested_field.name in value:
                result.extend(self._encode_field(value[nested_field.name], nested_field))
            elif nested_field.default is not None:
                result.extend(self._encode_field(nested_field.default, nested_field))

        return bytes(result)
|
|
1050
|
+
|
|
1051
|
+
|
|
1052
|
+
def load_protocol(path: str | Path) -> ProtocolDefinition:
    """Load a protocol definition from a YAML file.

    Thin convenience wrapper around ``ProtocolDefinition.from_yaml``.

    Args:
        path: Path to YAML file

    Returns:
        ProtocolDefinition instance
    """
    definition = ProtocolDefinition.from_yaml(path)
    return definition
|
|
1064
|
+
|
|
1065
|
+
|
|
1066
|
+
def decode_message(data: bytes, protocol: str | Path | ProtocolDefinition) -> DecodedMessage:
    """Decode a message using a protocol.

    Convenience function for one-shot decoding.

    Args:
        data: Binary message data
        protocol: Protocol definition (YAML file path — ``str`` or
            ``pathlib.Path`` — or a ProtocolDefinition instance)

    Returns:
        DecodedMessage instance
    """
    # Accept Path as well as str: ProtocolDefinition.from_yaml (see
    # ProtocolDecoder.load) takes str | Path, but the previous isinstance
    # check only matched str, so a Path fell through and was treated as a
    # ProtocolDefinition instance, failing later with an AttributeError.
    if isinstance(protocol, str | Path):
        protocol_def = ProtocolDefinition.from_yaml(protocol)
    else:
        protocol_def = protocol

    decoder = ProtocolDecoder(protocol_def)
    return decoder.decode(data)
|