oscura-0.5.1-py3-none-any.whl → oscura-0.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oscura/__init__.py +169 -167
- oscura/analyzers/__init__.py +3 -0
- oscura/analyzers/classification.py +659 -0
- oscura/analyzers/digital/edges.py +325 -65
- oscura/analyzers/digital/quality.py +293 -166
- oscura/analyzers/digital/timing.py +260 -115
- oscura/analyzers/digital/timing_numba.py +334 -0
- oscura/analyzers/entropy.py +605 -0
- oscura/analyzers/eye/diagram.py +176 -109
- oscura/analyzers/eye/metrics.py +5 -5
- oscura/analyzers/jitter/__init__.py +6 -4
- oscura/analyzers/jitter/ber.py +52 -52
- oscura/analyzers/jitter/classification.py +156 -0
- oscura/analyzers/jitter/decomposition.py +163 -113
- oscura/analyzers/jitter/spectrum.py +80 -64
- oscura/analyzers/ml/__init__.py +39 -0
- oscura/analyzers/ml/features.py +600 -0
- oscura/analyzers/ml/signal_classifier.py +604 -0
- oscura/analyzers/packet/daq.py +246 -158
- oscura/analyzers/packet/parser.py +12 -1
- oscura/analyzers/packet/payload.py +50 -2110
- oscura/analyzers/packet/payload_analysis.py +361 -181
- oscura/analyzers/packet/payload_patterns.py +133 -70
- oscura/analyzers/packet/stream.py +84 -23
- oscura/analyzers/patterns/__init__.py +26 -5
- oscura/analyzers/patterns/anomaly_detection.py +908 -0
- oscura/analyzers/patterns/clustering.py +169 -108
- oscura/analyzers/patterns/clustering_optimized.py +227 -0
- oscura/analyzers/patterns/discovery.py +1 -1
- oscura/analyzers/patterns/matching.py +581 -197
- oscura/analyzers/patterns/pattern_mining.py +778 -0
- oscura/analyzers/patterns/periodic.py +121 -38
- oscura/analyzers/patterns/sequences.py +175 -78
- oscura/analyzers/power/conduction.py +1 -1
- oscura/analyzers/power/soa.py +6 -6
- oscura/analyzers/power/switching.py +250 -110
- oscura/analyzers/protocol/__init__.py +17 -1
- oscura/analyzers/protocols/base.py +6 -6
- oscura/analyzers/protocols/ble/__init__.py +38 -0
- oscura/analyzers/protocols/ble/analyzer.py +809 -0
- oscura/analyzers/protocols/ble/uuids.py +288 -0
- oscura/analyzers/protocols/can.py +257 -127
- oscura/analyzers/protocols/can_fd.py +107 -80
- oscura/analyzers/protocols/flexray.py +139 -80
- oscura/analyzers/protocols/hdlc.py +93 -58
- oscura/analyzers/protocols/i2c.py +247 -106
- oscura/analyzers/protocols/i2s.py +138 -86
- oscura/analyzers/protocols/industrial/__init__.py +40 -0
- oscura/analyzers/protocols/industrial/bacnet/__init__.py +33 -0
- oscura/analyzers/protocols/industrial/bacnet/analyzer.py +708 -0
- oscura/analyzers/protocols/industrial/bacnet/encoding.py +412 -0
- oscura/analyzers/protocols/industrial/bacnet/services.py +622 -0
- oscura/analyzers/protocols/industrial/ethercat/__init__.py +30 -0
- oscura/analyzers/protocols/industrial/ethercat/analyzer.py +474 -0
- oscura/analyzers/protocols/industrial/ethercat/mailbox.py +339 -0
- oscura/analyzers/protocols/industrial/ethercat/topology.py +166 -0
- oscura/analyzers/protocols/industrial/modbus/__init__.py +31 -0
- oscura/analyzers/protocols/industrial/modbus/analyzer.py +525 -0
- oscura/analyzers/protocols/industrial/modbus/crc.py +79 -0
- oscura/analyzers/protocols/industrial/modbus/functions.py +436 -0
- oscura/analyzers/protocols/industrial/opcua/__init__.py +21 -0
- oscura/analyzers/protocols/industrial/opcua/analyzer.py +552 -0
- oscura/analyzers/protocols/industrial/opcua/datatypes.py +446 -0
- oscura/analyzers/protocols/industrial/opcua/services.py +264 -0
- oscura/analyzers/protocols/industrial/profinet/__init__.py +23 -0
- oscura/analyzers/protocols/industrial/profinet/analyzer.py +441 -0
- oscura/analyzers/protocols/industrial/profinet/dcp.py +263 -0
- oscura/analyzers/protocols/industrial/profinet/ptcp.py +200 -0
- oscura/analyzers/protocols/jtag.py +180 -98
- oscura/analyzers/protocols/lin.py +219 -114
- oscura/analyzers/protocols/manchester.py +4 -4
- oscura/analyzers/protocols/onewire.py +253 -149
- oscura/analyzers/protocols/parallel_bus/__init__.py +20 -0
- oscura/analyzers/protocols/parallel_bus/centronics.py +92 -0
- oscura/analyzers/protocols/parallel_bus/gpib.py +137 -0
- oscura/analyzers/protocols/spi.py +192 -95
- oscura/analyzers/protocols/swd.py +321 -167
- oscura/analyzers/protocols/uart.py +267 -125
- oscura/analyzers/protocols/usb.py +235 -131
- oscura/analyzers/side_channel/power.py +17 -12
- oscura/analyzers/signal/__init__.py +15 -0
- oscura/analyzers/signal/timing_analysis.py +1086 -0
- oscura/analyzers/signal_integrity/__init__.py +4 -1
- oscura/analyzers/signal_integrity/sparams.py +2 -19
- oscura/analyzers/spectral/chunked.py +129 -60
- oscura/analyzers/spectral/chunked_fft.py +300 -94
- oscura/analyzers/spectral/chunked_wavelet.py +100 -80
- oscura/analyzers/statistical/checksum.py +376 -217
- oscura/analyzers/statistical/classification.py +229 -107
- oscura/analyzers/statistical/entropy.py +78 -53
- oscura/analyzers/statistics/correlation.py +407 -211
- oscura/analyzers/statistics/outliers.py +2 -2
- oscura/analyzers/statistics/streaming.py +30 -5
- oscura/analyzers/validation.py +216 -101
- oscura/analyzers/waveform/measurements.py +9 -0
- oscura/analyzers/waveform/measurements_with_uncertainty.py +31 -15
- oscura/analyzers/waveform/spectral.py +500 -228
- oscura/api/__init__.py +31 -5
- oscura/api/dsl/__init__.py +582 -0
- oscura/{dsl → api/dsl}/commands.py +43 -76
- oscura/{dsl → api/dsl}/interpreter.py +26 -51
- oscura/{dsl → api/dsl}/parser.py +107 -77
- oscura/{dsl → api/dsl}/repl.py +2 -2
- oscura/api/dsl.py +1 -1
- oscura/{integrations → api/integrations}/__init__.py +1 -1
- oscura/{integrations → api/integrations}/llm.py +201 -102
- oscura/api/operators.py +3 -3
- oscura/api/optimization.py +144 -30
- oscura/api/rest_server.py +921 -0
- oscura/api/server/__init__.py +17 -0
- oscura/api/server/dashboard.py +850 -0
- oscura/api/server/static/README.md +34 -0
- oscura/api/server/templates/base.html +181 -0
- oscura/api/server/templates/export.html +120 -0
- oscura/api/server/templates/home.html +284 -0
- oscura/api/server/templates/protocols.html +58 -0
- oscura/api/server/templates/reports.html +43 -0
- oscura/api/server/templates/session_detail.html +89 -0
- oscura/api/server/templates/sessions.html +83 -0
- oscura/api/server/templates/waveforms.html +73 -0
- oscura/automotive/__init__.py +8 -1
- oscura/automotive/can/__init__.py +10 -0
- oscura/automotive/can/checksum.py +3 -1
- oscura/automotive/can/dbc_generator.py +590 -0
- oscura/automotive/can/message_wrapper.py +121 -74
- oscura/automotive/can/patterns.py +98 -21
- oscura/automotive/can/session.py +292 -56
- oscura/automotive/can/state_machine.py +6 -3
- oscura/automotive/can/stimulus_response.py +97 -75
- oscura/automotive/dbc/__init__.py +10 -2
- oscura/automotive/dbc/generator.py +84 -56
- oscura/automotive/dbc/parser.py +6 -6
- oscura/automotive/dtc/data.json +17 -102
- oscura/automotive/dtc/database.py +2 -2
- oscura/automotive/flexray/__init__.py +31 -0
- oscura/automotive/flexray/analyzer.py +504 -0
- oscura/automotive/flexray/crc.py +185 -0
- oscura/automotive/flexray/fibex.py +449 -0
- oscura/automotive/j1939/__init__.py +45 -8
- oscura/automotive/j1939/analyzer.py +605 -0
- oscura/automotive/j1939/spns.py +326 -0
- oscura/automotive/j1939/transport.py +306 -0
- oscura/automotive/lin/__init__.py +47 -0
- oscura/automotive/lin/analyzer.py +612 -0
- oscura/automotive/loaders/blf.py +13 -2
- oscura/automotive/loaders/csv_can.py +143 -72
- oscura/automotive/loaders/dispatcher.py +50 -2
- oscura/automotive/loaders/mdf.py +86 -45
- oscura/automotive/loaders/pcap.py +111 -61
- oscura/automotive/uds/__init__.py +4 -0
- oscura/automotive/uds/analyzer.py +725 -0
- oscura/automotive/uds/decoder.py +140 -58
- oscura/automotive/uds/models.py +7 -1
- oscura/automotive/visualization.py +1 -1
- oscura/cli/analyze.py +348 -0
- oscura/cli/batch.py +142 -122
- oscura/cli/benchmark.py +275 -0
- oscura/cli/characterize.py +137 -82
- oscura/cli/compare.py +224 -131
- oscura/cli/completion.py +250 -0
- oscura/cli/config_cmd.py +361 -0
- oscura/cli/decode.py +164 -87
- oscura/cli/export.py +286 -0
- oscura/cli/main.py +115 -31
- oscura/{onboarding → cli/onboarding}/__init__.py +3 -3
- oscura/{onboarding → cli/onboarding}/help.py +80 -58
- oscura/{onboarding → cli/onboarding}/tutorials.py +97 -72
- oscura/{onboarding → cli/onboarding}/wizard.py +55 -36
- oscura/cli/progress.py +147 -0
- oscura/cli/shell.py +157 -135
- oscura/cli/validate_cmd.py +204 -0
- oscura/cli/visualize.py +158 -0
- oscura/convenience.py +125 -79
- oscura/core/__init__.py +4 -2
- oscura/core/backend_selector.py +3 -3
- oscura/core/cache.py +126 -15
- oscura/core/cancellation.py +1 -1
- oscura/{config → core/config}/__init__.py +20 -11
- oscura/{config → core/config}/defaults.py +1 -1
- oscura/{config → core/config}/loader.py +7 -5
- oscura/{config → core/config}/memory.py +5 -5
- oscura/{config → core/config}/migration.py +1 -1
- oscura/{config → core/config}/pipeline.py +99 -23
- oscura/{config → core/config}/preferences.py +1 -1
- oscura/{config → core/config}/protocol.py +3 -3
- oscura/{config → core/config}/schema.py +426 -272
- oscura/{config → core/config}/settings.py +1 -1
- oscura/{config → core/config}/thresholds.py +195 -153
- oscura/core/correlation.py +5 -6
- oscura/core/cross_domain.py +0 -2
- oscura/core/debug.py +9 -5
- oscura/{extensibility → core/extensibility}/docs.py +158 -70
- oscura/{extensibility → core/extensibility}/extensions.py +160 -76
- oscura/{extensibility → core/extensibility}/logging.py +1 -1
- oscura/{extensibility → core/extensibility}/measurements.py +1 -1
- oscura/{extensibility → core/extensibility}/plugins.py +1 -1
- oscura/{extensibility → core/extensibility}/templates.py +73 -3
- oscura/{extensibility → core/extensibility}/validation.py +1 -1
- oscura/core/gpu_backend.py +11 -7
- oscura/core/log_query.py +101 -11
- oscura/core/logging.py +126 -54
- oscura/core/logging_advanced.py +5 -5
- oscura/core/memory_limits.py +108 -70
- oscura/core/memory_monitor.py +2 -2
- oscura/core/memory_progress.py +7 -7
- oscura/core/memory_warnings.py +1 -1
- oscura/core/numba_backend.py +13 -13
- oscura/{plugins → core/plugins}/__init__.py +9 -9
- oscura/{plugins → core/plugins}/base.py +7 -7
- oscura/{plugins → core/plugins}/cli.py +3 -3
- oscura/{plugins → core/plugins}/discovery.py +186 -106
- oscura/{plugins → core/plugins}/lifecycle.py +1 -1
- oscura/{plugins → core/plugins}/manager.py +7 -7
- oscura/{plugins → core/plugins}/registry.py +3 -3
- oscura/{plugins → core/plugins}/versioning.py +1 -1
- oscura/core/progress.py +16 -1
- oscura/core/provenance.py +8 -2
- oscura/{schemas → core/schemas}/__init__.py +2 -2
- oscura/{schemas → core/schemas}/device_mapping.json +2 -8
- oscura/{schemas → core/schemas}/packet_format.json +4 -24
- oscura/{schemas → core/schemas}/protocol_definition.json +2 -12
- oscura/core/types.py +4 -0
- oscura/core/uncertainty.py +3 -3
- oscura/correlation/__init__.py +52 -0
- oscura/correlation/multi_protocol.py +811 -0
- oscura/discovery/auto_decoder.py +117 -35
- oscura/discovery/comparison.py +191 -86
- oscura/discovery/quality_validator.py +155 -68
- oscura/discovery/signal_detector.py +196 -79
- oscura/export/__init__.py +18 -8
- oscura/export/kaitai_struct.py +513 -0
- oscura/export/scapy_layer.py +801 -0
- oscura/export/wireshark/generator.py +1 -1
- oscura/export/wireshark/templates/dissector.lua.j2 +2 -2
- oscura/export/wireshark_dissector.py +746 -0
- oscura/guidance/wizard.py +207 -111
- oscura/hardware/__init__.py +19 -0
- oscura/{acquisition → hardware/acquisition}/__init__.py +4 -4
- oscura/{acquisition → hardware/acquisition}/file.py +2 -2
- oscura/{acquisition → hardware/acquisition}/hardware.py +7 -7
- oscura/{acquisition → hardware/acquisition}/saleae.py +15 -12
- oscura/{acquisition → hardware/acquisition}/socketcan.py +1 -1
- oscura/{acquisition → hardware/acquisition}/streaming.py +2 -2
- oscura/{acquisition → hardware/acquisition}/synthetic.py +3 -3
- oscura/{acquisition → hardware/acquisition}/visa.py +33 -11
- oscura/hardware/firmware/__init__.py +29 -0
- oscura/hardware/firmware/pattern_recognition.py +874 -0
- oscura/hardware/hal_detector.py +736 -0
- oscura/hardware/security/__init__.py +37 -0
- oscura/hardware/security/side_channel_detector.py +1126 -0
- oscura/inference/__init__.py +4 -0
- oscura/inference/active_learning/observation_table.py +4 -1
- oscura/inference/alignment.py +216 -123
- oscura/inference/bayesian.py +113 -33
- oscura/inference/crc_reverse.py +101 -55
- oscura/inference/logic.py +6 -2
- oscura/inference/message_format.py +342 -183
- oscura/inference/protocol.py +95 -44
- oscura/inference/protocol_dsl.py +180 -82
- oscura/inference/signal_intelligence.py +1439 -706
- oscura/inference/spectral.py +99 -57
- oscura/inference/state_machine.py +810 -158
- oscura/inference/stream.py +270 -110
- oscura/iot/__init__.py +34 -0
- oscura/iot/coap/__init__.py +32 -0
- oscura/iot/coap/analyzer.py +668 -0
- oscura/iot/coap/options.py +212 -0
- oscura/iot/lorawan/__init__.py +21 -0
- oscura/iot/lorawan/crypto.py +206 -0
- oscura/iot/lorawan/decoder.py +801 -0
- oscura/iot/lorawan/mac_commands.py +341 -0
- oscura/iot/mqtt/__init__.py +27 -0
- oscura/iot/mqtt/analyzer.py +999 -0
- oscura/iot/mqtt/properties.py +315 -0
- oscura/iot/zigbee/__init__.py +31 -0
- oscura/iot/zigbee/analyzer.py +615 -0
- oscura/iot/zigbee/security.py +153 -0
- oscura/iot/zigbee/zcl.py +349 -0
- oscura/jupyter/display.py +125 -45
- oscura/{exploratory → jupyter/exploratory}/__init__.py +8 -8
- oscura/{exploratory → jupyter/exploratory}/error_recovery.py +298 -141
- oscura/jupyter/exploratory/fuzzy.py +746 -0
- oscura/{exploratory → jupyter/exploratory}/fuzzy_advanced.py +258 -100
- oscura/{exploratory → jupyter/exploratory}/legacy.py +464 -242
- oscura/{exploratory → jupyter/exploratory}/parse.py +167 -145
- oscura/{exploratory → jupyter/exploratory}/recovery.py +119 -87
- oscura/jupyter/exploratory/sync.py +612 -0
- oscura/{exploratory → jupyter/exploratory}/unknown.py +299 -176
- oscura/jupyter/magic.py +4 -4
- oscura/{ui → jupyter/ui}/__init__.py +2 -2
- oscura/{ui → jupyter/ui}/formatters.py +3 -3
- oscura/{ui → jupyter/ui}/progressive_display.py +153 -82
- oscura/loaders/__init__.py +183 -67
- oscura/loaders/binary.py +88 -1
- oscura/loaders/chipwhisperer.py +153 -137
- oscura/loaders/configurable.py +208 -86
- oscura/loaders/csv_loader.py +458 -215
- oscura/loaders/hdf5_loader.py +278 -119
- oscura/loaders/lazy.py +87 -54
- oscura/loaders/mmap_loader.py +1 -1
- oscura/loaders/numpy_loader.py +253 -116
- oscura/loaders/pcap.py +226 -151
- oscura/loaders/rigol.py +110 -49
- oscura/loaders/sigrok.py +201 -78
- oscura/loaders/tdms.py +81 -58
- oscura/loaders/tektronix.py +291 -174
- oscura/loaders/touchstone.py +182 -87
- oscura/loaders/tss.py +456 -0
- oscura/loaders/vcd.py +215 -117
- oscura/loaders/wav.py +155 -68
- oscura/reporting/__init__.py +9 -0
- oscura/reporting/analyze.py +352 -146
- oscura/reporting/argument_preparer.py +69 -14
- oscura/reporting/auto_report.py +97 -61
- oscura/reporting/batch.py +131 -58
- oscura/reporting/chart_selection.py +57 -45
- oscura/reporting/comparison.py +63 -17
- oscura/reporting/content/executive.py +76 -24
- oscura/reporting/core_formats/multi_format.py +11 -8
- oscura/reporting/engine.py +312 -158
- oscura/reporting/enhanced_reports.py +949 -0
- oscura/reporting/export.py +86 -43
- oscura/reporting/formatting/numbers.py +69 -42
- oscura/reporting/html.py +139 -58
- oscura/reporting/index.py +137 -65
- oscura/reporting/output.py +158 -67
- oscura/reporting/pdf.py +67 -102
- oscura/reporting/plots.py +191 -112
- oscura/reporting/sections.py +88 -47
- oscura/reporting/standards.py +104 -61
- oscura/reporting/summary_generator.py +75 -55
- oscura/reporting/tables.py +138 -54
- oscura/reporting/templates/enhanced/protocol_re.html +525 -0
- oscura/sessions/__init__.py +14 -23
- oscura/sessions/base.py +3 -3
- oscura/sessions/blackbox.py +106 -10
- oscura/sessions/generic.py +2 -2
- oscura/sessions/legacy.py +783 -0
- oscura/side_channel/__init__.py +63 -0
- oscura/side_channel/dpa.py +1025 -0
- oscura/utils/__init__.py +15 -1
- oscura/utils/bitwise.py +118 -0
- oscura/{builders → utils/builders}/__init__.py +1 -1
- oscura/{comparison → utils/comparison}/__init__.py +6 -6
- oscura/{comparison → utils/comparison}/compare.py +202 -101
- oscura/{comparison → utils/comparison}/golden.py +83 -63
- oscura/{comparison → utils/comparison}/limits.py +313 -89
- oscura/{comparison → utils/comparison}/mask.py +151 -45
- oscura/{comparison → utils/comparison}/trace_diff.py +1 -1
- oscura/{comparison → utils/comparison}/visualization.py +147 -89
- oscura/{component → utils/component}/__init__.py +3 -3
- oscura/{component → utils/component}/impedance.py +122 -58
- oscura/{component → utils/component}/reactive.py +165 -168
- oscura/{component → utils/component}/transmission_line.py +3 -3
- oscura/{filtering → utils/filtering}/__init__.py +6 -6
- oscura/{filtering → utils/filtering}/base.py +1 -1
- oscura/{filtering → utils/filtering}/convenience.py +2 -2
- oscura/{filtering → utils/filtering}/design.py +169 -93
- oscura/{filtering → utils/filtering}/filters.py +2 -2
- oscura/{filtering → utils/filtering}/introspection.py +2 -2
- oscura/utils/geometry.py +31 -0
- oscura/utils/imports.py +184 -0
- oscura/utils/lazy.py +1 -1
- oscura/{math → utils/math}/__init__.py +2 -2
- oscura/{math → utils/math}/arithmetic.py +114 -48
- oscura/{math → utils/math}/interpolation.py +139 -106
- oscura/utils/memory.py +129 -66
- oscura/utils/memory_advanced.py +92 -9
- oscura/utils/memory_extensions.py +10 -8
- oscura/{optimization → utils/optimization}/__init__.py +1 -1
- oscura/{optimization → utils/optimization}/search.py +2 -2
- oscura/utils/performance/__init__.py +58 -0
- oscura/utils/performance/caching.py +889 -0
- oscura/utils/performance/lsh_clustering.py +333 -0
- oscura/utils/performance/memory_optimizer.py +699 -0
- oscura/utils/performance/optimizations.py +675 -0
- oscura/utils/performance/parallel.py +654 -0
- oscura/utils/performance/profiling.py +661 -0
- oscura/{pipeline → utils/pipeline}/base.py +1 -1
- oscura/{pipeline → utils/pipeline}/composition.py +1 -1
- oscura/{pipeline → utils/pipeline}/parallel.py +3 -2
- oscura/{pipeline → utils/pipeline}/pipeline.py +1 -1
- oscura/{pipeline → utils/pipeline}/reverse_engineering.py +412 -221
- oscura/{search → utils/search}/__init__.py +3 -3
- oscura/{search → utils/search}/anomaly.py +188 -58
- oscura/utils/search/context.py +294 -0
- oscura/{search → utils/search}/pattern.py +138 -10
- oscura/utils/serial.py +51 -0
- oscura/utils/storage/__init__.py +61 -0
- oscura/utils/storage/database.py +1166 -0
- oscura/{streaming → utils/streaming}/chunked.py +302 -143
- oscura/{streaming → utils/streaming}/progressive.py +1 -1
- oscura/{streaming → utils/streaming}/realtime.py +3 -2
- oscura/{triggering → utils/triggering}/__init__.py +6 -6
- oscura/{triggering → utils/triggering}/base.py +6 -6
- oscura/{triggering → utils/triggering}/edge.py +2 -2
- oscura/{triggering → utils/triggering}/pattern.py +2 -2
- oscura/{triggering → utils/triggering}/pulse.py +115 -74
- oscura/{triggering → utils/triggering}/window.py +2 -2
- oscura/utils/validation.py +32 -0
- oscura/validation/__init__.py +121 -0
- oscura/{compliance → validation/compliance}/__init__.py +5 -5
- oscura/{compliance → validation/compliance}/advanced.py +5 -5
- oscura/{compliance → validation/compliance}/masks.py +1 -1
- oscura/{compliance → validation/compliance}/reporting.py +127 -53
- oscura/{compliance → validation/compliance}/testing.py +114 -52
- oscura/validation/compliance_tests.py +915 -0
- oscura/validation/fuzzer.py +990 -0
- oscura/validation/grammar_tests.py +596 -0
- oscura/validation/grammar_validator.py +904 -0
- oscura/validation/hil_testing.py +977 -0
- oscura/{quality → validation/quality}/__init__.py +4 -4
- oscura/{quality → validation/quality}/ensemble.py +251 -171
- oscura/{quality → validation/quality}/explainer.py +3 -3
- oscura/{quality → validation/quality}/scoring.py +1 -1
- oscura/{quality → validation/quality}/warnings.py +4 -4
- oscura/validation/regression_suite.py +808 -0
- oscura/validation/replay.py +788 -0
- oscura/{testing → validation/testing}/__init__.py +2 -2
- oscura/{testing → validation/testing}/synthetic.py +5 -5
- oscura/visualization/__init__.py +9 -0
- oscura/visualization/accessibility.py +1 -1
- oscura/visualization/annotations.py +64 -67
- oscura/visualization/colors.py +7 -7
- oscura/visualization/digital.py +180 -81
- oscura/visualization/eye.py +236 -85
- oscura/visualization/interactive.py +320 -143
- oscura/visualization/jitter.py +587 -247
- oscura/visualization/layout.py +169 -134
- oscura/visualization/optimization.py +103 -52
- oscura/visualization/palettes.py +1 -1
- oscura/visualization/power.py +427 -211
- oscura/visualization/power_extended.py +626 -297
- oscura/visualization/presets.py +2 -0
- oscura/visualization/protocols.py +495 -181
- oscura/visualization/render.py +79 -63
- oscura/visualization/reverse_engineering.py +171 -124
- oscura/visualization/signal_integrity.py +460 -279
- oscura/visualization/specialized.py +190 -100
- oscura/visualization/spectral.py +670 -255
- oscura/visualization/thumbnails.py +166 -137
- oscura/visualization/waveform.py +150 -63
- oscura/workflows/__init__.py +3 -0
- oscura/{batch → workflows/batch}/__init__.py +5 -5
- oscura/{batch → workflows/batch}/advanced.py +150 -75
- oscura/workflows/batch/aggregate.py +531 -0
- oscura/workflows/batch/analyze.py +236 -0
- oscura/{batch → workflows/batch}/logging.py +2 -2
- oscura/{batch → workflows/batch}/metrics.py +1 -1
- oscura/workflows/complete_re.py +1144 -0
- oscura/workflows/compliance.py +44 -54
- oscura/workflows/digital.py +197 -51
- oscura/workflows/legacy/__init__.py +12 -0
- oscura/{workflow → workflows/legacy}/dag.py +4 -1
- oscura/workflows/multi_trace.py +9 -9
- oscura/workflows/power.py +42 -62
- oscura/workflows/protocol.py +82 -49
- oscura/workflows/reverse_engineering.py +351 -150
- oscura/workflows/signal_integrity.py +157 -82
- oscura-0.7.0.dist-info/METADATA +661 -0
- oscura-0.7.0.dist-info/RECORD +591 -0
- oscura/batch/aggregate.py +0 -300
- oscura/batch/analyze.py +0 -139
- oscura/dsl/__init__.py +0 -73
- oscura/exceptions.py +0 -59
- oscura/exploratory/fuzzy.py +0 -513
- oscura/exploratory/sync.py +0 -384
- oscura/exporters/__init__.py +0 -94
- oscura/exporters/csv.py +0 -303
- oscura/exporters/exporters.py +0 -44
- oscura/exporters/hdf5.py +0 -217
- oscura/exporters/html_export.py +0 -701
- oscura/exporters/json_export.py +0 -291
- oscura/exporters/markdown_export.py +0 -367
- oscura/exporters/matlab_export.py +0 -354
- oscura/exporters/npz_export.py +0 -219
- oscura/exporters/spice_export.py +0 -210
- oscura/search/context.py +0 -149
- oscura/session/__init__.py +0 -34
- oscura/session/annotations.py +0 -289
- oscura/session/history.py +0 -313
- oscura/session/session.py +0 -520
- oscura/workflow/__init__.py +0 -13
- oscura-0.5.1.dist-info/METADATA +0 -583
- oscura-0.5.1.dist-info/RECORD +0 -481
- /oscura/core/{config.py → config/legacy.py} +0 -0
- /oscura/{extensibility → core/extensibility}/__init__.py +0 -0
- /oscura/{extensibility → core/extensibility}/registry.py +0 -0
- /oscura/{plugins → core/plugins}/isolation.py +0 -0
- /oscura/{schemas → core/schemas}/bus_configuration.json +0 -0
- /oscura/{builders → utils/builders}/signal_builder.py +0 -0
- /oscura/{optimization → utils/optimization}/parallel.py +0 -0
- /oscura/{pipeline → utils/pipeline}/__init__.py +0 -0
- /oscura/{streaming → utils/streaming}/__init__.py +0 -0
- {oscura-0.5.1.dist-info → oscura-0.7.0.dist-info}/WHEEL +0 -0
- {oscura-0.5.1.dist-info → oscura-0.7.0.dist-info}/entry_points.txt +0 -0
- {oscura-0.5.1.dist-info → oscura-0.7.0.dist-info}/licenses/LICENSE +0 -0
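Many modules move under new parent packages in 0.7.0 (for example `oscura/config` → `oscura/core/config`, `oscura/plugins` → `oscura/core/plugins`, `oscura/acquisition` → `oscura/hardware/acquisition`). Whether the old import paths are still re-exported in 0.7.0 is not visible from this listing, so the snippet below is only a sketch of how downstream code can tolerate both layouts; the `config_loader` alias is illustrative, not part of the package.

```python
# Hedged sketch: module names come from the file listing above, but it is an
# assumption that 0.7.0 does not keep compatibility re-exports at the old paths.
try:
    from oscura.core.config import loader as config_loader  # 0.7.0 layout
except ImportError:
    from oscura.config import loader as config_loader  # 0.5.1 layout
```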
@@ -17,7 +17,7 @@ from collections.abc import Callable, Sequence
 from dataclasses import dataclass, field
 from datetime import datetime
 from pathlib import Path
-from typing import Any, ClassVar, Literal
+from typing import Any, ClassVar, Literal, cast

 logger = logging.getLogger(__name__)

@@ -206,6 +206,111 @@ class REPipeline:
         self._checkpoint_path: str | None = None
         self._checkpoint_data: dict[str, Any] = {}

+    def _initialize_analysis_context(
+        self, data: bytes | Sequence[dict[str, Any]] | Sequence[bytes]
+    ) -> dict[str, Any]:
+        """Initialize analysis context with empty containers.
+
+        Args:
+            data: Input data to analyze.
+
+        Returns:
+            Initialized context dictionary.
+        """
+        return {
+            "raw_data": data,
+            "flows": [],
+            "payloads": [],
+            "messages": [],
+            "patterns": [],
+            "clusters": [],
+            "schemas": {},
+            "protocol_candidates": [],
+            "state_machine": None,
+            "warnings": [],
+            "statistics": {},
+        }
+
+    def _execute_stage(
+        self,
+        stage_name: str,
+        context: dict[str, Any],
+        checkpoint: str | None,
+    ) -> StageResult:
+        """Execute single pipeline stage.
+
+        Args:
+            stage_name: Name of stage to execute.
+            context: Analysis context.
+            checkpoint: Checkpoint path.
+
+        Returns:
+            StageResult with execution outcome.
+        """
+        handler = self._stage_handlers.get(stage_name)
+        if not handler:
+            return StageResult(
+                stage_name=stage_name, success=False, duration=0, output=None, error="No handler"
+            )
+
+        try:
+            stage_start = time.time()
+            output = handler(context)
+            stage_duration = time.time() - stage_start
+
+            if output:
+                context.update(output)
+
+            if checkpoint:
+                self._save_checkpoint(checkpoint, stage_name, context)
+
+            return StageResult(
+                stage_name=stage_name,
+                success=True,
+                duration=stage_duration,
+                output=output,
+            )
+
+        except Exception as e:
+            warnings_list: list[str] = context.get("warnings", [])
+            warnings_list.append(f"Stage {stage_name} failed: {e}")
+            context["warnings"] = warnings_list
+
+            return StageResult(
+                stage_name=stage_name,
+                success=False,
+                duration=0,
+                output=None,
+                error=str(e),
+            )
+
+    def _execute_all_stages(
+        self, context: dict[str, Any], checkpoint: str | None
+    ) -> list[StageResult]:
+        """Execute all pipeline stages.
+
+        Args:
+            context: Analysis context.
+            checkpoint: Checkpoint path.
+
+        Returns:
+            List of stage results.
+        """
+        stage_results = []
+        total_stages = len(self.stages)
+
+        for i, stage_name in enumerate(self.stages):
+            if stage_name in self._checkpoint_data:
+                context.update(self._checkpoint_data[stage_name])
+                continue
+
+            self._report_progress(stage_name, (i / total_stages) * 100)
+            stage_result = self._execute_stage(stage_name, context, checkpoint)
+            stage_results.append(stage_result)
+
+        self._report_progress("complete", 100)
+        return stage_results
+
     def analyze(
         self,
         data: bytes | Sequence[dict[str, Any]] | Sequence[bytes],
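For context on the hunk above: `_execute_stage` looks a handler up in `self._stage_handlers`, calls it with the shared context dict, and merges any returned mapping back into the context. A stage handler therefore has roughly the shape sketched below; the function name and returned keys are illustrative, not part of the package API.

```python
from typing import Any

def example_stage(context: dict[str, Any]) -> dict[str, Any]:
    """Illustrative stage handler: read the shared context, return updates."""
    data = context["raw_data"]
    payloads = [data] if isinstance(data, bytes) else []
    # Whatever is returned here is merged into the context by _execute_stage.
    return {"payloads": payloads, "statistics": {"payload_count": len(payloads)}}
```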
@@ -229,85 +334,22 @@ class REPipeline:
             >>> for msg_type in results.message_types:
             ...     print(f"{msg_type.name}: {msg_type.sample_count} samples")
         """
+        # Setup: initialize state and load checkpoint
         start_time = time.time()
         self._progress_callback = progress_callback
         self._checkpoint_path = checkpoint
         self._checkpoint_data = {}

-        # Load checkpoint if available
         if checkpoint and os.path.exists(checkpoint):
             self._load_checkpoint(checkpoint)

-
-        context: dict[str, Any] = {
-            "raw_data": data,
-            "flows": [],
-            "payloads": [],
-            "messages": [],
-            "patterns": [],
-            "clusters": [],
-            "schemas": {},
-            "protocol_candidates": [],
-            "state_machine": None,
-            "warnings": [],
-            "statistics": {},
-        }
-
-        # Execute stages
-        stage_results = []
-        total_stages = len(self.stages)
-
-        for i, stage_name in enumerate(self.stages):
-            if stage_name in self._checkpoint_data:
-                # Skip completed stages
-                context.update(self._checkpoint_data[stage_name])
-                continue
-
-            self._report_progress(stage_name, (i / total_stages) * 100)
+        context = self._initialize_analysis_context(data)

-
-
-            try:
-                stage_start = time.time()
-                output = handler(context)
-                stage_duration = time.time() - stage_start
-
-                stage_results.append(
-                    StageResult(
-                        stage_name=stage_name,
-                        success=True,
-                        duration=stage_duration,
-                        output=output,
-                    )
-                )
+        # Processing: execute pipeline stages
+        stage_results = self._execute_all_stages(context, checkpoint)

-
-                if output:
-                    context.update(output)
-
-                # Checkpoint after each stage
-                if checkpoint:
-                    self._save_checkpoint(checkpoint, stage_name, context)
-
-            except Exception as e:
-                stage_results.append(
-                    StageResult(
-                        stage_name=stage_name,
-                        success=False,
-                        duration=0,
-                        output=None,
-                        error=str(e),
-                    )
-                )
-                warnings_list: list[str] = context.get("warnings", [])
-                warnings_list.append(f"Stage {stage_name} failed: {e}")
-                context["warnings"] = warnings_list
-
-        self._report_progress("complete", 100)
-
-        # Build result
+        # Result building: construct final result
         duration = time.time() - start_time
-
         flows_list: list[Any] = context.get("flows", [])
         messages_list: list[Any] = context.get("messages", [])
         protocol_candidates_list: list[ProtocolCandidate] = context.get("protocol_candidates", [])
@@ -416,115 +458,205 @@ class REPipeline:
             context: Pipeline context.

         Returns:
-            Updated context with flows.
+            Updated context with flows and payloads.
         """
         data = context["raw_data"]
-        flows = []
-        payloads = []

         if isinstance(data, bytes):
-
-
+            flows, payloads = self._extract_from_raw_bytes(data)
+        elif isinstance(data, list | tuple):
+            flows, payloads = self._extract_from_packet_list(data)
+        else:
+            flows, payloads = [], []
+
+        self._update_flow_statistics(context, flows, payloads)
+        return {"flows": flows, "payloads": payloads}
+
+    def _extract_from_raw_bytes(self, data: bytes) -> tuple[list[FlowInfo], list[bytes]]:
+        """Extract flow from raw binary data.
+
+        Args:
+            data: Raw binary data.
+
+        Returns:
+            Tuple of (flows, payloads).
+        """
+        flow = FlowInfo(
+            flow_id="flow_0",
+            src_ip="unknown",
+            dst_ip="unknown",
+            src_port=0,
+            dst_port=0,
+            protocol="unknown",
+            packet_count=1,
+            byte_count=len(data),
+            start_time=0,
+            end_time=0,
+        )
+        return [flow], [data]
+
+    def _extract_from_packet_list(
+        self, packets: Sequence[dict[str, Any] | bytes]
+    ) -> tuple[list[FlowInfo], list[bytes]]:
+        """Extract flows from list of packets.
+
+        Args:
+            packets: List of packet dicts or raw bytes.
+
+        Returns:
+            Tuple of (flows, payloads).
+        """
+        flow_map: dict[str, dict[str, Any]] = {}
+        payloads: list[bytes] = []
+        raw_bytes_payloads: list[bytes] = []
+
+        for pkt in packets:
+            if isinstance(pkt, dict):
+                self._process_packet_dict(pkt, flow_map, payloads)
+            else:
+                payload = bytes(pkt) if not isinstance(pkt, bytes) else pkt
+                payloads.append(payload)
+                raw_bytes_payloads.append(payload)
+
+        flows = self._build_flows_from_map(flow_map)
+
+        # Create default flow for raw bytes if needed
+        if raw_bytes_payloads and not flows:
+            flows.append(self._create_default_flow(raw_bytes_payloads))
+
+        return flows, payloads
+
+    def _process_packet_dict(
+        self,
+        pkt: dict[str, Any],
+        flow_map: dict[str, dict[str, Any]],
+        payloads: list[bytes],
+    ) -> None:
+        """Process a packet dictionary and update flow map.
+
+        Args:
+            pkt: Packet dictionary with metadata.
+            flow_map: Flow mapping to update.
+            payloads: Payloads list to append to.
+        """
+        # Extract payload
+        payload_raw = pkt.get("data", pkt.get("payload", b""))
+        if isinstance(payload_raw, list | tuple):
+            payload = bytes(payload_raw)
+        else:
+            payload = payload_raw if isinstance(payload_raw, bytes) else b""
+
+        # Create flow key
+        flow_key = self._create_flow_key(pkt)
+
+        # Initialize flow if new
+        if flow_key not in flow_map:
+            flow_map[flow_key] = self._create_flow_entry(pkt)
+
+        # Update flow data
+        flow_map[flow_key]["packets"].append(pkt)
+        flow_map[flow_key]["payloads"].append(payload)
+        if "timestamp" in pkt:
+            flow_map[flow_key]["timestamps"].append(pkt["timestamp"])
+
+        payloads.append(payload)
+
+    def _create_flow_key(self, pkt: dict[str, Any]) -> str:
+        """Create flow identifier key from packet.
+
+        Args:
+            pkt: Packet dictionary.
+
+        Returns:
+            Flow key string.
+        """
+        src_ip = pkt.get("src_ip", "0.0.0.0")
+        dst_ip = pkt.get("dst_ip", "0.0.0.0")
+        src_port = pkt.get("src_port", 0)
+        dst_port = pkt.get("dst_port", 0)
+        protocol = pkt.get("protocol", "unknown")
+        return f"{src_ip}:{src_port}-{dst_ip}:{dst_port}-{protocol}"
+
+    def _create_flow_entry(self, pkt: dict[str, Any]) -> dict[str, Any]:
+        """Create new flow entry from packet.
+
+        Args:
+            pkt: Packet dictionary.
+
+        Returns:
+            Flow entry dictionary.
+        """
+        return {
+            "src_ip": pkt.get("src_ip", "0.0.0.0"),
+            "dst_ip": pkt.get("dst_ip", "0.0.0.0"),
+            "src_port": pkt.get("src_port", 0),
+            "dst_port": pkt.get("dst_port", 0),
+            "protocol": pkt.get("protocol", "unknown"),
+            "packets": [],
+            "payloads": [],
+            "timestamps": [],
+        }
+
+    def _build_flows_from_map(self, flow_map: dict[str, dict[str, Any]]) -> list[FlowInfo]:
+        """Build FlowInfo objects from flow map.
+
+        Args:
+            flow_map: Mapping of flow keys to flow data.
+
+        Returns:
+            List of FlowInfo objects.
+        """
+        flows = []
+        for flow_id, flow_data in flow_map.items():
+            timestamps = flow_data.get("timestamps", [0])
             flows.append(
                 FlowInfo(
-                    flow_id=
-                    src_ip="
-                    dst_ip="
-                    src_port=
-                    dst_port=
-                    protocol="
-                    packet_count=
-                    byte_count=len(
-                    start_time=0,
-                    end_time=0,
+                    flow_id=flow_id,
+                    src_ip=flow_data["src_ip"],
+                    dst_ip=flow_data["dst_ip"],
+                    src_port=flow_data["src_port"],
+                    dst_port=flow_data["dst_port"],
+                    protocol=flow_data["protocol"],
+                    packet_count=len(flow_data["packets"]),
+                    byte_count=sum(len(p) for p in flow_data["payloads"]),
+                    start_time=min(timestamps) if timestamps else 0,
+                    end_time=max(timestamps) if timestamps else 0,
                 )
             )
+        return flows

-
-
-            flow_map: dict[str, dict[str, Any]] = {}
-            raw_bytes_payloads: list[bytes] = []
-
-            for _i, pkt in enumerate(data):
-                if isinstance(pkt, dict):
-                    # Packet with metadata
-                    payload_raw = pkt.get("data", pkt.get("payload", b""))
-                    if isinstance(payload_raw, list | tuple):
-                        payload = bytes(payload_raw)
-                    else:
-                        payload = payload_raw if isinstance(payload_raw, bytes) else b""
-
-                    # Create flow key
-                    src_ip = pkt.get("src_ip", "0.0.0.0")
-                    dst_ip = pkt.get("dst_ip", "0.0.0.0")
-                    src_port = pkt.get("src_port", 0)
-                    dst_port = pkt.get("dst_port", 0)
-                    protocol = pkt.get("protocol", "unknown")
-
-                    flow_key = f"{src_ip}:{src_port}-{dst_ip}:{dst_port}-{protocol}"
-
-                    if flow_key not in flow_map:
-                        flow_map[flow_key] = {
-                            "src_ip": src_ip,
-                            "dst_ip": dst_ip,
-                            "src_port": src_port,
-                            "dst_port": dst_port,
-                            "protocol": protocol,
-                            "packets": [],
-                            "payloads": [],
-                            "timestamps": [],
-                        }
-
-                    flow_map[flow_key]["packets"].append(pkt)
-                    flow_map[flow_key]["payloads"].append(payload)
-                    if "timestamp" in pkt:
-                        flow_map[flow_key]["timestamps"].append(pkt["timestamp"])
-
-                    payloads.append(payload)
-
-                else:
-                    # Raw bytes - collect for default flow
-                    raw_payload = bytes(pkt) if not isinstance(pkt, bytes) else pkt
-                    payloads.append(raw_payload)
-                    raw_bytes_payloads.append(raw_payload)
-
-            # Build flow objects from flow_map
-            for flow_id, flow_data in flow_map.items():
-                timestamps = flow_data.get("timestamps", [0])
-                flows.append(
-                    FlowInfo(
-                        flow_id=flow_id,
-                        src_ip=flow_data["src_ip"],
-                        dst_ip=flow_data["dst_ip"],
-                        src_port=flow_data["src_port"],
-                        dst_port=flow_data["dst_port"],
-                        protocol=flow_data["protocol"],
-                        packet_count=len(flow_data["packets"]),
-                        byte_count=sum(len(p) for p in flow_data["payloads"]),
-                        start_time=min(timestamps) if timestamps else 0,
-                        end_time=max(timestamps) if timestamps else 0,
-                    )
-                )
+    def _create_default_flow(self, payloads: list[bytes]) -> FlowInfo:
+        """Create default flow for raw bytes.

-
-
-        if raw_bytes_payloads and not flows:
-            flows.append(
-                FlowInfo(
-                    flow_id="flow_default",
-                    src_ip="unknown",
-                    dst_ip="unknown",
-                    src_port=0,
-                    dst_port=0,
-                    protocol="unknown",
-                    packet_count=len(raw_bytes_payloads),
-                    byte_count=sum(len(p) for p in raw_bytes_payloads),
-                    start_time=0,
-                    end_time=0,
-                )
-            )
+        Args:
+            payloads: List of raw byte payloads.

-
+        Returns:
+            Default FlowInfo object.
+        """
+        return FlowInfo(
+            flow_id="flow_default",
+            src_ip="unknown",
+            dst_ip="unknown",
+            src_port=0,
+            dst_port=0,
+            protocol="unknown",
+            packet_count=len(payloads),
+            byte_count=sum(len(p) for p in payloads),
+            start_time=0,
+            end_time=0,
+        )
+
+    def _update_flow_statistics(
+        self, context: dict[str, Any], flows: list[FlowInfo], payloads: list[bytes]
+    ) -> None:
+        """Update context statistics with flow extraction results.
+
+        Args:
+            context: Pipeline context to update.
+            flows: Extracted flows.
+            payloads: Extracted payloads.
+        """
         if "statistics" not in context:
             context["statistics"] = {}

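The flow grouping added above keys packets on `src_ip:src_port-dst_ip:dst_port-protocol` (see `_create_flow_key`). A minimal standalone sketch of that grouping, using made-up packet dicts rather than the package's own types, looks like this:

```python
from collections import defaultdict
from typing import Any

def flow_key(pkt: dict[str, Any]) -> str:
    # Mirrors the key format used by _create_flow_key in the hunk above.
    return (
        f"{pkt.get('src_ip', '0.0.0.0')}:{pkt.get('src_port', 0)}-"
        f"{pkt.get('dst_ip', '0.0.0.0')}:{pkt.get('dst_port', 0)}-"
        f"{pkt.get('protocol', 'unknown')}"
    )

packets = [
    {"src_ip": "10.0.0.1", "src_port": 1234, "dst_ip": "10.0.0.2",
     "dst_port": 80, "protocol": "tcp", "data": b"GET / HTTP/1.1\r\n"},
    {"src_ip": "10.0.0.1", "src_port": 1234, "dst_ip": "10.0.0.2",
     "dst_port": 80, "protocol": "tcp", "data": b"Host: example\r\n"},
]

grouped: dict[str, list[bytes]] = defaultdict(list)
for pkt in packets:
    grouped[flow_key(pkt)].append(pkt["data"])

# Both payloads land under the single key "10.0.0.1:1234-10.0.0.2:80-tcp".
print({key: len(payloads) for key, payloads in grouped.items()})
```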
@@ -534,8 +666,6 @@ class REPipeline:
             "total_bytes": sum(len(p) for p in payloads),
         }

-        return {"flows": flows, "payloads": payloads}
-
     def _stage_payload_analysis(self, context: dict[str, Any]) -> dict[str, Any]:
         """Analyze payloads for structure.

@@ -713,9 +843,27 @@ class REPipeline:
         """
         messages = context.get("messages", [])
         flows = context.get("flows", [])
-        candidates = []
+        candidates: list[ProtocolCandidate] = []
+
+        # Detect protocols from multiple sources
+        candidates.extend(self._detect_by_port(flows))
+        candidates.extend(self._detect_by_magic_bytes(messages))
+        candidates.extend(self._detect_from_library(messages))

-        #
+        # Deduplicate candidates
+        unique_candidates = self._deduplicate_candidates(candidates)
+
+        return {"protocol_candidates": unique_candidates}
+
+    def _detect_by_port(self, flows: list[FlowInfo]) -> list[ProtocolCandidate]:
+        """Detect protocols based on well-known port numbers.
+
+        Args:
+            flows: List of network flows.
+
+        Returns:
+            List of protocol candidates.
+        """
         port_protocols = {
             53: "dns",
             80: "http",
@@ -726,6 +874,7 @@ class REPipeline:
             47808: "bacnet",
         }

+        candidates = []
         for flow in flows:
             port = flow.dst_port or flow.src_port
             if port in port_protocols:
@@ -737,69 +886,110 @@ class REPipeline:
                     )
                 )

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        return candidates
+
+    def _detect_by_magic_bytes(self, messages: list[bytes]) -> list[ProtocolCandidate]:
+        """Detect protocols by magic byte signatures.
+
+        Args:
+            messages: List of message bytes.
+
+        Returns:
+            List of protocol candidates.
+        """
+        if not messages:
+            return []
+
+        try:
+            from oscura.inference.binary import MagicByteDetector
+
+            detector = MagicByteDetector()
+            sample = messages[0]
+
+            if len(sample) >= 2:
+                result = detector.detect(sample)
+                if result and result.known_format:
+                    return [
+                        ProtocolCandidate(
+                            name=result.known_format,
+                            confidence=result.confidence,
+                            header_match=True,
                         )
+                    ]

-
-
+        except Exception as e:
+            logger.debug("Magic byte detection failed (non-critical): %s", e)
+
+        return []
+
+    def _detect_from_library(self, messages: list[bytes]) -> list[ProtocolCandidate]:
+        """Detect protocols from protocol library.

-
+        Args:
+            messages: List of message bytes.
+
+        Returns:
+            List of protocol candidates.
+        """
         try:
             from oscura.inference.protocol_library import get_library

             library = get_library()
+            candidates = []

             for protocol in library.list_protocols():
-                if protocol
-
-
-
-
-
-
-
-
-
-                if first_field and hasattr(first_field, "value"):
-                    # Has expected value
-                    candidates.append(
-                        ProtocolCandidate(
-                            name=protocol.name,
-                            confidence=0.4,
-                            matched_patterns=["header_value"],
-                        )
-                    )
-                    break
+                if self._matches_protocol_header(protocol, messages):
+                    candidates.append(
+                        ProtocolCandidate(
+                            name=protocol.name,
+                            confidence=0.4,
+                            matched_patterns=["header_value"],
+                        )
+                    )
+
+            return candidates

         except Exception as e:
             logger.debug("Protocol library matching failed (non-critical): %s", e)
+            return []
+
+    def _matches_protocol_header(self, protocol: Any, messages: list[bytes]) -> bool:
+        """Check if messages match protocol header.

-
-
+        Args:
+            protocol: Protocol definition.
+            messages: List of message bytes.
+
+        Returns:
+            True if matches.
+        """
+        if not protocol.definition or not protocol.definition.fields:
+            return False
+
+        first_field = protocol.definition.fields[0]
+        if not hasattr(first_field, "value"):
+            return False
+
+        # Check first 10 messages
+        return any(len(msg) >= 4 for msg in messages[:10])
+
+    def _deduplicate_candidates(
+        self, candidates: list[ProtocolCandidate]
+    ) -> list[ProtocolCandidate]:
+        """Deduplicate candidates, keeping highest confidence.
+
+        Args:
+            candidates: List of candidates.
+
+        Returns:
+            Deduplicated list.
+        """
+        unique: dict[str, ProtocolCandidate] = {}
         for c in candidates:
-            if
-                c.name
-                or c.confidence > unique_candidates[c.name].confidence
-            ):
-                unique_candidates[c.name] = c
+            if c.name not in unique or c.confidence > unique[c.name].confidence:
+                unique[c.name] = c

-        return
+        return list(unique.values())

     def _stage_state_machine(self, context: dict[str, Any]) -> dict[str, Any]:
         """Infer protocol state machine.
@@ -825,7 +1015,7 @@ class REPipeline:
                 message_to_cluster[idx] = getattr(cluster, "cluster_id", 0)

         # Build observation sequence
-        sequence = [
+        sequence: list[str] = [
             f"type_{message_to_cluster.get(i, 0)}"
             for i in range(len(messages))
             if i in message_to_cluster
@@ -835,7 +1025,8 @@ class REPipeline:
             from oscura.inference.state_machine import StateMachineInferrer

             inferrer = StateMachineInferrer()
-
+            # Cast list[str] to list[str | int] for API compatibility
+            automaton = inferrer.infer_rpni([cast("list[str | int]", sequence)])

             return {
                 "state_machine": {