oscura-0.5.0-py3-none-any.whl → oscura-0.6.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oscura/__init__.py +169 -167
- oscura/analyzers/__init__.py +3 -0
- oscura/analyzers/classification.py +659 -0
- oscura/analyzers/digital/__init__.py +0 -48
- oscura/analyzers/digital/edges.py +325 -65
- oscura/analyzers/digital/extraction.py +0 -195
- oscura/analyzers/digital/quality.py +293 -166
- oscura/analyzers/digital/timing.py +260 -115
- oscura/analyzers/digital/timing_numba.py +334 -0
- oscura/analyzers/entropy.py +605 -0
- oscura/analyzers/eye/diagram.py +176 -109
- oscura/analyzers/eye/metrics.py +5 -5
- oscura/analyzers/jitter/__init__.py +6 -4
- oscura/analyzers/jitter/ber.py +52 -52
- oscura/analyzers/jitter/classification.py +156 -0
- oscura/analyzers/jitter/decomposition.py +163 -113
- oscura/analyzers/jitter/spectrum.py +80 -64
- oscura/analyzers/ml/__init__.py +39 -0
- oscura/analyzers/ml/features.py +600 -0
- oscura/analyzers/ml/signal_classifier.py +604 -0
- oscura/analyzers/packet/daq.py +246 -158
- oscura/analyzers/packet/parser.py +12 -1
- oscura/analyzers/packet/payload.py +50 -2110
- oscura/analyzers/packet/payload_analysis.py +361 -181
- oscura/analyzers/packet/payload_patterns.py +133 -70
- oscura/analyzers/packet/stream.py +84 -23
- oscura/analyzers/patterns/__init__.py +26 -5
- oscura/analyzers/patterns/anomaly_detection.py +908 -0
- oscura/analyzers/patterns/clustering.py +169 -108
- oscura/analyzers/patterns/clustering_optimized.py +227 -0
- oscura/analyzers/patterns/discovery.py +1 -1
- oscura/analyzers/patterns/matching.py +581 -197
- oscura/analyzers/patterns/pattern_mining.py +778 -0
- oscura/analyzers/patterns/periodic.py +121 -38
- oscura/analyzers/patterns/sequences.py +175 -78
- oscura/analyzers/power/conduction.py +1 -1
- oscura/analyzers/power/soa.py +6 -6
- oscura/analyzers/power/switching.py +250 -110
- oscura/analyzers/protocol/__init__.py +17 -1
- oscura/analyzers/protocols/__init__.py +1 -22
- oscura/analyzers/protocols/base.py +6 -6
- oscura/analyzers/protocols/ble/__init__.py +38 -0
- oscura/analyzers/protocols/ble/analyzer.py +809 -0
- oscura/analyzers/protocols/ble/uuids.py +288 -0
- oscura/analyzers/protocols/can.py +257 -127
- oscura/analyzers/protocols/can_fd.py +107 -80
- oscura/analyzers/protocols/flexray.py +139 -80
- oscura/analyzers/protocols/hdlc.py +93 -58
- oscura/analyzers/protocols/i2c.py +247 -106
- oscura/analyzers/protocols/i2s.py +138 -86
- oscura/analyzers/protocols/industrial/__init__.py +40 -0
- oscura/analyzers/protocols/industrial/bacnet/__init__.py +33 -0
- oscura/analyzers/protocols/industrial/bacnet/analyzer.py +708 -0
- oscura/analyzers/protocols/industrial/bacnet/encoding.py +412 -0
- oscura/analyzers/protocols/industrial/bacnet/services.py +622 -0
- oscura/analyzers/protocols/industrial/ethercat/__init__.py +30 -0
- oscura/analyzers/protocols/industrial/ethercat/analyzer.py +474 -0
- oscura/analyzers/protocols/industrial/ethercat/mailbox.py +339 -0
- oscura/analyzers/protocols/industrial/ethercat/topology.py +166 -0
- oscura/analyzers/protocols/industrial/modbus/__init__.py +31 -0
- oscura/analyzers/protocols/industrial/modbus/analyzer.py +525 -0
- oscura/analyzers/protocols/industrial/modbus/crc.py +79 -0
- oscura/analyzers/protocols/industrial/modbus/functions.py +436 -0
- oscura/analyzers/protocols/industrial/opcua/__init__.py +21 -0
- oscura/analyzers/protocols/industrial/opcua/analyzer.py +552 -0
- oscura/analyzers/protocols/industrial/opcua/datatypes.py +446 -0
- oscura/analyzers/protocols/industrial/opcua/services.py +264 -0
- oscura/analyzers/protocols/industrial/profinet/__init__.py +23 -0
- oscura/analyzers/protocols/industrial/profinet/analyzer.py +441 -0
- oscura/analyzers/protocols/industrial/profinet/dcp.py +263 -0
- oscura/analyzers/protocols/industrial/profinet/ptcp.py +200 -0
- oscura/analyzers/protocols/jtag.py +180 -98
- oscura/analyzers/protocols/lin.py +219 -114
- oscura/analyzers/protocols/manchester.py +4 -4
- oscura/analyzers/protocols/onewire.py +253 -149
- oscura/analyzers/protocols/parallel_bus/__init__.py +20 -0
- oscura/analyzers/protocols/parallel_bus/centronics.py +92 -0
- oscura/analyzers/protocols/parallel_bus/gpib.py +137 -0
- oscura/analyzers/protocols/spi.py +192 -95
- oscura/analyzers/protocols/swd.py +321 -167
- oscura/analyzers/protocols/uart.py +267 -125
- oscura/analyzers/protocols/usb.py +235 -131
- oscura/analyzers/side_channel/power.py +17 -12
- oscura/analyzers/signal/__init__.py +15 -0
- oscura/analyzers/signal/timing_analysis.py +1086 -0
- oscura/analyzers/signal_integrity/__init__.py +4 -1
- oscura/analyzers/signal_integrity/sparams.py +2 -19
- oscura/analyzers/spectral/chunked.py +129 -60
- oscura/analyzers/spectral/chunked_fft.py +300 -94
- oscura/analyzers/spectral/chunked_wavelet.py +100 -80
- oscura/analyzers/statistical/checksum.py +376 -217
- oscura/analyzers/statistical/classification.py +229 -107
- oscura/analyzers/statistical/entropy.py +78 -53
- oscura/analyzers/statistics/correlation.py +407 -211
- oscura/analyzers/statistics/outliers.py +2 -2
- oscura/analyzers/statistics/streaming.py +30 -5
- oscura/analyzers/validation.py +216 -101
- oscura/analyzers/waveform/measurements.py +9 -0
- oscura/analyzers/waveform/measurements_with_uncertainty.py +31 -15
- oscura/analyzers/waveform/spectral.py +500 -228
- oscura/api/__init__.py +31 -5
- oscura/api/dsl/__init__.py +582 -0
- oscura/{dsl → api/dsl}/commands.py +43 -76
- oscura/{dsl → api/dsl}/interpreter.py +26 -51
- oscura/{dsl → api/dsl}/parser.py +107 -77
- oscura/{dsl → api/dsl}/repl.py +2 -2
- oscura/api/dsl.py +1 -1
- oscura/{integrations → api/integrations}/__init__.py +1 -1
- oscura/{integrations → api/integrations}/llm.py +201 -102
- oscura/api/operators.py +3 -3
- oscura/api/optimization.py +144 -30
- oscura/api/rest_server.py +921 -0
- oscura/api/server/__init__.py +17 -0
- oscura/api/server/dashboard.py +850 -0
- oscura/api/server/static/README.md +34 -0
- oscura/api/server/templates/base.html +181 -0
- oscura/api/server/templates/export.html +120 -0
- oscura/api/server/templates/home.html +284 -0
- oscura/api/server/templates/protocols.html +58 -0
- oscura/api/server/templates/reports.html +43 -0
- oscura/api/server/templates/session_detail.html +89 -0
- oscura/api/server/templates/sessions.html +83 -0
- oscura/api/server/templates/waveforms.html +73 -0
- oscura/automotive/__init__.py +8 -1
- oscura/automotive/can/__init__.py +10 -0
- oscura/automotive/can/checksum.py +3 -1
- oscura/automotive/can/dbc_generator.py +590 -0
- oscura/automotive/can/message_wrapper.py +121 -74
- oscura/automotive/can/patterns.py +98 -21
- oscura/automotive/can/session.py +292 -56
- oscura/automotive/can/state_machine.py +6 -3
- oscura/automotive/can/stimulus_response.py +97 -75
- oscura/automotive/dbc/__init__.py +10 -2
- oscura/automotive/dbc/generator.py +84 -56
- oscura/automotive/dbc/parser.py +6 -6
- oscura/automotive/dtc/data.json +2763 -0
- oscura/automotive/dtc/database.py +2 -2
- oscura/automotive/flexray/__init__.py +31 -0
- oscura/automotive/flexray/analyzer.py +504 -0
- oscura/automotive/flexray/crc.py +185 -0
- oscura/automotive/flexray/fibex.py +449 -0
- oscura/automotive/j1939/__init__.py +45 -8
- oscura/automotive/j1939/analyzer.py +605 -0
- oscura/automotive/j1939/spns.py +326 -0
- oscura/automotive/j1939/transport.py +306 -0
- oscura/automotive/lin/__init__.py +47 -0
- oscura/automotive/lin/analyzer.py +612 -0
- oscura/automotive/loaders/blf.py +13 -2
- oscura/automotive/loaders/csv_can.py +143 -72
- oscura/automotive/loaders/dispatcher.py +50 -2
- oscura/automotive/loaders/mdf.py +86 -45
- oscura/automotive/loaders/pcap.py +111 -61
- oscura/automotive/uds/__init__.py +4 -0
- oscura/automotive/uds/analyzer.py +725 -0
- oscura/automotive/uds/decoder.py +140 -58
- oscura/automotive/uds/models.py +7 -1
- oscura/automotive/visualization.py +1 -1
- oscura/cli/analyze.py +348 -0
- oscura/cli/batch.py +142 -122
- oscura/cli/benchmark.py +275 -0
- oscura/cli/characterize.py +137 -82
- oscura/cli/compare.py +224 -131
- oscura/cli/completion.py +250 -0
- oscura/cli/config_cmd.py +361 -0
- oscura/cli/decode.py +164 -87
- oscura/cli/export.py +286 -0
- oscura/cli/main.py +115 -31
- oscura/{onboarding → cli/onboarding}/__init__.py +3 -3
- oscura/{onboarding → cli/onboarding}/help.py +80 -58
- oscura/{onboarding → cli/onboarding}/tutorials.py +97 -72
- oscura/{onboarding → cli/onboarding}/wizard.py +55 -36
- oscura/cli/progress.py +147 -0
- oscura/cli/shell.py +157 -135
- oscura/cli/validate_cmd.py +204 -0
- oscura/cli/visualize.py +158 -0
- oscura/convenience.py +125 -79
- oscura/core/__init__.py +4 -2
- oscura/core/backend_selector.py +3 -3
- oscura/core/cache.py +126 -15
- oscura/core/cancellation.py +1 -1
- oscura/{config → core/config}/__init__.py +20 -11
- oscura/{config → core/config}/defaults.py +1 -1
- oscura/{config → core/config}/loader.py +7 -5
- oscura/{config → core/config}/memory.py +5 -5
- oscura/{config → core/config}/migration.py +1 -1
- oscura/{config → core/config}/pipeline.py +99 -23
- oscura/{config → core/config}/preferences.py +1 -1
- oscura/{config → core/config}/protocol.py +3 -3
- oscura/{config → core/config}/schema.py +426 -272
- oscura/{config → core/config}/settings.py +1 -1
- oscura/{config → core/config}/thresholds.py +195 -153
- oscura/core/correlation.py +5 -6
- oscura/core/cross_domain.py +0 -2
- oscura/core/debug.py +9 -5
- oscura/{extensibility → core/extensibility}/docs.py +158 -70
- oscura/{extensibility → core/extensibility}/extensions.py +160 -76
- oscura/{extensibility → core/extensibility}/logging.py +1 -1
- oscura/{extensibility → core/extensibility}/measurements.py +1 -1
- oscura/{extensibility → core/extensibility}/plugins.py +1 -1
- oscura/{extensibility → core/extensibility}/templates.py +73 -3
- oscura/{extensibility → core/extensibility}/validation.py +1 -1
- oscura/core/gpu_backend.py +11 -7
- oscura/core/log_query.py +101 -11
- oscura/core/logging.py +126 -54
- oscura/core/logging_advanced.py +5 -5
- oscura/core/memory_limits.py +108 -70
- oscura/core/memory_monitor.py +2 -2
- oscura/core/memory_progress.py +7 -7
- oscura/core/memory_warnings.py +1 -1
- oscura/core/numba_backend.py +13 -13
- oscura/{plugins → core/plugins}/__init__.py +9 -9
- oscura/{plugins → core/plugins}/base.py +7 -7
- oscura/{plugins → core/plugins}/cli.py +3 -3
- oscura/{plugins → core/plugins}/discovery.py +186 -106
- oscura/{plugins → core/plugins}/lifecycle.py +1 -1
- oscura/{plugins → core/plugins}/manager.py +7 -7
- oscura/{plugins → core/plugins}/registry.py +3 -3
- oscura/{plugins → core/plugins}/versioning.py +1 -1
- oscura/core/progress.py +16 -1
- oscura/core/provenance.py +8 -2
- oscura/{schemas → core/schemas}/__init__.py +2 -2
- oscura/core/schemas/bus_configuration.json +322 -0
- oscura/core/schemas/device_mapping.json +182 -0
- oscura/core/schemas/packet_format.json +418 -0
- oscura/core/schemas/protocol_definition.json +363 -0
- oscura/core/types.py +4 -0
- oscura/core/uncertainty.py +3 -3
- oscura/correlation/__init__.py +52 -0
- oscura/correlation/multi_protocol.py +811 -0
- oscura/discovery/auto_decoder.py +117 -35
- oscura/discovery/comparison.py +191 -86
- oscura/discovery/quality_validator.py +155 -68
- oscura/discovery/signal_detector.py +196 -79
- oscura/export/__init__.py +18 -20
- oscura/export/kaitai_struct.py +513 -0
- oscura/export/scapy_layer.py +801 -0
- oscura/export/wireshark/README.md +15 -15
- oscura/export/wireshark/generator.py +1 -1
- oscura/export/wireshark/templates/dissector.lua.j2 +2 -2
- oscura/export/wireshark_dissector.py +746 -0
- oscura/guidance/wizard.py +207 -111
- oscura/hardware/__init__.py +19 -0
- oscura/{acquisition → hardware/acquisition}/__init__.py +4 -4
- oscura/{acquisition → hardware/acquisition}/file.py +2 -2
- oscura/{acquisition → hardware/acquisition}/hardware.py +7 -7
- oscura/{acquisition → hardware/acquisition}/saleae.py +15 -12
- oscura/{acquisition → hardware/acquisition}/socketcan.py +1 -1
- oscura/{acquisition → hardware/acquisition}/streaming.py +2 -2
- oscura/{acquisition → hardware/acquisition}/synthetic.py +3 -3
- oscura/{acquisition → hardware/acquisition}/visa.py +33 -11
- oscura/hardware/firmware/__init__.py +29 -0
- oscura/hardware/firmware/pattern_recognition.py +874 -0
- oscura/hardware/hal_detector.py +736 -0
- oscura/hardware/security/__init__.py +37 -0
- oscura/hardware/security/side_channel_detector.py +1126 -0
- oscura/inference/__init__.py +4 -0
- oscura/inference/active_learning/README.md +7 -7
- oscura/inference/active_learning/observation_table.py +4 -1
- oscura/inference/alignment.py +216 -123
- oscura/inference/bayesian.py +113 -33
- oscura/inference/crc_reverse.py +101 -55
- oscura/inference/logic.py +6 -2
- oscura/inference/message_format.py +342 -183
- oscura/inference/protocol.py +95 -44
- oscura/inference/protocol_dsl.py +180 -82
- oscura/inference/signal_intelligence.py +1439 -706
- oscura/inference/spectral.py +99 -57
- oscura/inference/state_machine.py +810 -158
- oscura/inference/stream.py +270 -110
- oscura/iot/__init__.py +34 -0
- oscura/iot/coap/__init__.py +32 -0
- oscura/iot/coap/analyzer.py +668 -0
- oscura/iot/coap/options.py +212 -0
- oscura/iot/lorawan/__init__.py +21 -0
- oscura/iot/lorawan/crypto.py +206 -0
- oscura/iot/lorawan/decoder.py +801 -0
- oscura/iot/lorawan/mac_commands.py +341 -0
- oscura/iot/mqtt/__init__.py +27 -0
- oscura/iot/mqtt/analyzer.py +999 -0
- oscura/iot/mqtt/properties.py +315 -0
- oscura/iot/zigbee/__init__.py +31 -0
- oscura/iot/zigbee/analyzer.py +615 -0
- oscura/iot/zigbee/security.py +153 -0
- oscura/iot/zigbee/zcl.py +349 -0
- oscura/jupyter/display.py +125 -45
- oscura/{exploratory → jupyter/exploratory}/__init__.py +8 -8
- oscura/{exploratory → jupyter/exploratory}/error_recovery.py +298 -141
- oscura/jupyter/exploratory/fuzzy.py +746 -0
- oscura/{exploratory → jupyter/exploratory}/fuzzy_advanced.py +258 -100
- oscura/{exploratory → jupyter/exploratory}/legacy.py +464 -242
- oscura/{exploratory → jupyter/exploratory}/parse.py +167 -145
- oscura/{exploratory → jupyter/exploratory}/recovery.py +119 -87
- oscura/jupyter/exploratory/sync.py +612 -0
- oscura/{exploratory → jupyter/exploratory}/unknown.py +299 -176
- oscura/jupyter/magic.py +4 -4
- oscura/{ui → jupyter/ui}/__init__.py +2 -2
- oscura/{ui → jupyter/ui}/formatters.py +3 -3
- oscura/{ui → jupyter/ui}/progressive_display.py +153 -82
- oscura/loaders/__init__.py +171 -63
- oscura/loaders/binary.py +88 -1
- oscura/loaders/chipwhisperer.py +153 -137
- oscura/loaders/configurable.py +208 -86
- oscura/loaders/csv_loader.py +458 -215
- oscura/loaders/hdf5_loader.py +278 -119
- oscura/loaders/lazy.py +87 -54
- oscura/loaders/mmap_loader.py +1 -1
- oscura/loaders/numpy_loader.py +253 -116
- oscura/loaders/pcap.py +226 -151
- oscura/loaders/rigol.py +110 -49
- oscura/loaders/sigrok.py +201 -78
- oscura/loaders/tdms.py +81 -58
- oscura/loaders/tektronix.py +291 -174
- oscura/loaders/touchstone.py +182 -87
- oscura/loaders/vcd.py +215 -117
- oscura/loaders/wav.py +155 -68
- oscura/reporting/__init__.py +9 -7
- oscura/reporting/analyze.py +352 -146
- oscura/reporting/argument_preparer.py +69 -14
- oscura/reporting/auto_report.py +97 -61
- oscura/reporting/batch.py +131 -58
- oscura/reporting/chart_selection.py +57 -45
- oscura/reporting/comparison.py +63 -17
- oscura/reporting/content/executive.py +76 -24
- oscura/reporting/core_formats/multi_format.py +11 -8
- oscura/reporting/engine.py +312 -158
- oscura/reporting/enhanced_reports.py +949 -0
- oscura/reporting/export.py +86 -43
- oscura/reporting/formatting/numbers.py +69 -42
- oscura/reporting/html.py +139 -58
- oscura/reporting/index.py +137 -65
- oscura/reporting/output.py +158 -67
- oscura/reporting/pdf.py +67 -102
- oscura/reporting/plots.py +191 -112
- oscura/reporting/sections.py +88 -47
- oscura/reporting/standards.py +104 -61
- oscura/reporting/summary_generator.py +75 -55
- oscura/reporting/tables.py +138 -54
- oscura/reporting/templates/enhanced/protocol_re.html +525 -0
- oscura/reporting/templates/index.md +13 -13
- oscura/sessions/__init__.py +14 -23
- oscura/sessions/base.py +3 -3
- oscura/sessions/blackbox.py +106 -10
- oscura/sessions/generic.py +2 -2
- oscura/sessions/legacy.py +783 -0
- oscura/side_channel/__init__.py +63 -0
- oscura/side_channel/dpa.py +1025 -0
- oscura/utils/__init__.py +15 -1
- oscura/utils/autodetect.py +1 -5
- oscura/utils/bitwise.py +118 -0
- oscura/{builders → utils/builders}/__init__.py +1 -1
- oscura/{comparison → utils/comparison}/__init__.py +6 -6
- oscura/{comparison → utils/comparison}/compare.py +202 -101
- oscura/{comparison → utils/comparison}/golden.py +83 -63
- oscura/{comparison → utils/comparison}/limits.py +313 -89
- oscura/{comparison → utils/comparison}/mask.py +151 -45
- oscura/{comparison → utils/comparison}/trace_diff.py +1 -1
- oscura/{comparison → utils/comparison}/visualization.py +147 -89
- oscura/{component → utils/component}/__init__.py +3 -3
- oscura/{component → utils/component}/impedance.py +122 -58
- oscura/{component → utils/component}/reactive.py +165 -168
- oscura/{component → utils/component}/transmission_line.py +3 -3
- oscura/{filtering → utils/filtering}/__init__.py +6 -6
- oscura/{filtering → utils/filtering}/base.py +1 -1
- oscura/{filtering → utils/filtering}/convenience.py +2 -2
- oscura/{filtering → utils/filtering}/design.py +169 -93
- oscura/{filtering → utils/filtering}/filters.py +2 -2
- oscura/{filtering → utils/filtering}/introspection.py +2 -2
- oscura/utils/geometry.py +31 -0
- oscura/utils/imports.py +184 -0
- oscura/utils/lazy.py +1 -1
- oscura/{math → utils/math}/__init__.py +2 -2
- oscura/{math → utils/math}/arithmetic.py +114 -48
- oscura/{math → utils/math}/interpolation.py +139 -106
- oscura/utils/memory.py +129 -66
- oscura/utils/memory_advanced.py +92 -9
- oscura/utils/memory_extensions.py +10 -8
- oscura/{optimization → utils/optimization}/__init__.py +1 -1
- oscura/{optimization → utils/optimization}/search.py +2 -2
- oscura/utils/performance/__init__.py +58 -0
- oscura/utils/performance/caching.py +889 -0
- oscura/utils/performance/lsh_clustering.py +333 -0
- oscura/utils/performance/memory_optimizer.py +699 -0
- oscura/utils/performance/optimizations.py +675 -0
- oscura/utils/performance/parallel.py +654 -0
- oscura/utils/performance/profiling.py +661 -0
- oscura/{pipeline → utils/pipeline}/base.py +1 -1
- oscura/{pipeline → utils/pipeline}/composition.py +11 -3
- oscura/{pipeline → utils/pipeline}/parallel.py +3 -2
- oscura/{pipeline → utils/pipeline}/pipeline.py +1 -1
- oscura/{pipeline → utils/pipeline}/reverse_engineering.py +412 -221
- oscura/{search → utils/search}/__init__.py +3 -3
- oscura/{search → utils/search}/anomaly.py +188 -58
- oscura/utils/search/context.py +294 -0
- oscura/{search → utils/search}/pattern.py +138 -10
- oscura/utils/serial.py +51 -0
- oscura/utils/storage/__init__.py +61 -0
- oscura/utils/storage/database.py +1166 -0
- oscura/{streaming → utils/streaming}/chunked.py +302 -143
- oscura/{streaming → utils/streaming}/progressive.py +1 -1
- oscura/{streaming → utils/streaming}/realtime.py +3 -2
- oscura/{triggering → utils/triggering}/__init__.py +6 -6
- oscura/{triggering → utils/triggering}/base.py +6 -6
- oscura/{triggering → utils/triggering}/edge.py +2 -2
- oscura/{triggering → utils/triggering}/pattern.py +2 -2
- oscura/{triggering → utils/triggering}/pulse.py +115 -74
- oscura/{triggering → utils/triggering}/window.py +2 -2
- oscura/utils/validation.py +32 -0
- oscura/validation/__init__.py +121 -0
- oscura/{compliance → validation/compliance}/__init__.py +5 -5
- oscura/{compliance → validation/compliance}/advanced.py +5 -5
- oscura/{compliance → validation/compliance}/masks.py +1 -1
- oscura/{compliance → validation/compliance}/reporting.py +127 -53
- oscura/{compliance → validation/compliance}/testing.py +114 -52
- oscura/validation/compliance_tests.py +915 -0
- oscura/validation/fuzzer.py +990 -0
- oscura/validation/grammar_tests.py +596 -0
- oscura/validation/grammar_validator.py +904 -0
- oscura/validation/hil_testing.py +977 -0
- oscura/{quality → validation/quality}/__init__.py +4 -4
- oscura/{quality → validation/quality}/ensemble.py +251 -171
- oscura/{quality → validation/quality}/explainer.py +3 -3
- oscura/{quality → validation/quality}/scoring.py +1 -1
- oscura/{quality → validation/quality}/warnings.py +4 -4
- oscura/validation/regression_suite.py +808 -0
- oscura/validation/replay.py +788 -0
- oscura/{testing → validation/testing}/__init__.py +2 -2
- oscura/{testing → validation/testing}/synthetic.py +5 -5
- oscura/visualization/__init__.py +9 -0
- oscura/visualization/accessibility.py +1 -1
- oscura/visualization/annotations.py +64 -67
- oscura/visualization/colors.py +7 -7
- oscura/visualization/digital.py +180 -81
- oscura/visualization/eye.py +236 -85
- oscura/visualization/interactive.py +320 -143
- oscura/visualization/jitter.py +587 -247
- oscura/visualization/layout.py +169 -134
- oscura/visualization/optimization.py +103 -52
- oscura/visualization/palettes.py +1 -1
- oscura/visualization/power.py +427 -211
- oscura/visualization/power_extended.py +626 -297
- oscura/visualization/presets.py +2 -0
- oscura/visualization/protocols.py +495 -181
- oscura/visualization/render.py +79 -63
- oscura/visualization/reverse_engineering.py +171 -124
- oscura/visualization/signal_integrity.py +460 -279
- oscura/visualization/specialized.py +190 -100
- oscura/visualization/spectral.py +670 -255
- oscura/visualization/thumbnails.py +166 -137
- oscura/visualization/waveform.py +150 -63
- oscura/workflows/__init__.py +3 -0
- oscura/{batch → workflows/batch}/__init__.py +5 -5
- oscura/{batch → workflows/batch}/advanced.py +150 -75
- oscura/workflows/batch/aggregate.py +531 -0
- oscura/workflows/batch/analyze.py +236 -0
- oscura/{batch → workflows/batch}/logging.py +2 -2
- oscura/{batch → workflows/batch}/metrics.py +1 -1
- oscura/workflows/complete_re.py +1144 -0
- oscura/workflows/compliance.py +44 -54
- oscura/workflows/digital.py +197 -51
- oscura/workflows/legacy/__init__.py +12 -0
- oscura/{workflow → workflows/legacy}/dag.py +4 -1
- oscura/workflows/multi_trace.py +9 -9
- oscura/workflows/power.py +42 -62
- oscura/workflows/protocol.py +82 -49
- oscura/workflows/reverse_engineering.py +351 -150
- oscura/workflows/signal_integrity.py +157 -82
- oscura-0.6.0.dist-info/METADATA +643 -0
- oscura-0.6.0.dist-info/RECORD +590 -0
- oscura/analyzers/digital/ic_database.py +0 -498
- oscura/analyzers/digital/timing_paths.py +0 -339
- oscura/analyzers/digital/vintage.py +0 -377
- oscura/analyzers/digital/vintage_result.py +0 -148
- oscura/analyzers/protocols/parallel_bus.py +0 -449
- oscura/batch/aggregate.py +0 -300
- oscura/batch/analyze.py +0 -139
- oscura/dsl/__init__.py +0 -73
- oscura/exceptions.py +0 -59
- oscura/exploratory/fuzzy.py +0 -513
- oscura/exploratory/sync.py +0 -384
- oscura/export/wavedrom.py +0 -430
- oscura/exporters/__init__.py +0 -94
- oscura/exporters/csv.py +0 -303
- oscura/exporters/exporters.py +0 -44
- oscura/exporters/hdf5.py +0 -217
- oscura/exporters/html_export.py +0 -701
- oscura/exporters/json_export.py +0 -338
- oscura/exporters/markdown_export.py +0 -367
- oscura/exporters/matlab_export.py +0 -354
- oscura/exporters/npz_export.py +0 -219
- oscura/exporters/spice_export.py +0 -210
- oscura/exporters/vintage_logic_csv.py +0 -247
- oscura/reporting/vintage_logic_report.py +0 -523
- oscura/search/context.py +0 -149
- oscura/session/__init__.py +0 -34
- oscura/session/annotations.py +0 -289
- oscura/session/history.py +0 -313
- oscura/session/session.py +0 -520
- oscura/visualization/digital_advanced.py +0 -718
- oscura/visualization/figure_manager.py +0 -156
- oscura/workflow/__init__.py +0 -13
- oscura-0.5.0.dist-info/METADATA +0 -407
- oscura-0.5.0.dist-info/RECORD +0 -486
- /oscura/core/{config.py → config/legacy.py} +0 -0
- /oscura/{extensibility → core/extensibility}/__init__.py +0 -0
- /oscura/{extensibility → core/extensibility}/registry.py +0 -0
- /oscura/{plugins → core/plugins}/isolation.py +0 -0
- /oscura/{builders → utils/builders}/signal_builder.py +0 -0
- /oscura/{optimization → utils/optimization}/parallel.py +0 -0
- /oscura/{pipeline → utils/pipeline}/__init__.py +0 -0
- /oscura/{streaming → utils/streaming}/__init__.py +0 -0
- {oscura-0.5.0.dist-info → oscura-0.6.0.dist-info}/WHEEL +0 -0
- {oscura-0.5.0.dist-info → oscura-0.6.0.dist-info}/entry_points.txt +0 -0
- {oscura-0.5.0.dist-info → oscura-0.6.0.dist-info}/licenses/LICENSE +0 -0
oscura/analyzers/ml/signal_classifier.py
@@ -0,0 +1,604 @@

```python
"""ML-based signal classification for automatic protocol detection.

This module implements machine learning classifiers for identifying signal types
and protocols from waveform data. Supports multiple ML algorithms and provides
comprehensive feature extraction for accurate classification.

Key capabilities:
- Multi-class classification (UART, SPI, I2C, CAN, analog, digital, PWM, etc.)
- Multiple ML algorithms (Random Forest, SVM, Neural Network, Gradient Boosting)
- Confidence scores and probability distributions
- Feature importance analysis (for tree-based models)
- Model persistence (save/load trained models)
- Incremental learning (online updates)

Example:
    >>> from oscura.analyzers.ml import MLSignalClassifier, TrainingDataset
    >>> # Create and train classifier
    >>> classifier = MLSignalClassifier(algorithm="random_forest")
    >>> dataset = TrainingDataset(
    ...     signals=[uart_data, spi_data, i2c_data],
    ...     labels=["uart", "spi", "i2c"],
    ...     sample_rates=[1e6, 1e6, 1e6]
    ... )
    >>> metrics = classifier.train(dataset, test_size=0.2)
    >>> print(f"Accuracy: {metrics['accuracy']:.2%}")
    >>>
    >>> # Classify unknown signal
    >>> result = classifier.predict(unknown_signal, sample_rate=1e6)
    >>> print(f"Signal type: {result.signal_type}")
    >>> print(f"Confidence: {result.confidence:.2%}")
    >>> print(f"All probabilities: {result.probabilities}")
"""

from __future__ import annotations

import logging
import pickle
from dataclasses import dataclass, field
from pathlib import Path
from typing import TYPE_CHECKING, Any, ClassVar

import numpy as np

from oscura.analyzers.ml.features import FeatureExtractor

if TYPE_CHECKING:
    from numpy.typing import NDArray

logger = logging.getLogger(__name__)


@dataclass
class MLClassificationResult:
    """Result from ML-based signal classification.

    Attributes:
        signal_type: Detected signal type (e.g., "uart", "spi", "i2c", "analog").
        confidence: Classification confidence score (0.0-1.0). Higher values
            indicate more certain predictions.
        probabilities: Dictionary mapping each signal type to its probability.
            All values sum to 1.0.
        features: Dictionary of extracted features used for classification.
            Useful for debugging and understanding model decisions.
        feature_importance: Dictionary of feature importance scores (only for
            tree-based models like Random Forest). Higher values indicate
            features that contribute more to the classification.
        model_type: Algorithm used for classification.

    Example:
        >>> result = classifier.predict(signal, sample_rate=1e6)
        >>> if result.confidence > 0.8:
        ...     print(f"High confidence: {result.signal_type}")
        >>> # Inspect feature importance
        >>> if result.feature_importance:
        ...     top_features = sorted(
        ...         result.feature_importance.items(),
        ...         key=lambda x: x[1],
        ...         reverse=True
        ...     )[:5]
        ...     print(f"Top features: {top_features}")
    """

    signal_type: str
    confidence: float
    probabilities: dict[str, float]
    features: dict[str, float]
    feature_importance: dict[str, float] | None = None
    model_type: str = "random_forest"


@dataclass
class TrainingDataset:
    """Training dataset for ML signal classifier.

    Attributes:
        signals: List of signal arrays (1D numpy arrays).
        labels: List of signal type labels corresponding to each signal.
            Must use consistent naming (e.g., "uart", "spi", "i2c").
        sample_rates: List of sample rates (Hz) for each signal.
        metadata: Optional metadata dictionary for dataset tracking.

    Example:
        >>> # Create dataset from synthetic signals
        >>> uart_signals = [generate_uart() for _ in range(100)]
        >>> spi_signals = [generate_spi() for _ in range(100)]
        >>> dataset = TrainingDataset(
        ...     signals=uart_signals + spi_signals,
        ...     labels=["uart"] * 100 + ["spi"] * 100,
        ...     sample_rates=[1e6] * 200,
        ...     metadata={"source": "synthetic", "version": "1.0"}
        ... )
    """

    signals: list[NDArray[np.floating[Any]]]
    labels: list[str]
    sample_rates: list[float]
    metadata: dict[str, Any] = field(default_factory=dict)

    def __post_init__(self) -> None:
        """Validate dataset consistency."""
        if not (len(self.signals) == len(self.labels) == len(self.sample_rates)):
            raise ValueError(
                f"Dataset length mismatch: {len(self.signals)} signals, "
                f"{len(self.labels)} labels, {len(self.sample_rates)} sample_rates"
            )


class MLSignalClassifier:
    """ML-based signal classifier using scikit-learn.

    This class provides automatic signal type classification using machine learning.
    It supports multiple algorithms and provides comprehensive feature extraction
    for accurate protocol detection.

    Supported algorithms:
        - random_forest: Fast, robust, provides feature importance
        - svm: Good for high-dimensional data, slower training
        - neural_network: Can capture complex patterns, requires more data
        - gradient_boosting: Often highest accuracy, slower training

    Supported signal types:
        - Digital: digital, uart, spi, i2c, can, manchester, nrz, rz
        - Analog: analog, pwm, amplitude_modulated, frequency_modulated
        - Mixed: Various combinations

    Example:
        >>> # Train classifier
        >>> classifier = MLSignalClassifier(algorithm="random_forest")
        >>> metrics = classifier.train(training_dataset)
        >>>
        >>> # Save model for later use
        >>> classifier.save_model(Path("models/signal_classifier.pkl"))
        >>>
        >>> # Load and use
        >>> classifier2 = MLSignalClassifier()
        >>> classifier2.load_model(Path("models/signal_classifier.pkl"))
        >>> result = classifier2.predict(signal, sample_rate=1e6)
    """

    # Supported ML algorithms
    ALGORITHMS: ClassVar[list[str]] = [
        "random_forest",
        "svm",
        "neural_network",
        "gradient_boosting",
    ]

    # Common signal types (can be extended during training)
    SIGNAL_TYPES: ClassVar[list[str]] = [
        "digital",
        "analog",
        "pwm",
        "uart",
        "spi",
        "i2c",
        "can",
        "manchester",
        "nrz",
        "rz",
        "amplitude_modulated",
        "frequency_modulated",
    ]

    def __init__(self, algorithm: str = "random_forest") -> None:
        """Initialize ML classifier with specified algorithm.

        Args:
            algorithm: ML algorithm to use. Must be one of ALGORITHMS.

        Raises:
            ValueError: If algorithm is not supported.

        Example:
            >>> classifier = MLSignalClassifier(algorithm="random_forest")
            >>> classifier.algorithm
            'random_forest'
        """
        if algorithm not in self.ALGORITHMS:
            raise ValueError(
                f"Unsupported algorithm: {algorithm}. Choose from: {', '.join(self.ALGORITHMS)}"
            )

        self.algorithm = algorithm
        self.model: Any = None
        self.scaler: Any = None
        self.feature_extractor = FeatureExtractor()
        self.feature_names: list[str] = []
        self.classes: list[str] = []

    def train(
        self, dataset: TrainingDataset, test_size: float = 0.2, random_state: int = 42
    ) -> dict[str, float]:
        """Train classifier on labeled dataset.

        Extracts features from all signals, splits into train/test sets,
        standardizes features, trains the selected ML model, and evaluates
        performance on the test set.

        Args:
            dataset: Training dataset containing signals and labels.
            test_size: Fraction of data to use for testing (0.0-1.0).
            random_state: Random seed for reproducibility.

        Returns:
            Dictionary with performance metrics:
            - accuracy: Overall classification accuracy (0.0-1.0)
            - precision: Weighted precision score (0.0-1.0)
            - recall: Weighted recall score (0.0-1.0)
            - f1_score: Weighted F1 score (0.0-1.0)

        Raises:
            ImportError: If scikit-learn is not installed.
            ValueError: If dataset is too small or has invalid labels.

        Example:
            >>> dataset = TrainingDataset(
            ...     signals=[uart1, uart2, spi1, spi2],
            ...     labels=["uart", "uart", "spi", "spi"],
            ...     sample_rates=[1e6, 1e6, 1e6, 1e6]
            ... )
            >>> metrics = classifier.train(dataset, test_size=0.25)
            >>> print(f"Accuracy: {metrics['accuracy']:.2%}")
            >>> print(f"F1 Score: {metrics['f1_score']:.2%}")
        """
        _check_sklearn_available()
        _validate_dataset_size(dataset)

        # Extract features and split data
        X_train, X_test, y_train, y_test = self._prepare_training_data(
            dataset, test_size, random_state
        )

        # Train and evaluate model
        self._train_model(X_train, y_train, random_state)
        return self._evaluate_model(X_test, y_test)

    def _prepare_training_data(
        self, dataset: TrainingDataset, test_size: float, random_state: int
    ) -> tuple[Any, Any, Any, Any]:
        """Extract features, split, and scale training data."""
        from sklearn.model_selection import train_test_split
        from sklearn.preprocessing import StandardScaler

        logger.info(f"Extracting features from {len(dataset.signals)} signals...")
        X, y = self._extract_features(dataset)

        X_train, X_test, y_train, y_test = train_test_split(
            X, y, test_size=test_size, random_state=random_state, stratify=y
        )

        logger.info("Standardizing features...")
        self.scaler = StandardScaler()
        X_train_scaled = self.scaler.fit_transform(X_train)
        X_test_scaled = self.scaler.transform(X_test)

        return X_train_scaled, X_test_scaled, y_train, y_test

    def _extract_features(self, dataset: TrainingDataset) -> tuple[NDArray[Any], list[str]]:
        """Extract features from all signals in dataset."""
        X = []
        for signal, sample_rate in zip(dataset.signals, dataset.sample_rates, strict=True):
            features = self.feature_extractor.extract_all(signal, sample_rate)
            X.append(list(features.values()))

            if not self.feature_names:
                self.feature_names = list(features.keys())

        X_array = np.array(X)
        logger.info(f"Extracted {X_array.shape[1]} features per signal")
        return X_array, dataset.labels

    def _train_model(self, X_train: Any, y_train: Any, random_state: int) -> None:
        """Train the selected ML model."""
        logger.info(f"Training {self.algorithm} classifier...")
        self.model = _create_classifier(self.algorithm, random_state)
        self.model.fit(X_train, y_train)
        self.classes = list(self.model.classes_)
        logger.info(f"Trained on {len(self.classes)} classes: {self.classes}")

    def _evaluate_model(self, X_test: Any, y_test: Any) -> dict[str, float]:
        """Evaluate model performance on test set."""
        from sklearn.metrics import accuracy_score, precision_recall_fscore_support

        logger.info("Evaluating on test set...")
        y_pred = self.model.predict(X_test)
        accuracy = float(accuracy_score(y_test, y_pred))

        precision, recall, f1, _ = precision_recall_fscore_support(
            y_test, y_pred, average="weighted", zero_division=0.0
        )

        metrics = {
            "accuracy": accuracy,
            "precision": float(precision),
            "recall": float(recall),
            "f1_score": float(f1),
        }

        logger.info(f"Training complete: {metrics}")
        return metrics

    def predict(
        self, signal: NDArray[np.floating[Any]], sample_rate: float
    ) -> MLClassificationResult:
        """Classify a single signal using trained model.

        Args:
            signal: Input signal as 1D numpy array.
            sample_rate: Sampling rate in Hz.

        Returns:
            MLClassificationResult with predicted signal type, confidence,
            probabilities, and extracted features.

        Raises:
            ValueError: If model has not been trained yet.

        Example:
            >>> result = classifier.predict(unknown_signal, sample_rate=1e6)
            >>> print(f"Type: {result.signal_type}")
            >>> print(f"Confidence: {result.confidence:.2%}")
            >>> for signal_type, prob in result.probabilities.items():
            ...     print(f"  {signal_type}: {prob:.2%}")
        """
        if self.model is None or self.scaler is None:
            raise ValueError("Model not trained. Call train() or load_model() first.")

        # Extract features
        features = self.feature_extractor.extract_all(signal, sample_rate)
        X = np.array([list(features.values())])

        # Standardize
        X_scaled = self.scaler.transform(X)

        # Predict
        prediction = self.model.predict(X_scaled)[0]
        probabilities_array = self.model.predict_proba(X_scaled)[0]

        # Build probability dictionary
        probabilities = {
            str(class_): float(prob)
            for class_, prob in zip(self.classes, probabilities_array, strict=True)
        }

        # Confidence is the maximum probability
        confidence = float(max(probabilities_array))

        # Extract feature importance if available (tree-based models)
        feature_importance: dict[str, float] | None = None
        if hasattr(self.model, "feature_importances_"):
            feature_importance = {
                name: float(importance)
                for name, importance in zip(
                    self.feature_names, self.model.feature_importances_, strict=True
                )
            }

        return MLClassificationResult(
            signal_type=str(prediction),
            confidence=confidence,
            probabilities=probabilities,
            features=features,
            feature_importance=feature_importance,
            model_type=self.algorithm,
        )

    def predict_batch(
        self, signals: list[NDArray[np.floating[Any]]], sample_rate: float
    ) -> list[MLClassificationResult]:
        """Classify multiple signals in batch.

        More efficient than calling predict() repeatedly for large batches.

        Args:
            signals: List of signal arrays.
            sample_rate: Sampling rate in Hz (same for all signals).

        Returns:
            List of MLClassificationResult objects, one per input signal.

        Raises:
            ValueError: If model has not been trained yet.

        Example:
            >>> signals = [signal1, signal2, signal3]
            >>> results = classifier.predict_batch(signals, sample_rate=1e6)
            >>> for i, result in enumerate(results):
            ...     print(f"Signal {i}: {result.signal_type} ({result.confidence:.2%})")
        """
        if self.model is None or self.scaler is None:
            raise ValueError("Model not trained. Call train() or load_model() first.")

        results = []
        for signal in signals:
            result = self.predict(signal, sample_rate)
            results.append(result)

        return results

    def save_model(self, path: Path) -> None:
        """Save trained model to disk.

        Saves the complete model state including the ML model, feature scaler,
        feature names, and class labels. Can be loaded later with load_model().

        Args:
            path: Path to save model file. Convention: use .pkl extension.

        Raises:
            ValueError: If model has not been trained yet.

        Example:
            >>> classifier.save_model(Path("models/uart_detector.pkl"))
            >>> # Later...
            >>> new_classifier = MLSignalClassifier()
            >>> new_classifier.load_model(Path("models/uart_detector.pkl"))
        """
        if self.model is None or self.scaler is None:
            raise ValueError("Model not trained. Nothing to save.")

        model_state = {
            "algorithm": self.algorithm,
            "model": self.model,
            "scaler": self.scaler,
            "feature_names": self.feature_names,
            "classes": self.classes,
        }

        path.parent.mkdir(parents=True, exist_ok=True)
        with open(path, "wb") as f:
            pickle.dump(model_state, f)

        logger.info(f"Model saved to {path}")

    def load_model(self, path: Path) -> None:
        """Load trained model from disk.

        Restores the complete model state including the ML model, feature scaler,
        feature names, and class labels.

        Args:
            path: Path to saved model file.

        Raises:
            FileNotFoundError: If model file does not exist.
            ValueError: If model file is corrupted or incompatible.

        Example:
            >>> classifier = MLSignalClassifier()
            >>> classifier.load_model(Path("models/uart_detector.pkl"))
            >>> result = classifier.predict(signal, sample_rate=1e6)
        """
        if not path.exists():
            raise FileNotFoundError(f"Model file not found: {path}")

        with open(path, "rb") as f:
            model_state = pickle.load(f)

        # Validate model state
        required_keys = {"algorithm", "model", "scaler", "feature_names", "classes"}
        if not required_keys.issubset(model_state.keys()):
            raise ValueError(
                f"Invalid model file. Missing keys: {required_keys - set(model_state.keys())}"
            )

        self.algorithm = model_state["algorithm"]
        self.model = model_state["model"]
        self.scaler = model_state["scaler"]
        self.feature_names = model_state["feature_names"]
        self.classes = model_state["classes"]

        logger.info(f"Model loaded from {path} ({len(self.classes)} classes)")

    def partial_fit(
        self,
        signals: list[NDArray[np.floating[Any]]],
        labels: list[str],
        sample_rate: float,
    ) -> None:
        """Incrementally update model with new data (online learning).

        Only supported for algorithms that implement partial_fit (currently
        neural_network). For other algorithms, retrain with combined dataset.

        Args:
            signals: List of new signal arrays.
            labels: List of labels for new signals.
            sample_rate: Sampling rate in Hz (same for all signals).

        Raises:
            ValueError: If model has not been trained yet or algorithm doesn't
                support incremental learning.
            ImportError: If scikit-learn is not installed.

        Example:
            >>> # Initial training
            >>> classifier.train(initial_dataset)
            >>>
            >>> # Later, add more data
            >>> new_signals = [signal1, signal2]
            >>> new_labels = ["uart", "spi"]
            >>> classifier.partial_fit(new_signals, new_labels, sample_rate=1e6)
        """
        if self.model is None or self.scaler is None:
            raise ValueError("Model not trained. Call train() first.")

        try:
            from sklearn.neural_network import MLPClassifier
        except ImportError as e:
            raise ImportError(
                "scikit-learn is required for ML classification. "
                "Install with: uv pip install 'scikit-learn>=1.3.0'"
            ) from e

        # Only neural network supports partial_fit in scikit-learn
        if not isinstance(self.model, MLPClassifier):
            raise ValueError(
                f"Incremental learning not supported for {self.algorithm}. "
                "Use 'neural_network' algorithm or retrain with full dataset."
            )

        # Extract features
        X = []
        for signal in signals:
            features = self.feature_extractor.extract_all(signal, sample_rate)
            X.append(list(features.values()))

        X_array = np.array(X)

        # Standardize using existing scaler
        X_scaled = self.scaler.transform(X_array)

        # Partial fit
        self.model.partial_fit(X_scaled, labels, classes=self.classes)
        logger.info(f"Updated model with {len(signals)} new samples")


def _check_sklearn_available() -> None:
    """Check if scikit-learn is available."""
    try:
        import sklearn  # noqa: F401
    except ImportError as e:
        raise ImportError(
            "scikit-learn is required for ML classification. "
            "Install with: uv pip install 'scikit-learn>=1.3.0'"
        ) from e


def _validate_dataset_size(dataset: TrainingDataset) -> None:
    """Validate that dataset has minimum required samples."""
    if len(dataset.signals) < 10:
        raise ValueError(f"Dataset too small: {len(dataset.signals)} samples (need ≥10)")


def _create_classifier(algorithm: str, random_state: int) -> Any:
    """Create classifier instance based on algorithm type."""
    from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
    from sklearn.neural_network import MLPClassifier
    from sklearn.svm import SVC

    if algorithm == "random_forest":
        return RandomForestClassifier(
            n_estimators=100,
            max_depth=None,
            min_samples_split=2,
            random_state=random_state,
            n_jobs=-1,
        )
    elif algorithm == "svm":
        return SVC(kernel="rbf", C=1.0, gamma="scale", probability=True, random_state=random_state)
    elif algorithm == "neural_network":
        return MLPClassifier(
            hidden_layer_sizes=(100, 50),
            activation="relu",
            solver="adam",
            max_iter=1000,
            random_state=random_state,
        )
    elif algorithm == "gradient_boosting":
        return GradientBoostingClassifier(
            n_estimators=100, learning_rate=0.1, max_depth=3, random_state=random_state
        )
    else:
        raise ValueError(f"Unknown algorithm: {algorithm}")
```