oscura 0.0.1-py3-none-any.whl → 0.1.0-py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- oscura/__init__.py +813 -8
- oscura/__main__.py +392 -0
- oscura/analyzers/__init__.py +37 -0
- oscura/analyzers/digital/__init__.py +177 -0
- oscura/analyzers/digital/bus.py +691 -0
- oscura/analyzers/digital/clock.py +805 -0
- oscura/analyzers/digital/correlation.py +720 -0
- oscura/analyzers/digital/edges.py +632 -0
- oscura/analyzers/digital/extraction.py +413 -0
- oscura/analyzers/digital/quality.py +878 -0
- oscura/analyzers/digital/signal_quality.py +877 -0
- oscura/analyzers/digital/thresholds.py +708 -0
- oscura/analyzers/digital/timing.py +1104 -0
- oscura/analyzers/eye/__init__.py +46 -0
- oscura/analyzers/eye/diagram.py +434 -0
- oscura/analyzers/eye/metrics.py +555 -0
- oscura/analyzers/jitter/__init__.py +83 -0
- oscura/analyzers/jitter/ber.py +333 -0
- oscura/analyzers/jitter/decomposition.py +759 -0
- oscura/analyzers/jitter/measurements.py +413 -0
- oscura/analyzers/jitter/spectrum.py +220 -0
- oscura/analyzers/measurements.py +40 -0
- oscura/analyzers/packet/__init__.py +171 -0
- oscura/analyzers/packet/daq.py +1077 -0
- oscura/analyzers/packet/metrics.py +437 -0
- oscura/analyzers/packet/parser.py +327 -0
- oscura/analyzers/packet/payload.py +2156 -0
- oscura/analyzers/packet/payload_analysis.py +1312 -0
- oscura/analyzers/packet/payload_extraction.py +236 -0
- oscura/analyzers/packet/payload_patterns.py +670 -0
- oscura/analyzers/packet/stream.py +359 -0
- oscura/analyzers/patterns/__init__.py +266 -0
- oscura/analyzers/patterns/clustering.py +1036 -0
- oscura/analyzers/patterns/discovery.py +539 -0
- oscura/analyzers/patterns/learning.py +797 -0
- oscura/analyzers/patterns/matching.py +1091 -0
- oscura/analyzers/patterns/periodic.py +650 -0
- oscura/analyzers/patterns/sequences.py +767 -0
- oscura/analyzers/power/__init__.py +116 -0
- oscura/analyzers/power/ac_power.py +391 -0
- oscura/analyzers/power/basic.py +383 -0
- oscura/analyzers/power/conduction.py +314 -0
- oscura/analyzers/power/efficiency.py +297 -0
- oscura/analyzers/power/ripple.py +356 -0
- oscura/analyzers/power/soa.py +372 -0
- oscura/analyzers/power/switching.py +479 -0
- oscura/analyzers/protocol/__init__.py +150 -0
- oscura/analyzers/protocols/__init__.py +150 -0
- oscura/analyzers/protocols/base.py +500 -0
- oscura/analyzers/protocols/can.py +620 -0
- oscura/analyzers/protocols/can_fd.py +448 -0
- oscura/analyzers/protocols/flexray.py +405 -0
- oscura/analyzers/protocols/hdlc.py +399 -0
- oscura/analyzers/protocols/i2c.py +368 -0
- oscura/analyzers/protocols/i2s.py +296 -0
- oscura/analyzers/protocols/jtag.py +393 -0
- oscura/analyzers/protocols/lin.py +445 -0
- oscura/analyzers/protocols/manchester.py +333 -0
- oscura/analyzers/protocols/onewire.py +501 -0
- oscura/analyzers/protocols/spi.py +334 -0
- oscura/analyzers/protocols/swd.py +325 -0
- oscura/analyzers/protocols/uart.py +393 -0
- oscura/analyzers/protocols/usb.py +495 -0
- oscura/analyzers/signal_integrity/__init__.py +63 -0
- oscura/analyzers/signal_integrity/embedding.py +294 -0
- oscura/analyzers/signal_integrity/equalization.py +370 -0
- oscura/analyzers/signal_integrity/sparams.py +484 -0
- oscura/analyzers/spectral/__init__.py +53 -0
- oscura/analyzers/spectral/chunked.py +273 -0
- oscura/analyzers/spectral/chunked_fft.py +571 -0
- oscura/analyzers/spectral/chunked_wavelet.py +391 -0
- oscura/analyzers/spectral/fft.py +92 -0
- oscura/analyzers/statistical/__init__.py +250 -0
- oscura/analyzers/statistical/checksum.py +923 -0
- oscura/analyzers/statistical/chunked_corr.py +228 -0
- oscura/analyzers/statistical/classification.py +778 -0
- oscura/analyzers/statistical/entropy.py +1113 -0
- oscura/analyzers/statistical/ngrams.py +614 -0
- oscura/analyzers/statistics/__init__.py +119 -0
- oscura/analyzers/statistics/advanced.py +885 -0
- oscura/analyzers/statistics/basic.py +263 -0
- oscura/analyzers/statistics/correlation.py +630 -0
- oscura/analyzers/statistics/distribution.py +298 -0
- oscura/analyzers/statistics/outliers.py +463 -0
- oscura/analyzers/statistics/streaming.py +93 -0
- oscura/analyzers/statistics/trend.py +520 -0
- oscura/analyzers/validation.py +598 -0
- oscura/analyzers/waveform/__init__.py +36 -0
- oscura/analyzers/waveform/measurements.py +943 -0
- oscura/analyzers/waveform/measurements_with_uncertainty.py +371 -0
- oscura/analyzers/waveform/spectral.py +1689 -0
- oscura/analyzers/waveform/wavelets.py +298 -0
- oscura/api/__init__.py +62 -0
- oscura/api/dsl.py +538 -0
- oscura/api/fluent.py +571 -0
- oscura/api/operators.py +498 -0
- oscura/api/optimization.py +392 -0
- oscura/api/profiling.py +396 -0
- oscura/automotive/__init__.py +73 -0
- oscura/automotive/can/__init__.py +52 -0
- oscura/automotive/can/analysis.py +356 -0
- oscura/automotive/can/checksum.py +250 -0
- oscura/automotive/can/correlation.py +212 -0
- oscura/automotive/can/discovery.py +355 -0
- oscura/automotive/can/message_wrapper.py +375 -0
- oscura/automotive/can/models.py +385 -0
- oscura/automotive/can/patterns.py +381 -0
- oscura/automotive/can/session.py +452 -0
- oscura/automotive/can/state_machine.py +300 -0
- oscura/automotive/can/stimulus_response.py +461 -0
- oscura/automotive/dbc/__init__.py +15 -0
- oscura/automotive/dbc/generator.py +156 -0
- oscura/automotive/dbc/parser.py +146 -0
- oscura/automotive/dtc/__init__.py +30 -0
- oscura/automotive/dtc/database.py +3036 -0
- oscura/automotive/j1939/__init__.py +14 -0
- oscura/automotive/j1939/decoder.py +745 -0
- oscura/automotive/loaders/__init__.py +35 -0
- oscura/automotive/loaders/asc.py +98 -0
- oscura/automotive/loaders/blf.py +77 -0
- oscura/automotive/loaders/csv_can.py +136 -0
- oscura/automotive/loaders/dispatcher.py +136 -0
- oscura/automotive/loaders/mdf.py +331 -0
- oscura/automotive/loaders/pcap.py +132 -0
- oscura/automotive/obd/__init__.py +14 -0
- oscura/automotive/obd/decoder.py +707 -0
- oscura/automotive/uds/__init__.py +48 -0
- oscura/automotive/uds/decoder.py +265 -0
- oscura/automotive/uds/models.py +64 -0
- oscura/automotive/visualization.py +369 -0
- oscura/batch/__init__.py +55 -0
- oscura/batch/advanced.py +627 -0
- oscura/batch/aggregate.py +300 -0
- oscura/batch/analyze.py +139 -0
- oscura/batch/logging.py +487 -0
- oscura/batch/metrics.py +556 -0
- oscura/builders/__init__.py +41 -0
- oscura/builders/signal_builder.py +1131 -0
- oscura/cli/__init__.py +14 -0
- oscura/cli/batch.py +339 -0
- oscura/cli/characterize.py +273 -0
- oscura/cli/compare.py +775 -0
- oscura/cli/decode.py +551 -0
- oscura/cli/main.py +247 -0
- oscura/cli/shell.py +350 -0
- oscura/comparison/__init__.py +66 -0
- oscura/comparison/compare.py +397 -0
- oscura/comparison/golden.py +487 -0
- oscura/comparison/limits.py +391 -0
- oscura/comparison/mask.py +434 -0
- oscura/comparison/trace_diff.py +30 -0
- oscura/comparison/visualization.py +481 -0
- oscura/compliance/__init__.py +70 -0
- oscura/compliance/advanced.py +756 -0
- oscura/compliance/masks.py +363 -0
- oscura/compliance/reporting.py +483 -0
- oscura/compliance/testing.py +298 -0
- oscura/component/__init__.py +38 -0
- oscura/component/impedance.py +365 -0
- oscura/component/reactive.py +598 -0
- oscura/component/transmission_line.py +312 -0
- oscura/config/__init__.py +191 -0
- oscura/config/defaults.py +254 -0
- oscura/config/loader.py +348 -0
- oscura/config/memory.py +271 -0
- oscura/config/migration.py +458 -0
- oscura/config/pipeline.py +1077 -0
- oscura/config/preferences.py +530 -0
- oscura/config/protocol.py +875 -0
- oscura/config/schema.py +713 -0
- oscura/config/settings.py +420 -0
- oscura/config/thresholds.py +599 -0
- oscura/convenience.py +457 -0
- oscura/core/__init__.py +299 -0
- oscura/core/audit.py +457 -0
- oscura/core/backend_selector.py +405 -0
- oscura/core/cache.py +590 -0
- oscura/core/cancellation.py +439 -0
- oscura/core/confidence.py +225 -0
- oscura/core/config.py +506 -0
- oscura/core/correlation.py +216 -0
- oscura/core/cross_domain.py +422 -0
- oscura/core/debug.py +301 -0
- oscura/core/edge_cases.py +541 -0
- oscura/core/exceptions.py +535 -0
- oscura/core/gpu_backend.py +523 -0
- oscura/core/lazy.py +832 -0
- oscura/core/log_query.py +540 -0
- oscura/core/logging.py +931 -0
- oscura/core/logging_advanced.py +952 -0
- oscura/core/memoize.py +171 -0
- oscura/core/memory_check.py +274 -0
- oscura/core/memory_guard.py +290 -0
- oscura/core/memory_limits.py +336 -0
- oscura/core/memory_monitor.py +453 -0
- oscura/core/memory_progress.py +465 -0
- oscura/core/memory_warnings.py +315 -0
- oscura/core/numba_backend.py +362 -0
- oscura/core/performance.py +352 -0
- oscura/core/progress.py +524 -0
- oscura/core/provenance.py +358 -0
- oscura/core/results.py +331 -0
- oscura/core/types.py +504 -0
- oscura/core/uncertainty.py +383 -0
- oscura/discovery/__init__.py +52 -0
- oscura/discovery/anomaly_detector.py +672 -0
- oscura/discovery/auto_decoder.py +415 -0
- oscura/discovery/comparison.py +497 -0
- oscura/discovery/quality_validator.py +528 -0
- oscura/discovery/signal_detector.py +769 -0
- oscura/dsl/__init__.py +73 -0
- oscura/dsl/commands.py +246 -0
- oscura/dsl/interpreter.py +455 -0
- oscura/dsl/parser.py +689 -0
- oscura/dsl/repl.py +172 -0
- oscura/exceptions.py +59 -0
- oscura/exploratory/__init__.py +111 -0
- oscura/exploratory/error_recovery.py +642 -0
- oscura/exploratory/fuzzy.py +513 -0
- oscura/exploratory/fuzzy_advanced.py +786 -0
- oscura/exploratory/legacy.py +831 -0
- oscura/exploratory/parse.py +358 -0
- oscura/exploratory/recovery.py +275 -0
- oscura/exploratory/sync.py +382 -0
- oscura/exploratory/unknown.py +707 -0
- oscura/export/__init__.py +25 -0
- oscura/export/wireshark/README.md +265 -0
- oscura/export/wireshark/__init__.py +47 -0
- oscura/export/wireshark/generator.py +312 -0
- oscura/export/wireshark/lua_builder.py +159 -0
- oscura/export/wireshark/templates/dissector.lua.j2 +92 -0
- oscura/export/wireshark/type_mapping.py +165 -0
- oscura/export/wireshark/validator.py +105 -0
- oscura/exporters/__init__.py +94 -0
- oscura/exporters/csv.py +303 -0
- oscura/exporters/exporters.py +44 -0
- oscura/exporters/hdf5.py +219 -0
- oscura/exporters/html_export.py +701 -0
- oscura/exporters/json_export.py +291 -0
- oscura/exporters/markdown_export.py +367 -0
- oscura/exporters/matlab_export.py +354 -0
- oscura/exporters/npz_export.py +219 -0
- oscura/exporters/spice_export.py +210 -0
- oscura/extensibility/__init__.py +131 -0
- oscura/extensibility/docs.py +752 -0
- oscura/extensibility/extensions.py +1125 -0
- oscura/extensibility/logging.py +259 -0
- oscura/extensibility/measurements.py +485 -0
- oscura/extensibility/plugins.py +414 -0
- oscura/extensibility/registry.py +346 -0
- oscura/extensibility/templates.py +913 -0
- oscura/extensibility/validation.py +651 -0
- oscura/filtering/__init__.py +89 -0
- oscura/filtering/base.py +563 -0
- oscura/filtering/convenience.py +564 -0
- oscura/filtering/design.py +725 -0
- oscura/filtering/filters.py +32 -0
- oscura/filtering/introspection.py +605 -0
- oscura/guidance/__init__.py +24 -0
- oscura/guidance/recommender.py +429 -0
- oscura/guidance/wizard.py +518 -0
- oscura/inference/__init__.py +251 -0
- oscura/inference/active_learning/README.md +153 -0
- oscura/inference/active_learning/__init__.py +38 -0
- oscura/inference/active_learning/lstar.py +257 -0
- oscura/inference/active_learning/observation_table.py +230 -0
- oscura/inference/active_learning/oracle.py +78 -0
- oscura/inference/active_learning/teachers/__init__.py +15 -0
- oscura/inference/active_learning/teachers/simulator.py +192 -0
- oscura/inference/adaptive_tuning.py +453 -0
- oscura/inference/alignment.py +653 -0
- oscura/inference/bayesian.py +943 -0
- oscura/inference/binary.py +1016 -0
- oscura/inference/crc_reverse.py +711 -0
- oscura/inference/logic.py +288 -0
- oscura/inference/message_format.py +1305 -0
- oscura/inference/protocol.py +417 -0
- oscura/inference/protocol_dsl.py +1084 -0
- oscura/inference/protocol_library.py +1230 -0
- oscura/inference/sequences.py +809 -0
- oscura/inference/signal_intelligence.py +1509 -0
- oscura/inference/spectral.py +215 -0
- oscura/inference/state_machine.py +634 -0
- oscura/inference/stream.py +918 -0
- oscura/integrations/__init__.py +59 -0
- oscura/integrations/llm.py +1827 -0
- oscura/jupyter/__init__.py +32 -0
- oscura/jupyter/display.py +268 -0
- oscura/jupyter/magic.py +334 -0
- oscura/loaders/__init__.py +526 -0
- oscura/loaders/binary.py +69 -0
- oscura/loaders/configurable.py +1255 -0
- oscura/loaders/csv.py +26 -0
- oscura/loaders/csv_loader.py +473 -0
- oscura/loaders/hdf5.py +9 -0
- oscura/loaders/hdf5_loader.py +510 -0
- oscura/loaders/lazy.py +370 -0
- oscura/loaders/mmap_loader.py +583 -0
- oscura/loaders/numpy_loader.py +436 -0
- oscura/loaders/pcap.py +432 -0
- oscura/loaders/preprocessing.py +368 -0
- oscura/loaders/rigol.py +287 -0
- oscura/loaders/sigrok.py +321 -0
- oscura/loaders/tdms.py +367 -0
- oscura/loaders/tektronix.py +711 -0
- oscura/loaders/validation.py +584 -0
- oscura/loaders/vcd.py +464 -0
- oscura/loaders/wav.py +233 -0
- oscura/math/__init__.py +45 -0
- oscura/math/arithmetic.py +824 -0
- oscura/math/interpolation.py +413 -0
- oscura/onboarding/__init__.py +39 -0
- oscura/onboarding/help.py +498 -0
- oscura/onboarding/tutorials.py +405 -0
- oscura/onboarding/wizard.py +466 -0
- oscura/optimization/__init__.py +19 -0
- oscura/optimization/parallel.py +440 -0
- oscura/optimization/search.py +532 -0
- oscura/pipeline/__init__.py +43 -0
- oscura/pipeline/base.py +338 -0
- oscura/pipeline/composition.py +242 -0
- oscura/pipeline/parallel.py +448 -0
- oscura/pipeline/pipeline.py +375 -0
- oscura/pipeline/reverse_engineering.py +1119 -0
- oscura/plugins/__init__.py +122 -0
- oscura/plugins/base.py +272 -0
- oscura/plugins/cli.py +497 -0
- oscura/plugins/discovery.py +411 -0
- oscura/plugins/isolation.py +418 -0
- oscura/plugins/lifecycle.py +959 -0
- oscura/plugins/manager.py +493 -0
- oscura/plugins/registry.py +421 -0
- oscura/plugins/versioning.py +372 -0
- oscura/py.typed +0 -0
- oscura/quality/__init__.py +65 -0
- oscura/quality/ensemble.py +740 -0
- oscura/quality/explainer.py +338 -0
- oscura/quality/scoring.py +616 -0
- oscura/quality/warnings.py +456 -0
- oscura/reporting/__init__.py +248 -0
- oscura/reporting/advanced.py +1234 -0
- oscura/reporting/analyze.py +448 -0
- oscura/reporting/argument_preparer.py +596 -0
- oscura/reporting/auto_report.py +507 -0
- oscura/reporting/batch.py +615 -0
- oscura/reporting/chart_selection.py +223 -0
- oscura/reporting/comparison.py +330 -0
- oscura/reporting/config.py +615 -0
- oscura/reporting/content/__init__.py +39 -0
- oscura/reporting/content/executive.py +127 -0
- oscura/reporting/content/filtering.py +191 -0
- oscura/reporting/content/minimal.py +257 -0
- oscura/reporting/content/verbosity.py +162 -0
- oscura/reporting/core.py +508 -0
- oscura/reporting/core_formats/__init__.py +17 -0
- oscura/reporting/core_formats/multi_format.py +210 -0
- oscura/reporting/engine.py +836 -0
- oscura/reporting/export.py +366 -0
- oscura/reporting/formatting/__init__.py +129 -0
- oscura/reporting/formatting/emphasis.py +81 -0
- oscura/reporting/formatting/numbers.py +403 -0
- oscura/reporting/formatting/standards.py +55 -0
- oscura/reporting/formatting.py +466 -0
- oscura/reporting/html.py +578 -0
- oscura/reporting/index.py +590 -0
- oscura/reporting/multichannel.py +296 -0
- oscura/reporting/output.py +379 -0
- oscura/reporting/pdf.py +373 -0
- oscura/reporting/plots.py +731 -0
- oscura/reporting/pptx_export.py +360 -0
- oscura/reporting/renderers/__init__.py +11 -0
- oscura/reporting/renderers/pdf.py +94 -0
- oscura/reporting/sections.py +471 -0
- oscura/reporting/standards.py +680 -0
- oscura/reporting/summary_generator.py +368 -0
- oscura/reporting/tables.py +397 -0
- oscura/reporting/template_system.py +724 -0
- oscura/reporting/templates/__init__.py +15 -0
- oscura/reporting/templates/definition.py +205 -0
- oscura/reporting/templates/index.html +649 -0
- oscura/reporting/templates/index.md +173 -0
- oscura/schemas/__init__.py +158 -0
- oscura/schemas/bus_configuration.json +322 -0
- oscura/schemas/device_mapping.json +182 -0
- oscura/schemas/packet_format.json +418 -0
- oscura/schemas/protocol_definition.json +363 -0
- oscura/search/__init__.py +16 -0
- oscura/search/anomaly.py +292 -0
- oscura/search/context.py +149 -0
- oscura/search/pattern.py +160 -0
- oscura/session/__init__.py +34 -0
- oscura/session/annotations.py +289 -0
- oscura/session/history.py +313 -0
- oscura/session/session.py +445 -0
- oscura/streaming/__init__.py +43 -0
- oscura/streaming/chunked.py +611 -0
- oscura/streaming/progressive.py +393 -0
- oscura/streaming/realtime.py +622 -0
- oscura/testing/__init__.py +54 -0
- oscura/testing/synthetic.py +808 -0
- oscura/triggering/__init__.py +68 -0
- oscura/triggering/base.py +229 -0
- oscura/triggering/edge.py +353 -0
- oscura/triggering/pattern.py +344 -0
- oscura/triggering/pulse.py +581 -0
- oscura/triggering/window.py +453 -0
- oscura/ui/__init__.py +48 -0
- oscura/ui/formatters.py +526 -0
- oscura/ui/progressive_display.py +340 -0
- oscura/utils/__init__.py +99 -0
- oscura/utils/autodetect.py +338 -0
- oscura/utils/buffer.py +389 -0
- oscura/utils/lazy.py +407 -0
- oscura/utils/lazy_imports.py +147 -0
- oscura/utils/memory.py +836 -0
- oscura/utils/memory_advanced.py +1326 -0
- oscura/utils/memory_extensions.py +465 -0
- oscura/utils/progressive.py +352 -0
- oscura/utils/windowing.py +362 -0
- oscura/visualization/__init__.py +321 -0
- oscura/visualization/accessibility.py +526 -0
- oscura/visualization/annotations.py +374 -0
- oscura/visualization/axis_scaling.py +305 -0
- oscura/visualization/colors.py +453 -0
- oscura/visualization/digital.py +337 -0
- oscura/visualization/eye.py +420 -0
- oscura/visualization/histogram.py +281 -0
- oscura/visualization/interactive.py +858 -0
- oscura/visualization/jitter.py +702 -0
- oscura/visualization/keyboard.py +394 -0
- oscura/visualization/layout.py +365 -0
- oscura/visualization/optimization.py +1028 -0
- oscura/visualization/palettes.py +446 -0
- oscura/visualization/plot.py +92 -0
- oscura/visualization/power.py +290 -0
- oscura/visualization/power_extended.py +626 -0
- oscura/visualization/presets.py +467 -0
- oscura/visualization/protocols.py +932 -0
- oscura/visualization/render.py +207 -0
- oscura/visualization/rendering.py +444 -0
- oscura/visualization/reverse_engineering.py +791 -0
- oscura/visualization/signal_integrity.py +808 -0
- oscura/visualization/specialized.py +553 -0
- oscura/visualization/spectral.py +811 -0
- oscura/visualization/styles.py +381 -0
- oscura/visualization/thumbnails.py +311 -0
- oscura/visualization/time_axis.py +351 -0
- oscura/visualization/waveform.py +367 -0
- oscura/workflow/__init__.py +13 -0
- oscura/workflow/dag.py +377 -0
- oscura/workflows/__init__.py +58 -0
- oscura/workflows/compliance.py +280 -0
- oscura/workflows/digital.py +272 -0
- oscura/workflows/multi_trace.py +502 -0
- oscura/workflows/power.py +178 -0
- oscura/workflows/protocol.py +492 -0
- oscura/workflows/reverse_engineering.py +639 -0
- oscura/workflows/signal_integrity.py +227 -0
- oscura-0.1.0.dist-info/METADATA +300 -0
- oscura-0.1.0.dist-info/RECORD +463 -0
- oscura-0.1.0.dist-info/entry_points.txt +2 -0
- {oscura-0.0.1.dist-info → oscura-0.1.0.dist-info}/licenses/LICENSE +1 -1
- oscura-0.0.1.dist-info/METADATA +0 -63
- oscura-0.0.1.dist-info/RECORD +0 -5
- {oscura-0.0.1.dist-info → oscura-0.1.0.dist-info}/WHEEL +0 -0
@@ -0,0 +1,720 @@
+"""Multi-channel time correlation for synchronized analysis.
+
+This module provides tools for correlating and aligning multiple signal channels
+that may have timing offsets, different sample rates, or require trigger-based
+synchronization.
+
+
+Example:
+    >>> from oscura.analyzers.digital.correlation import correlate_channels, align_by_trigger
+    >>> result = correlate_channels(channel_a, channel_b, sample_rate=1e9)
+    >>> print(f"Time offset: {result.offset_seconds:.9f} seconds")
+    >>> aligned = align_by_trigger(channels, trigger_channel='clk', edge='rising')
+    >>> print(f"Aligned channels: {aligned.channel_names}")
+
+References:
+    Oppenheim & Schafer: Discrete-Time Signal Processing (3rd Ed), Chapter 2
+    Press et al: Numerical Recipes (3rd Ed), Section 13.2 - Correlation
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Literal
+
+import numpy as np
+from scipy import signal
+
+from oscura.core.exceptions import InsufficientDataError, ValidationError
+
+if TYPE_CHECKING:
+    from numpy.typing import NDArray
+
+
+@dataclass
+class CorrelationResult:
+    """Result of cross-correlation analysis.
+
+    Attributes:
+        offset_samples: Time offset in samples (positive = channel_b leads).
+        offset_seconds: Time offset in seconds.
+        correlation_coefficient: Peak correlation value (-1.0 to 1.0).
+        confidence: Confidence score (0.0 to 1.0) based on peak sharpness.
+        quality: Quality classification.
+    """
+
+    offset_samples: int
+    offset_seconds: float
+    correlation_coefficient: float
+    confidence: float
+    quality: str  # 'excellent', 'good', 'fair', 'poor'
+
+
+class CorrelatedChannels:
+    """Container for time-aligned multi-channel data.
+
+    Attributes:
+        channels: Dictionary mapping channel names to aligned data arrays.
+        sample_rate: Common sample rate for all channels.
+        offsets: Dictionary mapping channel names to their time offsets (samples).
+    """
+
+    def __init__(
+        self, channels: dict[str, NDArray[np.float64]], sample_rate: float, offsets: dict[str, int]
+    ):
+        """Initialize correlated channels container.
+
+        Args:
+            channels: Dictionary of channel name -> aligned data array.
+            sample_rate: Sample rate in Hz (same for all channels).
+            offsets: Dictionary of channel name -> offset in samples.
+
+        Raises:
+            ValidationError: If channels are empty or inconsistent.
+        """
+        if not channels:
+            raise ValidationError("At least one channel is required")
+
+        # Validate all channels have same length
+        lengths = {name: len(data) for name, data in channels.items()}
+        if len(set(lengths.values())) > 1:
+            raise ValidationError(f"Channel length mismatch: {lengths}")
+
+        if sample_rate <= 0:
+            raise ValidationError(f"Sample rate must be positive, got {sample_rate}")
+
+        self.channels = channels
+        self.sample_rate = float(sample_rate)
+        self.offsets = offsets
+
+    @property
+    def channel_names(self) -> list[str]:
+        """Get list of channel names."""
+        return list(self.channels.keys())
+
+    def get_channel(self, name: str) -> NDArray[np.float64]:
+        """Get aligned data for a specific channel.
+
+        Args:
+            name: Channel name.
+
+        Returns:
+            Aligned data array.
+        """
+        return self.channels[name]
+
+    def get_time_vector(self) -> NDArray[np.float64]:
+        """Get time vector for aligned data.
+
+        Returns:
+            Time array in seconds, starting from 0.
+        """
+        first_channel = next(iter(self.channels.values()))
+        n_samples = len(first_channel)
+        return np.arange(n_samples) / self.sample_rate
+
+
+class ChannelCorrelator:
+    """Correlate multiple signal channels in time.
+
+    This class provides methods for aligning channels using cross-correlation,
+    trigger edge detection, or resampling to a common sample rate.
+    """
+
+    def __init__(self, reference_channel: str | None = None):
+        """Initialize correlator.
+
+        Args:
+            reference_channel: Name of reference channel for multi-channel alignment.
+                If None, first channel will be used as reference.
+        """
+        self.reference_channel = reference_channel
+
+    def correlate(
+        self,
+        signal1: NDArray[np.float64],
+        signal2: NDArray[np.float64],
+    ) -> float:
+        """Compute correlation coefficient between two signals.
+
+        Simple correlation interface for test compatibility.
+
+        Args:
+            signal1: First signal array.
+            signal2: Second signal array.
+
+        Returns:
+            Correlation coefficient (-1.0 to 1.0).
+
+        Example:
+            >>> correlator = ChannelCorrelator()
+            >>> corr = correlator.correlate(signal1, signal2)
+        """
+        signal1 = np.asarray(signal1, dtype=np.float64)
+        signal2 = np.asarray(signal2, dtype=np.float64)
+
+        if len(signal1) != len(signal2):
+            # Use shorter length
+            min_len = min(len(signal1), len(signal2))
+            signal1 = signal1[:min_len]
+            signal2 = signal2[:min_len]
+
+        if len(signal1) < 2:
+            return 0.0
+
+        # Compute Pearson correlation coefficient
+        s1_centered = signal1 - np.mean(signal1)
+        s2_centered = signal2 - np.mean(signal2)
+
+        num = np.sum(s1_centered * s2_centered)
+        denom = np.sqrt(np.sum(s1_centered**2) * np.sum(s2_centered**2))
+
+        if denom == 0:
+            return 0.0
+
+        return float(num / denom)
+
+    def find_lag(
+        self,
+        signal1: NDArray[np.float64],
+        signal2: NDArray[np.float64],
+    ) -> int:
+        """Find the time lag between two signals using cross-correlation.
+
+        Args:
+            signal1: First signal array.
+            signal2: Second signal array.
+
+        Returns:
+            Lag in samples (positive = signal2 lags signal1).
+
+        Example:
+            >>> correlator = ChannelCorrelator()
+            >>> lag = correlator.find_lag(signal1, signal2)
+        """
+        signal1 = np.asarray(signal1, dtype=np.float64)
+        signal2 = np.asarray(signal2, dtype=np.float64)
+
+        if len(signal1) < 2 or len(signal2) < 2:
+            return 0
+
+        # Center signals
+        s1_centered = signal1 - np.mean(signal1)
+        s2_centered = signal2 - np.mean(signal2)
+
+        # Compute cross-correlation
+        correlation = np.correlate(s1_centered, s2_centered, mode="full")
+
+        # Find peak
+        peak_idx = np.argmax(np.abs(correlation))
+
+        # Convert to lag (relative to signal2)
+        lag = peak_idx - (len(signal2) - 1)
+
+        return int(lag)
+
+    def correlation_matrix(
+        self,
+        channels: list[NDArray[np.float64]],
+    ) -> NDArray[np.float64]:
+        """Compute pairwise correlation matrix for multiple channels.
+
+        Args:
+            channels: List of signal arrays.
+
+        Returns:
+            NxN correlation matrix where N is number of channels.
+
+        Example:
+            >>> correlator = ChannelCorrelator()
+            >>> matrix = correlator.correlation_matrix([ch1, ch2, ch3])
+        """
+        n = len(channels)
+        matrix = np.ones((n, n), dtype=np.float64)
+
+        for i in range(n):
+            for j in range(i + 1, n):
+                corr = self.correlate(channels[i], channels[j])
+                matrix[i, j] = corr
+                matrix[j, i] = corr
+
+        return matrix
+
+    def correlate_channels(
+        self,
+        channel_a: NDArray[np.float64],
+        channel_b: NDArray[np.float64],
+        sample_rate: float = 1.0,
+    ) -> CorrelationResult:
+        """Find time offset between two channels using cross-correlation.
+
+        Uses normalized cross-correlation to find the time offset that maximizes
+        alignment between two channels. Handles zero-mean normalization for
+        robustness against DC offsets.
+
+        Args:
+            channel_a: First channel data.
+            channel_b: Second channel data.
+            sample_rate: Sample rate in Hz (default 1.0 for sample-based results).
+
+        Returns:
+            CorrelationResult with offset and quality metrics.
+
+        Raises:
+            InsufficientDataError: If channels are too short.
+            ValidationError: If sample rate is invalid.
+        """
+        if len(channel_a) < 2 or len(channel_b) < 2:
+            raise InsufficientDataError("Channels must have at least 2 samples")
+
+        if sample_rate <= 0:
+            raise ValidationError(f"Sample rate must be positive, got {sample_rate}")
+
+        # Convert to zero-mean for better correlation
+        a_mean = np.mean(channel_a)
+        b_mean = np.mean(channel_b)
+        a_centered = channel_a - a_mean
+        b_centered = channel_b - b_mean
+
+        # Compute cross-correlation using scipy (more efficient than numpy)
+        correlation = signal.correlate(a_centered, b_centered, mode="full", method="auto")
+
+        # Normalize by signal energies for correlation coefficient
+        a_energy = np.sum(a_centered**2)
+        b_energy = np.sum(b_centered**2)
+
+        if a_energy == 0 or b_energy == 0:
+            # One or both signals are constant
+            return CorrelationResult(
+                offset_samples=0,
+                offset_seconds=0.0,
+                correlation_coefficient=0.0,
+                confidence=0.0,
+                quality="poor",
+            )
+
+        normalization = np.sqrt(a_energy * b_energy)
+        correlation_normalized = correlation / normalization
+
+        # Find peak correlation
+        peak_idx = np.argmax(np.abs(correlation_normalized))
+        peak_value = correlation_normalized[peak_idx]
+
+        # Convert peak index to offset (positive = channel_b leads)
+        offset_samples = peak_idx - (len(channel_b) - 1)
+        offset_seconds = offset_samples / sample_rate
+
+        # Estimate confidence from peak sharpness
+        # High confidence = sharp peak, low confidence = broad/weak peak
+        confidence = self._estimate_correlation_confidence(correlation_normalized, int(peak_idx))
+
+        # Classify quality
+        quality = self._classify_correlation_quality(abs(peak_value), confidence)
+
+        return CorrelationResult(
+            offset_samples=int(offset_samples),
+            offset_seconds=float(offset_seconds),
+            correlation_coefficient=float(peak_value),
+            confidence=float(confidence),
+            quality=quality,
+        )
+
+    def align_by_trigger(
+        self,
+        channels: dict[str, NDArray[np.float64]],
+        trigger_channel: str,
+        edge: Literal["rising", "falling"] = "rising",
+        threshold: float = 0.5,
+    ) -> CorrelatedChannels:
+        """Align channels using trigger edge from one channel.
+
+        Aligns all channels by detecting the first trigger edge in the specified
+        channel and trimming all channels to start from that point.
+
+        Args:
+            channels: Dictionary of channel name -> data array.
+            trigger_channel: Name of channel to use for trigger detection.
+            edge: Edge type to detect ('rising' or 'falling').
+            threshold: Trigger threshold (normalized 0-1 if float, or absolute value).
+
+        Returns:
+            CorrelatedChannels with aligned data.
+
+        Raises:
+            InsufficientDataError: If trigger channel is too short.
+            ValidationError: If trigger channel not found or no edge detected.
+        """
+        if trigger_channel not in channels:
+            raise ValidationError(f"Trigger channel '{trigger_channel}' not found")
+
+        trigger_data = channels[trigger_channel]
+
+        if len(trigger_data) < 2:
+            raise InsufficientDataError("Trigger channel too short")
+
+        # Normalize threshold if needed
+        if 0.0 <= threshold <= 1.0:
+            data_min = np.min(trigger_data)
+            data_max = np.max(trigger_data)
+            threshold_abs = float(data_min + threshold * (data_max - data_min))
+        else:
+            threshold_abs = float(threshold)
+
+        # Detect first edge
+        trigger_idx = self._find_first_edge(trigger_data, edge, threshold_abs)
+
+        if trigger_idx is None:
+            raise ValidationError(f"No {edge} edge found in trigger channel")
+
+        # Align all channels by trimming to trigger point
+        aligned_channels = {}
+        offsets = {}
+
+        for name, data in channels.items():
+            if trigger_idx < len(data):
+                aligned_channels[name] = data[trigger_idx:]
+                offsets[name] = trigger_idx
+            else:
+                # Trigger point is beyond this channel's data
+                aligned_channels[name] = np.array([])
+                offsets[name] = len(data)
+
+        # Assume all channels have same sample rate (no rate given)
+        # Use default of 1.0 Hz for sample-based indexing
+        return CorrelatedChannels(aligned_channels, sample_rate=1.0, offsets=offsets)
+
+    def resample_to_common_rate(
+        self,
+        channels: dict[str, tuple[NDArray[np.float64], float]],
+        target_rate: float | None = None,
+    ) -> CorrelatedChannels:
+        """Resample all channels to common sample rate.
+
+        Resamples channels with different sample rates to a common rate using
+        polyphase resampling for high quality. Uses the highest sample rate
+        as target if not specified.
+
+        Args:
+            channels: Dictionary of channel name -> (data, sample_rate) tuples.
+            target_rate: Target sample rate in Hz. If None, uses highest rate.
+
+        Returns:
+            CorrelatedChannels with resampled data at common rate.
+
+        Raises:
+            ValidationError: If channels are empty or rates are invalid.
+        """
+        if not channels:
+            raise ValidationError("At least one channel is required")
+
+        # Determine target rate
+        if target_rate is None:
+            rates = [rate for _, rate in channels.values()]
+            target_rate = max(rates)
+
+        if target_rate <= 0:
+            raise ValidationError(f"Target rate must be positive, got {target_rate}")
+
+        resampled_channels = {}
+        offsets = {}
+
+        for name, (data, original_rate) in channels.items():
+            if original_rate <= 0:
+                raise ValidationError(f"Invalid sample rate for '{name}': {original_rate}")
+
+            if len(data) < 2:
+                # Skip empty/trivial channels
+                resampled_channels[name] = data
+                offsets[name] = 0
+                continue
+
+            # Calculate resampling ratio
+            ratio = target_rate / original_rate
+
+            if abs(ratio - 1.0) < 1e-6:
+                # Already at target rate
+                resampled_channels[name] = data
+            else:
+                # Resample using polyphase method
+                num_samples = int(np.round(len(data) * ratio))
+                resampled_channels[name] = signal.resample(data, num_samples)
+
+            offsets[name] = 0
+
+        return CorrelatedChannels(resampled_channels, sample_rate=target_rate, offsets=offsets)
+
+    def auto_align(
+        self,
+        channels: dict[str, NDArray[np.float64]],
+        sample_rate: float,
+        method: Literal["correlation", "trigger", "edge"] = "correlation",
+    ) -> CorrelatedChannels:
+        """Auto-align channels using best-guess method.
+
+        Automatically aligns multiple channels using the specified method.
+        For correlation method, aligns all channels to the reference channel.
+
+        Args:
+            channels: Dictionary of channel name -> data array.
+            sample_rate: Sample rate in Hz (same for all channels).
+            method: Alignment method to use.
+
+        Returns:
+            CorrelatedChannels with aligned data.
+
+        Raises:
+            ValidationError: If method is invalid or alignment fails.
+        """
+        if not channels:
+            raise ValidationError("At least one channel is required")
+
+        if len(channels) < 2:
+            # Single channel, no alignment needed
+            return CorrelatedChannels(
+                channels=channels, sample_rate=sample_rate, offsets=dict.fromkeys(channels, 0)
+            )
+
+        # Determine reference channel
+        if self.reference_channel and self.reference_channel in channels:
+            ref_name = self.reference_channel
+        else:
+            ref_name = next(iter(channels))
+
+        ref_data = channels[ref_name]
+
+        if method == "correlation":
+            # Correlate all channels to reference
+            aligned_channels = {ref_name: ref_data}
+            offsets = {ref_name: 0}
+
+            for name, data in channels.items():
+                if name == ref_name:
+                    continue
+
+                # Cross-correlate with reference
+                result = self.correlate_channels(ref_data, data, sample_rate)
+
+                # Apply offset to align
+                offset = -result.offset_samples  # Negative because we want to shift data
+
+                if offset > 0:
+                    # Trim start of data
+                    aligned_channels[name] = data[offset:]
+                elif offset < 0:
+                    # Pad start of data
+                    pad = np.zeros(-offset)
+                    aligned_channels[name] = np.concatenate([pad, data])
+                else:
+                    aligned_channels[name] = data
+
+                offsets[name] = offset
+
+            # Trim all to same length
+            min_len = min(len(d) for d in aligned_channels.values())
+            aligned_channels = {name: data[:min_len] for name, data in aligned_channels.items()}
+
+            return CorrelatedChannels(aligned_channels, sample_rate, offsets)
+
+        elif method in ("trigger", "edge"):
+            # Use first channel as trigger
+            return self.align_by_trigger(channels, ref_name, edge="rising")
+
+        else:
+            raise ValidationError(f"Unknown alignment method: {method}")
+
+    def _estimate_correlation_confidence(
+        self, correlation: NDArray[np.float64], peak_idx: int
+    ) -> float:
+        """Estimate confidence from correlation peak sharpness.
+
+        Args:
+            correlation: Normalized correlation array.
+            peak_idx: Index of peak correlation.
+
+        Returns:
+            Confidence score 0.0 to 1.0.
+        """
+        peak_value = abs(correlation[peak_idx])
+
+        # Calculate peak-to-sidelobe ratio
+        # Higher ratio = sharper peak = higher confidence
+        window_size = min(20, len(correlation) // 10)
+        start = max(0, peak_idx - window_size)
+        end = min(len(correlation), peak_idx + window_size + 1)
+
+        # Exclude peak itself
+        sidelobe_indices = np.concatenate(
+            [np.arange(start, peak_idx), np.arange(peak_idx + 1, end)]
+        )
+
+        if len(sidelobe_indices) > 0:
+            max_sidelobe = np.max(np.abs(correlation[sidelobe_indices]))
+            if max_sidelobe > 0:
+                ratio = peak_value / max_sidelobe
+                # Map ratio to confidence (empirically tuned)
+                confidence = min(1.0, ratio / 5.0)
+            else:
+                confidence = 1.0
+        else:
+            confidence = peak_value
+
+        return float(confidence)
+
+    def _classify_correlation_quality(self, correlation: float, confidence: float) -> str:
+        """Classify correlation quality.
+
+        Args:
+            correlation: Correlation coefficient (0.0 to 1.0).
+            confidence: Confidence score (0.0 to 1.0).
+
+        Returns:
+            str: Quality rating - 'excellent', 'good', 'fair', or 'poor'.
+        """
+        score = (correlation + confidence) / 2.0
+
+        if score >= 0.8:
+            return "excellent"
+        elif score >= 0.6:
+            return "good"
+        elif score >= 0.4:
+            return "fair"
+        else:
+            return "poor"
+
+    def _find_first_edge(
+        self, data: NDArray[np.float64], edge: str, threshold: float
+    ) -> int | None:
+        """Find first edge in data.
+
+        Args:
+            data: Signal data.
+            edge: Edge type ('rising' or 'falling').
+            threshold: Threshold value.
+
+        Returns:
+            Index of first edge, or None if not found.
+        """
+        if edge == "rising":
+            # Find first point where signal crosses above threshold
+            crossings = np.where((data[:-1] < threshold) & (data[1:] >= threshold))[0]
+        else:  # falling
+            crossings = np.where((data[:-1] > threshold) & (data[1:] <= threshold))[0]
+
+        if len(crossings) > 0:
+            return int(crossings[0] + 1)  # Return index after crossing
+        else:
+            return None
+
+
+# Convenience functions
+
+
+def correlate_channels(
+    channel_a: NDArray[np.float64], channel_b: NDArray[np.float64], sample_rate: float = 1.0
+) -> CorrelationResult:
+    """Find time offset between two channels.
+
+    Convenience function for correlating two channels without creating
+    a ChannelCorrelator instance.
+
+    Args:
+        channel_a: First channel data.
+        channel_b: Second channel data.
+        sample_rate: Sample rate in Hz (default 1.0 for sample-based results).
+
+    Returns:
+        CorrelationResult with offset and quality metrics.
+
+    Example:
+        >>> result = correlate_channels(ch1, ch2, sample_rate=1e9)
+        >>> print(f"Offset: {result.offset_seconds*1e9:.2f} ns")
+    """
+    correlator = ChannelCorrelator()
+    return correlator.correlate_channels(channel_a, channel_b, sample_rate)
+
+
+def align_by_trigger(
+    channels: dict[str, NDArray[np.float64]],
+    trigger_channel: str,
+    edge: Literal["rising", "falling"] = "rising",
+    threshold: float = 0.5,
+) -> CorrelatedChannels:
+    """Align channels using trigger edge.
+
+    Convenience function for trigger-based alignment without creating
+    a ChannelCorrelator instance.
+
+    Args:
+        channels: Dictionary of channel name -> data array.
+        trigger_channel: Name of channel to use for trigger detection.
+        edge: Edge type to detect ('rising' or 'falling').
+        threshold: Trigger threshold (0-1 normalized or absolute).
+
+    Returns:
+        CorrelatedChannels with aligned data.
+
+    Example:
+        >>> aligned = align_by_trigger(
+        ...     {'clk': clk_data, 'data': data_signal},
+        ...     trigger_channel='clk',
+        ...     edge='rising'
+        ... )
+    """
+    correlator = ChannelCorrelator()
+    return correlator.align_by_trigger(channels, trigger_channel, edge, threshold)


+def resample_to_common_rate(
+    channels: dict[str, tuple[NDArray[np.float64], float]], target_rate: float | None = None
+) -> CorrelatedChannels:
+    """Resample all channels to common rate.
+
+    Convenience function for resampling channels without creating
+    a ChannelCorrelator instance.
+
+    Args:
+        channels: Dictionary of channel name -> (data, sample_rate) tuples.
+        target_rate: Target sample rate in Hz. If None, uses highest rate.
+
+    Returns:
+        CorrelatedChannels with resampled data at common rate.
+
+    Example:
+        >>> resampled = resample_to_common_rate({
+        ...     'ch1': (data1, 1e9),
+        ...     'ch2': (data2, 2e9)
+        ... })
+        >>> print(f"Common rate: {resampled.sample_rate} Hz")
+    """
+    correlator = ChannelCorrelator()
+    return correlator.resample_to_common_rate(channels, target_rate)
+
+
+__all__ = [
+    "ChannelCorrelator",
+    "CorrelatedChannels",
+    "CorrelationResult",
+    "align_by_trigger",
+    "correlate_channels",
+    "resample_to_common_rate",
+]