oscura 0.0.1__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (465)
  1. oscura/__init__.py +813 -8
  2. oscura/__main__.py +392 -0
  3. oscura/analyzers/__init__.py +37 -0
  4. oscura/analyzers/digital/__init__.py +177 -0
  5. oscura/analyzers/digital/bus.py +691 -0
  6. oscura/analyzers/digital/clock.py +805 -0
  7. oscura/analyzers/digital/correlation.py +720 -0
  8. oscura/analyzers/digital/edges.py +632 -0
  9. oscura/analyzers/digital/extraction.py +413 -0
  10. oscura/analyzers/digital/quality.py +878 -0
  11. oscura/analyzers/digital/signal_quality.py +877 -0
  12. oscura/analyzers/digital/thresholds.py +708 -0
  13. oscura/analyzers/digital/timing.py +1104 -0
  14. oscura/analyzers/eye/__init__.py +46 -0
  15. oscura/analyzers/eye/diagram.py +434 -0
  16. oscura/analyzers/eye/metrics.py +555 -0
  17. oscura/analyzers/jitter/__init__.py +83 -0
  18. oscura/analyzers/jitter/ber.py +333 -0
  19. oscura/analyzers/jitter/decomposition.py +759 -0
  20. oscura/analyzers/jitter/measurements.py +413 -0
  21. oscura/analyzers/jitter/spectrum.py +220 -0
  22. oscura/analyzers/measurements.py +40 -0
  23. oscura/analyzers/packet/__init__.py +171 -0
  24. oscura/analyzers/packet/daq.py +1077 -0
  25. oscura/analyzers/packet/metrics.py +437 -0
  26. oscura/analyzers/packet/parser.py +327 -0
  27. oscura/analyzers/packet/payload.py +2156 -0
  28. oscura/analyzers/packet/payload_analysis.py +1312 -0
  29. oscura/analyzers/packet/payload_extraction.py +236 -0
  30. oscura/analyzers/packet/payload_patterns.py +670 -0
  31. oscura/analyzers/packet/stream.py +359 -0
  32. oscura/analyzers/patterns/__init__.py +266 -0
  33. oscura/analyzers/patterns/clustering.py +1036 -0
  34. oscura/analyzers/patterns/discovery.py +539 -0
  35. oscura/analyzers/patterns/learning.py +797 -0
  36. oscura/analyzers/patterns/matching.py +1091 -0
  37. oscura/analyzers/patterns/periodic.py +650 -0
  38. oscura/analyzers/patterns/sequences.py +767 -0
  39. oscura/analyzers/power/__init__.py +116 -0
  40. oscura/analyzers/power/ac_power.py +391 -0
  41. oscura/analyzers/power/basic.py +383 -0
  42. oscura/analyzers/power/conduction.py +314 -0
  43. oscura/analyzers/power/efficiency.py +297 -0
  44. oscura/analyzers/power/ripple.py +356 -0
  45. oscura/analyzers/power/soa.py +372 -0
  46. oscura/analyzers/power/switching.py +479 -0
  47. oscura/analyzers/protocol/__init__.py +150 -0
  48. oscura/analyzers/protocols/__init__.py +150 -0
  49. oscura/analyzers/protocols/base.py +500 -0
  50. oscura/analyzers/protocols/can.py +620 -0
  51. oscura/analyzers/protocols/can_fd.py +448 -0
  52. oscura/analyzers/protocols/flexray.py +405 -0
  53. oscura/analyzers/protocols/hdlc.py +399 -0
  54. oscura/analyzers/protocols/i2c.py +368 -0
  55. oscura/analyzers/protocols/i2s.py +296 -0
  56. oscura/analyzers/protocols/jtag.py +393 -0
  57. oscura/analyzers/protocols/lin.py +445 -0
  58. oscura/analyzers/protocols/manchester.py +333 -0
  59. oscura/analyzers/protocols/onewire.py +501 -0
  60. oscura/analyzers/protocols/spi.py +334 -0
  61. oscura/analyzers/protocols/swd.py +325 -0
  62. oscura/analyzers/protocols/uart.py +393 -0
  63. oscura/analyzers/protocols/usb.py +495 -0
  64. oscura/analyzers/signal_integrity/__init__.py +63 -0
  65. oscura/analyzers/signal_integrity/embedding.py +294 -0
  66. oscura/analyzers/signal_integrity/equalization.py +370 -0
  67. oscura/analyzers/signal_integrity/sparams.py +484 -0
  68. oscura/analyzers/spectral/__init__.py +53 -0
  69. oscura/analyzers/spectral/chunked.py +273 -0
  70. oscura/analyzers/spectral/chunked_fft.py +571 -0
  71. oscura/analyzers/spectral/chunked_wavelet.py +391 -0
  72. oscura/analyzers/spectral/fft.py +92 -0
  73. oscura/analyzers/statistical/__init__.py +250 -0
  74. oscura/analyzers/statistical/checksum.py +923 -0
  75. oscura/analyzers/statistical/chunked_corr.py +228 -0
  76. oscura/analyzers/statistical/classification.py +778 -0
  77. oscura/analyzers/statistical/entropy.py +1113 -0
  78. oscura/analyzers/statistical/ngrams.py +614 -0
  79. oscura/analyzers/statistics/__init__.py +119 -0
  80. oscura/analyzers/statistics/advanced.py +885 -0
  81. oscura/analyzers/statistics/basic.py +263 -0
  82. oscura/analyzers/statistics/correlation.py +630 -0
  83. oscura/analyzers/statistics/distribution.py +298 -0
  84. oscura/analyzers/statistics/outliers.py +463 -0
  85. oscura/analyzers/statistics/streaming.py +93 -0
  86. oscura/analyzers/statistics/trend.py +520 -0
  87. oscura/analyzers/validation.py +598 -0
  88. oscura/analyzers/waveform/__init__.py +36 -0
  89. oscura/analyzers/waveform/measurements.py +943 -0
  90. oscura/analyzers/waveform/measurements_with_uncertainty.py +371 -0
  91. oscura/analyzers/waveform/spectral.py +1689 -0
  92. oscura/analyzers/waveform/wavelets.py +298 -0
  93. oscura/api/__init__.py +62 -0
  94. oscura/api/dsl.py +538 -0
  95. oscura/api/fluent.py +571 -0
  96. oscura/api/operators.py +498 -0
  97. oscura/api/optimization.py +392 -0
  98. oscura/api/profiling.py +396 -0
  99. oscura/automotive/__init__.py +73 -0
  100. oscura/automotive/can/__init__.py +52 -0
  101. oscura/automotive/can/analysis.py +356 -0
  102. oscura/automotive/can/checksum.py +250 -0
  103. oscura/automotive/can/correlation.py +212 -0
  104. oscura/automotive/can/discovery.py +355 -0
  105. oscura/automotive/can/message_wrapper.py +375 -0
  106. oscura/automotive/can/models.py +385 -0
  107. oscura/automotive/can/patterns.py +381 -0
  108. oscura/automotive/can/session.py +452 -0
  109. oscura/automotive/can/state_machine.py +300 -0
  110. oscura/automotive/can/stimulus_response.py +461 -0
  111. oscura/automotive/dbc/__init__.py +15 -0
  112. oscura/automotive/dbc/generator.py +156 -0
  113. oscura/automotive/dbc/parser.py +146 -0
  114. oscura/automotive/dtc/__init__.py +30 -0
  115. oscura/automotive/dtc/database.py +3036 -0
  116. oscura/automotive/j1939/__init__.py +14 -0
  117. oscura/automotive/j1939/decoder.py +745 -0
  118. oscura/automotive/loaders/__init__.py +35 -0
  119. oscura/automotive/loaders/asc.py +98 -0
  120. oscura/automotive/loaders/blf.py +77 -0
  121. oscura/automotive/loaders/csv_can.py +136 -0
  122. oscura/automotive/loaders/dispatcher.py +136 -0
  123. oscura/automotive/loaders/mdf.py +331 -0
  124. oscura/automotive/loaders/pcap.py +132 -0
  125. oscura/automotive/obd/__init__.py +14 -0
  126. oscura/automotive/obd/decoder.py +707 -0
  127. oscura/automotive/uds/__init__.py +48 -0
  128. oscura/automotive/uds/decoder.py +265 -0
  129. oscura/automotive/uds/models.py +64 -0
  130. oscura/automotive/visualization.py +369 -0
  131. oscura/batch/__init__.py +55 -0
  132. oscura/batch/advanced.py +627 -0
  133. oscura/batch/aggregate.py +300 -0
  134. oscura/batch/analyze.py +139 -0
  135. oscura/batch/logging.py +487 -0
  136. oscura/batch/metrics.py +556 -0
  137. oscura/builders/__init__.py +41 -0
  138. oscura/builders/signal_builder.py +1131 -0
  139. oscura/cli/__init__.py +14 -0
  140. oscura/cli/batch.py +339 -0
  141. oscura/cli/characterize.py +273 -0
  142. oscura/cli/compare.py +775 -0
  143. oscura/cli/decode.py +551 -0
  144. oscura/cli/main.py +247 -0
  145. oscura/cli/shell.py +350 -0
  146. oscura/comparison/__init__.py +66 -0
  147. oscura/comparison/compare.py +397 -0
  148. oscura/comparison/golden.py +487 -0
  149. oscura/comparison/limits.py +391 -0
  150. oscura/comparison/mask.py +434 -0
  151. oscura/comparison/trace_diff.py +30 -0
  152. oscura/comparison/visualization.py +481 -0
  153. oscura/compliance/__init__.py +70 -0
  154. oscura/compliance/advanced.py +756 -0
  155. oscura/compliance/masks.py +363 -0
  156. oscura/compliance/reporting.py +483 -0
  157. oscura/compliance/testing.py +298 -0
  158. oscura/component/__init__.py +38 -0
  159. oscura/component/impedance.py +365 -0
  160. oscura/component/reactive.py +598 -0
  161. oscura/component/transmission_line.py +312 -0
  162. oscura/config/__init__.py +191 -0
  163. oscura/config/defaults.py +254 -0
  164. oscura/config/loader.py +348 -0
  165. oscura/config/memory.py +271 -0
  166. oscura/config/migration.py +458 -0
  167. oscura/config/pipeline.py +1077 -0
  168. oscura/config/preferences.py +530 -0
  169. oscura/config/protocol.py +875 -0
  170. oscura/config/schema.py +713 -0
  171. oscura/config/settings.py +420 -0
  172. oscura/config/thresholds.py +599 -0
  173. oscura/convenience.py +457 -0
  174. oscura/core/__init__.py +299 -0
  175. oscura/core/audit.py +457 -0
  176. oscura/core/backend_selector.py +405 -0
  177. oscura/core/cache.py +590 -0
  178. oscura/core/cancellation.py +439 -0
  179. oscura/core/confidence.py +225 -0
  180. oscura/core/config.py +506 -0
  181. oscura/core/correlation.py +216 -0
  182. oscura/core/cross_domain.py +422 -0
  183. oscura/core/debug.py +301 -0
  184. oscura/core/edge_cases.py +541 -0
  185. oscura/core/exceptions.py +535 -0
  186. oscura/core/gpu_backend.py +523 -0
  187. oscura/core/lazy.py +832 -0
  188. oscura/core/log_query.py +540 -0
  189. oscura/core/logging.py +931 -0
  190. oscura/core/logging_advanced.py +952 -0
  191. oscura/core/memoize.py +171 -0
  192. oscura/core/memory_check.py +274 -0
  193. oscura/core/memory_guard.py +290 -0
  194. oscura/core/memory_limits.py +336 -0
  195. oscura/core/memory_monitor.py +453 -0
  196. oscura/core/memory_progress.py +465 -0
  197. oscura/core/memory_warnings.py +315 -0
  198. oscura/core/numba_backend.py +362 -0
  199. oscura/core/performance.py +352 -0
  200. oscura/core/progress.py +524 -0
  201. oscura/core/provenance.py +358 -0
  202. oscura/core/results.py +331 -0
  203. oscura/core/types.py +504 -0
  204. oscura/core/uncertainty.py +383 -0
  205. oscura/discovery/__init__.py +52 -0
  206. oscura/discovery/anomaly_detector.py +672 -0
  207. oscura/discovery/auto_decoder.py +415 -0
  208. oscura/discovery/comparison.py +497 -0
  209. oscura/discovery/quality_validator.py +528 -0
  210. oscura/discovery/signal_detector.py +769 -0
  211. oscura/dsl/__init__.py +73 -0
  212. oscura/dsl/commands.py +246 -0
  213. oscura/dsl/interpreter.py +455 -0
  214. oscura/dsl/parser.py +689 -0
  215. oscura/dsl/repl.py +172 -0
  216. oscura/exceptions.py +59 -0
  217. oscura/exploratory/__init__.py +111 -0
  218. oscura/exploratory/error_recovery.py +642 -0
  219. oscura/exploratory/fuzzy.py +513 -0
  220. oscura/exploratory/fuzzy_advanced.py +786 -0
  221. oscura/exploratory/legacy.py +831 -0
  222. oscura/exploratory/parse.py +358 -0
  223. oscura/exploratory/recovery.py +275 -0
  224. oscura/exploratory/sync.py +382 -0
  225. oscura/exploratory/unknown.py +707 -0
  226. oscura/export/__init__.py +25 -0
  227. oscura/export/wireshark/README.md +265 -0
  228. oscura/export/wireshark/__init__.py +47 -0
  229. oscura/export/wireshark/generator.py +312 -0
  230. oscura/export/wireshark/lua_builder.py +159 -0
  231. oscura/export/wireshark/templates/dissector.lua.j2 +92 -0
  232. oscura/export/wireshark/type_mapping.py +165 -0
  233. oscura/export/wireshark/validator.py +105 -0
  234. oscura/exporters/__init__.py +94 -0
  235. oscura/exporters/csv.py +303 -0
  236. oscura/exporters/exporters.py +44 -0
  237. oscura/exporters/hdf5.py +219 -0
  238. oscura/exporters/html_export.py +701 -0
  239. oscura/exporters/json_export.py +291 -0
  240. oscura/exporters/markdown_export.py +367 -0
  241. oscura/exporters/matlab_export.py +354 -0
  242. oscura/exporters/npz_export.py +219 -0
  243. oscura/exporters/spice_export.py +210 -0
  244. oscura/extensibility/__init__.py +131 -0
  245. oscura/extensibility/docs.py +752 -0
  246. oscura/extensibility/extensions.py +1125 -0
  247. oscura/extensibility/logging.py +259 -0
  248. oscura/extensibility/measurements.py +485 -0
  249. oscura/extensibility/plugins.py +414 -0
  250. oscura/extensibility/registry.py +346 -0
  251. oscura/extensibility/templates.py +913 -0
  252. oscura/extensibility/validation.py +651 -0
  253. oscura/filtering/__init__.py +89 -0
  254. oscura/filtering/base.py +563 -0
  255. oscura/filtering/convenience.py +564 -0
  256. oscura/filtering/design.py +725 -0
  257. oscura/filtering/filters.py +32 -0
  258. oscura/filtering/introspection.py +605 -0
  259. oscura/guidance/__init__.py +24 -0
  260. oscura/guidance/recommender.py +429 -0
  261. oscura/guidance/wizard.py +518 -0
  262. oscura/inference/__init__.py +251 -0
  263. oscura/inference/active_learning/README.md +153 -0
  264. oscura/inference/active_learning/__init__.py +38 -0
  265. oscura/inference/active_learning/lstar.py +257 -0
  266. oscura/inference/active_learning/observation_table.py +230 -0
  267. oscura/inference/active_learning/oracle.py +78 -0
  268. oscura/inference/active_learning/teachers/__init__.py +15 -0
  269. oscura/inference/active_learning/teachers/simulator.py +192 -0
  270. oscura/inference/adaptive_tuning.py +453 -0
  271. oscura/inference/alignment.py +653 -0
  272. oscura/inference/bayesian.py +943 -0
  273. oscura/inference/binary.py +1016 -0
  274. oscura/inference/crc_reverse.py +711 -0
  275. oscura/inference/logic.py +288 -0
  276. oscura/inference/message_format.py +1305 -0
  277. oscura/inference/protocol.py +417 -0
  278. oscura/inference/protocol_dsl.py +1084 -0
  279. oscura/inference/protocol_library.py +1230 -0
  280. oscura/inference/sequences.py +809 -0
  281. oscura/inference/signal_intelligence.py +1509 -0
  282. oscura/inference/spectral.py +215 -0
  283. oscura/inference/state_machine.py +634 -0
  284. oscura/inference/stream.py +918 -0
  285. oscura/integrations/__init__.py +59 -0
  286. oscura/integrations/llm.py +1827 -0
  287. oscura/jupyter/__init__.py +32 -0
  288. oscura/jupyter/display.py +268 -0
  289. oscura/jupyter/magic.py +334 -0
  290. oscura/loaders/__init__.py +526 -0
  291. oscura/loaders/binary.py +69 -0
  292. oscura/loaders/configurable.py +1255 -0
  293. oscura/loaders/csv.py +26 -0
  294. oscura/loaders/csv_loader.py +473 -0
  295. oscura/loaders/hdf5.py +9 -0
  296. oscura/loaders/hdf5_loader.py +510 -0
  297. oscura/loaders/lazy.py +370 -0
  298. oscura/loaders/mmap_loader.py +583 -0
  299. oscura/loaders/numpy_loader.py +436 -0
  300. oscura/loaders/pcap.py +432 -0
  301. oscura/loaders/preprocessing.py +368 -0
  302. oscura/loaders/rigol.py +287 -0
  303. oscura/loaders/sigrok.py +321 -0
  304. oscura/loaders/tdms.py +367 -0
  305. oscura/loaders/tektronix.py +711 -0
  306. oscura/loaders/validation.py +584 -0
  307. oscura/loaders/vcd.py +464 -0
  308. oscura/loaders/wav.py +233 -0
  309. oscura/math/__init__.py +45 -0
  310. oscura/math/arithmetic.py +824 -0
  311. oscura/math/interpolation.py +413 -0
  312. oscura/onboarding/__init__.py +39 -0
  313. oscura/onboarding/help.py +498 -0
  314. oscura/onboarding/tutorials.py +405 -0
  315. oscura/onboarding/wizard.py +466 -0
  316. oscura/optimization/__init__.py +19 -0
  317. oscura/optimization/parallel.py +440 -0
  318. oscura/optimization/search.py +532 -0
  319. oscura/pipeline/__init__.py +43 -0
  320. oscura/pipeline/base.py +338 -0
  321. oscura/pipeline/composition.py +242 -0
  322. oscura/pipeline/parallel.py +448 -0
  323. oscura/pipeline/pipeline.py +375 -0
  324. oscura/pipeline/reverse_engineering.py +1119 -0
  325. oscura/plugins/__init__.py +122 -0
  326. oscura/plugins/base.py +272 -0
  327. oscura/plugins/cli.py +497 -0
  328. oscura/plugins/discovery.py +411 -0
  329. oscura/plugins/isolation.py +418 -0
  330. oscura/plugins/lifecycle.py +959 -0
  331. oscura/plugins/manager.py +493 -0
  332. oscura/plugins/registry.py +421 -0
  333. oscura/plugins/versioning.py +372 -0
  334. oscura/py.typed +0 -0
  335. oscura/quality/__init__.py +65 -0
  336. oscura/quality/ensemble.py +740 -0
  337. oscura/quality/explainer.py +338 -0
  338. oscura/quality/scoring.py +616 -0
  339. oscura/quality/warnings.py +456 -0
  340. oscura/reporting/__init__.py +248 -0
  341. oscura/reporting/advanced.py +1234 -0
  342. oscura/reporting/analyze.py +448 -0
  343. oscura/reporting/argument_preparer.py +596 -0
  344. oscura/reporting/auto_report.py +507 -0
  345. oscura/reporting/batch.py +615 -0
  346. oscura/reporting/chart_selection.py +223 -0
  347. oscura/reporting/comparison.py +330 -0
  348. oscura/reporting/config.py +615 -0
  349. oscura/reporting/content/__init__.py +39 -0
  350. oscura/reporting/content/executive.py +127 -0
  351. oscura/reporting/content/filtering.py +191 -0
  352. oscura/reporting/content/minimal.py +257 -0
  353. oscura/reporting/content/verbosity.py +162 -0
  354. oscura/reporting/core.py +508 -0
  355. oscura/reporting/core_formats/__init__.py +17 -0
  356. oscura/reporting/core_formats/multi_format.py +210 -0
  357. oscura/reporting/engine.py +836 -0
  358. oscura/reporting/export.py +366 -0
  359. oscura/reporting/formatting/__init__.py +129 -0
  360. oscura/reporting/formatting/emphasis.py +81 -0
  361. oscura/reporting/formatting/numbers.py +403 -0
  362. oscura/reporting/formatting/standards.py +55 -0
  363. oscura/reporting/formatting.py +466 -0
  364. oscura/reporting/html.py +578 -0
  365. oscura/reporting/index.py +590 -0
  366. oscura/reporting/multichannel.py +296 -0
  367. oscura/reporting/output.py +379 -0
  368. oscura/reporting/pdf.py +373 -0
  369. oscura/reporting/plots.py +731 -0
  370. oscura/reporting/pptx_export.py +360 -0
  371. oscura/reporting/renderers/__init__.py +11 -0
  372. oscura/reporting/renderers/pdf.py +94 -0
  373. oscura/reporting/sections.py +471 -0
  374. oscura/reporting/standards.py +680 -0
  375. oscura/reporting/summary_generator.py +368 -0
  376. oscura/reporting/tables.py +397 -0
  377. oscura/reporting/template_system.py +724 -0
  378. oscura/reporting/templates/__init__.py +15 -0
  379. oscura/reporting/templates/definition.py +205 -0
  380. oscura/reporting/templates/index.html +649 -0
  381. oscura/reporting/templates/index.md +173 -0
  382. oscura/schemas/__init__.py +158 -0
  383. oscura/schemas/bus_configuration.json +322 -0
  384. oscura/schemas/device_mapping.json +182 -0
  385. oscura/schemas/packet_format.json +418 -0
  386. oscura/schemas/protocol_definition.json +363 -0
  387. oscura/search/__init__.py +16 -0
  388. oscura/search/anomaly.py +292 -0
  389. oscura/search/context.py +149 -0
  390. oscura/search/pattern.py +160 -0
  391. oscura/session/__init__.py +34 -0
  392. oscura/session/annotations.py +289 -0
  393. oscura/session/history.py +313 -0
  394. oscura/session/session.py +445 -0
  395. oscura/streaming/__init__.py +43 -0
  396. oscura/streaming/chunked.py +611 -0
  397. oscura/streaming/progressive.py +393 -0
  398. oscura/streaming/realtime.py +622 -0
  399. oscura/testing/__init__.py +54 -0
  400. oscura/testing/synthetic.py +808 -0
  401. oscura/triggering/__init__.py +68 -0
  402. oscura/triggering/base.py +229 -0
  403. oscura/triggering/edge.py +353 -0
  404. oscura/triggering/pattern.py +344 -0
  405. oscura/triggering/pulse.py +581 -0
  406. oscura/triggering/window.py +453 -0
  407. oscura/ui/__init__.py +48 -0
  408. oscura/ui/formatters.py +526 -0
  409. oscura/ui/progressive_display.py +340 -0
  410. oscura/utils/__init__.py +99 -0
  411. oscura/utils/autodetect.py +338 -0
  412. oscura/utils/buffer.py +389 -0
  413. oscura/utils/lazy.py +407 -0
  414. oscura/utils/lazy_imports.py +147 -0
  415. oscura/utils/memory.py +836 -0
  416. oscura/utils/memory_advanced.py +1326 -0
  417. oscura/utils/memory_extensions.py +465 -0
  418. oscura/utils/progressive.py +352 -0
  419. oscura/utils/windowing.py +362 -0
  420. oscura/visualization/__init__.py +321 -0
  421. oscura/visualization/accessibility.py +526 -0
  422. oscura/visualization/annotations.py +374 -0
  423. oscura/visualization/axis_scaling.py +305 -0
  424. oscura/visualization/colors.py +453 -0
  425. oscura/visualization/digital.py +337 -0
  426. oscura/visualization/eye.py +420 -0
  427. oscura/visualization/histogram.py +281 -0
  428. oscura/visualization/interactive.py +858 -0
  429. oscura/visualization/jitter.py +702 -0
  430. oscura/visualization/keyboard.py +394 -0
  431. oscura/visualization/layout.py +365 -0
  432. oscura/visualization/optimization.py +1028 -0
  433. oscura/visualization/palettes.py +446 -0
  434. oscura/visualization/plot.py +92 -0
  435. oscura/visualization/power.py +290 -0
  436. oscura/visualization/power_extended.py +626 -0
  437. oscura/visualization/presets.py +467 -0
  438. oscura/visualization/protocols.py +932 -0
  439. oscura/visualization/render.py +207 -0
  440. oscura/visualization/rendering.py +444 -0
  441. oscura/visualization/reverse_engineering.py +791 -0
  442. oscura/visualization/signal_integrity.py +808 -0
  443. oscura/visualization/specialized.py +553 -0
  444. oscura/visualization/spectral.py +811 -0
  445. oscura/visualization/styles.py +381 -0
  446. oscura/visualization/thumbnails.py +311 -0
  447. oscura/visualization/time_axis.py +351 -0
  448. oscura/visualization/waveform.py +367 -0
  449. oscura/workflow/__init__.py +13 -0
  450. oscura/workflow/dag.py +377 -0
  451. oscura/workflows/__init__.py +58 -0
  452. oscura/workflows/compliance.py +280 -0
  453. oscura/workflows/digital.py +272 -0
  454. oscura/workflows/multi_trace.py +502 -0
  455. oscura/workflows/power.py +178 -0
  456. oscura/workflows/protocol.py +492 -0
  457. oscura/workflows/reverse_engineering.py +639 -0
  458. oscura/workflows/signal_integrity.py +227 -0
  459. oscura-0.1.1.dist-info/METADATA +300 -0
  460. oscura-0.1.1.dist-info/RECORD +463 -0
  461. oscura-0.1.1.dist-info/entry_points.txt +2 -0
  462. {oscura-0.0.1.dist-info → oscura-0.1.1.dist-info}/licenses/LICENSE +1 -1
  463. oscura-0.0.1.dist-info/METADATA +0 -63
  464. oscura-0.0.1.dist-info/RECORD +0 -5
  465. {oscura-0.0.1.dist-info → oscura-0.1.1.dist-info}/WHEEL +0 -0
@@ -0,0 +1,943 @@
1
+ """Bayesian inference for signal analysis and protocol characterization.
2
+
3
+ This module provides probabilistic reasoning about signal characteristics and
4
+ protocol properties using Bayesian updating. It enables inference with full
5
+ uncertainty quantification and supports sequential updates as more data arrives.
6
+
7
+
8
+ Key Features:
9
+ - Prior distributions for common signal properties (baud rate, frequency, etc.)
10
+ - Likelihood functions for observed measurements
11
+ - Posterior calculation with credible intervals
12
+ - Integration with quality scoring system (0-1 confidence mapping)
13
+ - Sequential Bayesian updating for streaming analysis
14
+ - Support for multiple distribution families (normal, uniform, beta, etc.)
15
+
16
+ Example:
17
+ >>> from oscura.inference.bayesian import BayesianInference, infer_with_uncertainty
18
+ >>> import numpy as np
19
+ >>>
20
+ >>> # Infer baud rate from edge timing observations
21
+ >>> inference = BayesianInference()
22
+ >>> edge_times = np.array([0.0, 0.00001, 0.00002, 0.00003]) # 100 kHz
23
+ >>> posterior = inference.infer_baud_rate(edge_times)
24
+ >>> print(f"Baud rate: {posterior.mean:.0f} ± {posterior.std:.0f}")
25
+ >>> print(f"95% CI: [{posterior.ci_lower:.0f}, {posterior.ci_upper:.0f}]")
26
+ >>> print(f"Confidence: {posterior.confidence:.2%}")
27
+ >>>
28
+ >>> # Infer number of symbols from amplitude histogram
29
+ >>> amplitudes = np.random.choice([0.0, 0.33, 0.67, 1.0], size=1000)
30
+ >>> histogram, _ = np.histogram(amplitudes, bins=50)
31
+ >>> symbol_posterior = inference.infer_symbol_count(histogram)
32
+ >>> print(f"Estimated symbols: {int(symbol_posterior.mean)}")
33
+ >>>
34
+ >>> # Sequential updating for streaming data
35
+ >>> from oscura.inference.bayesian import SequentialBayesian, Prior
36
+ >>> prior = Prior("normal", {"mean": 115200, "std": 10000})
37
+ >>> sequential = SequentialBayesian("baud_rate", prior)
38
+ >>> for _observation in streaming_data:
39
+ ... posterior = sequential.update(likelihood_fn)
40
+ ... if sequential.get_confidence() > 0.95:
41
+ ... break # High confidence reached
42
+
43
+ References:
44
+ - Gelman et al., "Bayesian Data Analysis" (3rd ed.)
45
+ - Murphy, "Machine Learning: A Probabilistic Perspective"
46
+ - scipy.stats documentation for distribution families
47
+ """
48
+
49
+ from __future__ import annotations
50
+
51
+ from collections.abc import Callable
52
+ from dataclasses import dataclass
53
+ from typing import TYPE_CHECKING, Any
54
+
55
+ import numpy as np
56
+ from scipy import stats
57
+ from scipy.signal import find_peaks
58
+
59
+ from oscura.core.exceptions import AnalysisError, InsufficientDataError
60
+
61
+ if TYPE_CHECKING:
62
+ from numpy.typing import NDArray
63
+
64
+
65
@dataclass
class Prior:
    """Prior distribution for a parameter.

    Represents prior belief about a parameter before observing data.
    Supports common distribution families used in signal analysis.

    Attributes:
        distribution: Distribution family name (e.g., "normal", "uniform", "beta").
        params: Distribution parameters as dict (keys depend on distribution).

    Supported distributions:
        - "normal": params = {"mean": float, "std": float}
        - "uniform": params = {"low": float, "high": float}
        - "log_uniform": params = {"low": float, "high": float} (for scale-invariant priors)
        - "beta": params = {"a": float, "b": float} (for probabilities)
        - "gamma": params = {"shape": float, "scale": float} (for positive values)
        - "half_normal": params = {"scale": float} (for positive values like noise std)
        - "geometric": params = {"p": float} (for discrete counts)

    Example:
        >>> # Weakly informative prior for baud rate (log-uniform over range)
        >>> prior = Prior("log_uniform", {"low": 100, "high": 10_000_000})
        >>> samples = prior.sample(1000)
        >>> density = prior.pdf(115200)
        >>>
        >>> # Prior for duty cycle (beta distribution centered at 0.5)
        >>> duty_prior = Prior("beta", {"a": 2, "b": 2})
    """

    distribution: str
    params: dict[str, float]

    def __post_init__(self) -> None:
        """Validate distribution name and required parameters.

        Raises:
            ValueError: If the distribution family is unknown or any
                required parameter key is missing from ``params``.
        """
        # Single source of truth: the keys of this dict are the valid
        # distribution names, and the values are their required parameters.
        # (Previously these were duplicated as a separate set and dict.)
        required_params = {
            "normal": {"mean", "std"},
            "uniform": {"low", "high"},
            "log_uniform": {"low", "high"},
            "beta": {"a", "b"},
            "gamma": {"shape", "scale"},
            "half_normal": {"scale"},
            "geometric": {"p"},
        }

        if self.distribution not in required_params:
            raise ValueError(
                f"Unknown distribution: {self.distribution}. "
                f"Supported: {sorted(required_params)}"
            )

        missing = required_params[self.distribution] - set(self.params.keys())
        if missing:
            raise ValueError(f"Missing parameters for {self.distribution} distribution: {missing}")

    def pdf(self, x: float | NDArray[np.floating[Any]]) -> float | NDArray[np.floating[Any]]:
        """Compute probability density (or mass, for "geometric") at x.

        Args:
            x: Value(s) at which to evaluate density. Scalars return a
                float; arrays return an array of the same shape.

        Returns:
            Probability density value(s).

        Raises:
            ValueError: If distribution is not recognized.

        Note:
            Bug fix: previous implementation wrapped every result in
            ``float(...)``, which raised TypeError for array inputs even
            though the signature accepts ``NDArray``. Arrays are now
            supported for all distribution families.
        """
        if self.distribution == "normal":
            density = stats.norm.pdf(x, loc=self.params["mean"], scale=self.params["std"])
        elif self.distribution == "uniform":
            density = stats.uniform.pdf(
                x, loc=self.params["low"], scale=self.params["high"] - self.params["low"]
            )
        elif self.distribution == "log_uniform":
            # Log-uniform: uniform on log scale.
            log_low = np.log(self.params["low"])
            log_high = np.log(self.params["high"])
            safe_x = np.maximum(x, 1e-100)  # Avoid log(0)
            density = stats.uniform.pdf(np.log(safe_x), loc=log_low, scale=log_high - log_low)
            # Jacobian correction: d(log x)/dx = 1/x
            density = density / safe_x
        elif self.distribution == "beta":
            density = stats.beta.pdf(x, a=self.params["a"], b=self.params["b"])
        elif self.distribution == "gamma":
            density = stats.gamma.pdf(x, a=self.params["shape"], scale=self.params["scale"])
        elif self.distribution == "half_normal":
            density = stats.halfnorm.pdf(x, scale=self.params["scale"])
        elif self.distribution == "geometric":
            density = stats.geom.pmf(x, p=self.params["p"])
        else:
            raise ValueError(f"PDF not implemented for {self.distribution}")

        # Preserve scalar-in/scalar-out behavior while supporting arrays.
        if np.isscalar(x):
            return float(density)
        return np.asarray(density)

    def sample(self, n: int = 1) -> NDArray[np.floating[Any]]:
        """Draw samples from the prior distribution.

        Args:
            n: Number of samples to draw.

        Returns:
            Array of ``n`` samples from the prior.

        Raises:
            ValueError: If distribution is not recognized.
        """
        if self.distribution == "normal":
            return stats.norm.rvs(loc=self.params["mean"], scale=self.params["std"], size=n)  # type: ignore[no-any-return]
        elif self.distribution == "uniform":
            return stats.uniform.rvs(  # type: ignore[no-any-return]
                loc=self.params["low"], scale=self.params["high"] - self.params["low"], size=n
            )
        elif self.distribution == "log_uniform":
            # Sample uniformly on log scale, then exponentiate.
            log_low = np.log(self.params["low"])
            log_high = np.log(self.params["high"])
            log_samples = stats.uniform.rvs(loc=log_low, scale=log_high - log_low, size=n)
            return np.exp(log_samples)  # type: ignore[no-any-return]
        elif self.distribution == "beta":
            return stats.beta.rvs(a=self.params["a"], b=self.params["b"], size=n)  # type: ignore[no-any-return]
        elif self.distribution == "gamma":
            return stats.gamma.rvs(a=self.params["shape"], scale=self.params["scale"], size=n)  # type: ignore[no-any-return]
        elif self.distribution == "half_normal":
            return stats.halfnorm.rvs(scale=self.params["scale"], size=n)  # type: ignore[no-any-return]
        elif self.distribution == "geometric":
            # NOTE(review): returns integer counts, not floats — callers
            # treating samples as continuous should be aware.
            return stats.geom.rvs(p=self.params["p"], size=n)  # type: ignore[no-any-return]
        else:
            raise ValueError(f"Sampling not implemented for {self.distribution}")
206
+
207
+
208
@dataclass
class Posterior:
    """Posterior distribution after updating with evidence.

    Captures updated belief about a parameter once data has been observed:
    a point estimate, its uncertainty, a 95% credible interval, and an
    optional sample array for non-parametric posteriors.

    Attributes:
        mean: Posterior mean (point estimate).
        std: Posterior standard deviation (uncertainty).
        ci_lower: Lower bound of 95% credible interval.
        ci_upper: Upper bound of 95% credible interval.
        samples: Optional array of posterior samples (for non-parametric posteriors).

    Example:
        >>> posterior = Posterior(mean=115200, std=5000, ci_lower=105600, ci_upper=124800)
        >>> print(f"Estimate: {posterior.mean:.0f} ± {posterior.std:.0f}")
        >>> print(f"95% CI: [{posterior.ci_lower:.0f}, {posterior.ci_upper:.0f}]")
        >>> print(f"Confidence: {posterior.confidence:.2%}")
    """

    mean: float
    std: float
    ci_lower: float
    ci_upper: float
    samples: NDArray[np.floating[Any]] | None = None

    @property
    def confidence(self) -> float:
        """Map posterior certainty onto a 0-1 confidence score.

        Uses the coefficient of variation (CV = std / |mean|) and a linear
        ramp: confidence = 1 - min(1, CV / 0.5). A tighter posterior
        (smaller CV) yields a higher score; CV at or above 50% saturates
        at zero confidence. Examples with this formula:

            - CV = 0.05  ->  confidence 0.90
            - CV = 0.10  ->  confidence 0.80
            - CV = 0.25  ->  confidence 0.50
            - CV >= 0.50 ->  confidence 0.00

        Returns:
            Confidence score clamped to [0, 1].

        Example:
            >>> tight = Posterior(mean=100, std=5, ci_lower=90, ci_upper=110)
            >>> tight.confidence  # 0.90
            >>> loose = Posterior(mean=100, std=30, ci_lower=40, ci_upper=160)
            >>> loose.confidence  # 0.40
        """
        # Guard against division by a (near-)zero mean.
        tiny = 1e-10
        cv = self.std / tiny if abs(self.mean) < tiny else abs(self.std / self.mean)

        # 50% CV marks the point where confidence bottoms out at zero.
        ramp = 1.0 - min(1.0, cv / 0.5)

        # Clamp into the valid [0, 1] range.
        return min(1.0, max(0.0, ramp))
274
+
275
+
276
class BayesianInference:
    """Bayesian inference for signal analysis.

    Provides methods for inferring signal properties (baud rate, frequency,
    symbol count, etc.) with full uncertainty quantification using Bayesian
    methods.

    Attributes:
        priors: Dictionary of default prior distributions for common parameters.

    Example:
        >>> inference = BayesianInference()
        >>>
        >>> # Infer baud rate from edge timings
        >>> edge_times = np.array([0.0, 0.00001, 0.00002, 0.00003])
        >>> posterior = inference.infer_baud_rate(edge_times)
        >>>
        >>> # Infer protocol type probabilities
        >>> observations = {"idle_level": "high", "regularity": 0.3, "duty_cycle": 0.9}
        >>> protocol_probs = inference.infer_protocol_type(observations)
        >>> print(protocol_probs)  # {"UART": 0.85, "I2C": 0.10, "SPI": 0.05}
    """

    def __init__(self) -> None:
        """Initialize Bayesian inference engine with default priors."""
        self.priors = self._default_priors()

    def _default_priors(self) -> dict[str, Prior]:
        """Create default priors for common signal properties.

        Returns:
            Dictionary mapping parameter names to Prior objects.

        Priors are designed to be weakly informative:
        - Broad enough to cover typical use cases
        - Narrow enough to provide regularization
        - Match physical constraints (e.g., positive values)
        """
        return {
            # Log-uniform for scale-invariant parameters (wide range)
            "baud_rate": Prior("log_uniform", {"low": 100, "high": 10_000_000}),
            "frequency": Prior("log_uniform", {"low": 1, "high": 1e9}),
            # Beta distribution for probabilities/proportions
            "duty_cycle": Prior("beta", {"a": 2, "b": 2}),  # Centered at 0.5
            # Half-normal for positive values (noise, std, etc.)
            "noise_std": Prior("half_normal", {"scale": 0.1}),
            # Geometric for discrete counts (favor smaller values)
            "num_symbols": Prior("geometric", {"p": 0.3}),
            # Normal for typical signal characteristics
            "amplitude": Prior("normal", {"mean": 0.0, "std": 1.0}),
            "offset": Prior("normal", {"mean": 0.0, "std": 0.1}),
        }

    @staticmethod
    def _posterior_from_weighted_samples(
        samples: NDArray[np.floating[Any]],
        weights: NDArray[np.floating[Any]],
    ) -> Posterior:
        """Build a Posterior (mean, std, 95% CI) from importance-weighted samples.

        Args:
            samples: Parameter samples drawn from the prior.
            weights: Normalized importance weights (sum to 1).

        Returns:
            Posterior summarizing the weighted sample cloud.
        """
        mean = float(np.sum(samples * weights))
        variance = float(np.sum(weights * (samples - mean) ** 2))
        std = float(np.sqrt(variance))

        # 95% credible interval via weighted percentiles.
        order = np.argsort(samples)
        sorted_samples = samples[order]
        cumsum = np.cumsum(weights[order])

        # Clamp indices: floating-point rounding in cumsum can push
        # searchsorted one past the last valid index.
        last = len(sorted_samples) - 1
        ci_lower = float(sorted_samples[min(np.searchsorted(cumsum, 0.025), last)])
        ci_upper = float(sorted_samples[min(np.searchsorted(cumsum, 0.975), last)])

        return Posterior(
            mean=mean,
            std=std,
            ci_lower=ci_lower,
            ci_upper=ci_upper,
            samples=samples,
        )

    def update(
        self,
        param: str,
        likelihood_fn: Callable[[float], float],
        *,
        prior: Prior | None = None,
        num_samples: int = 10000,
    ) -> Posterior:
        """Update belief about parameter given observation.

        General-purpose Bayesian updating using sampling-based inference.
        Uses the prior distribution and likelihood function to compute
        the posterior via importance sampling.

        Args:
            param: Parameter name (used to get default prior if not provided).
            likelihood_fn: Function that computes likelihood p(observation | param_value).
            prior: Prior distribution (uses default if None).
            num_samples: Number of samples for posterior approximation.

        Returns:
            Posterior distribution with mean, std, and credible intervals.

        Raises:
            ValueError: If parameter is unknown and no prior is provided.
            AnalysisError: If likelihood function fails, or no prior sample
                has positive likelihood.

        Example:
            >>> def likelihood(rate: float) -> float:
            ...     # Example: Poisson likelihood for event rate
            ...     observed_count = 42
            ...     time_window = 1.0
            ...     expected = rate * time_window
            ...     return stats.poisson.pmf(observed_count, mu=expected)
            >>>
            >>> inference = BayesianInference()
            >>> posterior = inference.update("frequency", likelihood)
        """
        # Resolve the prior: an explicit argument wins over registered defaults.
        if prior is None:
            if param not in self.priors:
                raise ValueError(
                    f"Unknown parameter '{param}' and no prior provided. "
                    f"Known parameters: {list(self.priors.keys())}"
                )
            prior = self.priors[param]

        # Draw the importance-sampling cloud from the prior.
        try:
            samples = prior.sample(num_samples)
        except Exception as e:
            raise AnalysisError(
                f"Failed to sample from prior for '{param}'",
                details=str(e),
            ) from e

        # Evaluate the likelihood at every prior sample.
        try:
            likelihoods = np.array([likelihood_fn(s) for s in samples])
        except Exception as e:
            raise AnalysisError(
                f"Likelihood function failed for '{param}'",
                details=str(e),
                fix_hint="Check that likelihood_fn is compatible with prior samples",
            ) from e

        # A posterior only exists if at least one sample has positive
        # likelihood. This single check replaces the previous duplicated
        # (and partly unreachable) all-zero branches.
        max_likelihood = float(np.max(likelihoods))
        if max_likelihood <= 0:
            raise AnalysisError(
                f"All likelihood values are zero for '{param}'",
                details="Observation may be incompatible with prior range",
                fix_hint="Adjust prior range or check likelihood function",
            )

        # Importance weights, with underflow protection.
        if max_likelihood < 1e-300:
            # Extreme underflow: work in log-space.
            log_likelihoods = np.log(np.maximum(likelihoods, 1e-300))
            log_likelihoods -= np.max(log_likelihoods)  # Normalize
            weights = np.exp(log_likelihoods)
            weights /= np.sum(weights)
        else:
            # Scale by the max first so the sum can neither overflow nor
            # underflow, then normalize to sum to 1.
            scaled = likelihoods / max_likelihood
            weights = scaled / np.sum(scaled)

        return self._posterior_from_weighted_samples(samples, weights)

    def infer_baud_rate(
        self, edge_times: NDArray[np.floating[Any]], *, prior: Prior | None = None
    ) -> Posterior:
        """Infer baud rate from edge timing observations.

        Uses the distribution of inter-edge intervals to infer the underlying
        baud rate. Assumes edges occur at bit boundaries.

        Args:
            edge_times: Array of edge timestamps in seconds.
            prior: Optional prior for baud rate (uses default if None).

        Returns:
            Posterior distribution for baud rate in bits per second.

        Raises:
            InsufficientDataError: If fewer than 2 edges provided or no
                positive inter-edge interval exists.

        Example:
            >>> # 115200 baud UART (bit period = 8.68 μs)
            >>> edge_times = np.array([0, 8.68e-6, 17.36e-6, 26.04e-6])
            >>> posterior = inference.infer_baud_rate(edge_times)
            >>> print(f"Baud rate: {posterior.mean:.0f} bps")
        """
        if len(edge_times) < 2:
            raise InsufficientDataError(
                "Need at least 2 edges to infer baud rate",
                required=2,
                available=len(edge_times),
            )

        # Inter-edge intervals; drop zero/negative ones (should not happen
        # for monotonic timestamps, but be safe).
        intervals = np.diff(edge_times)
        intervals = intervals[intervals > 0]

        if len(intervals) == 0:
            raise InsufficientDataError("No valid inter-edge intervals found")

        # Hoisted out of the likelihood (loop-invariant): the smallest
        # interval serves as a proxy for a single bit period and does not
        # depend on the candidate baud rate.
        min_interval = float(np.min(intervals))

        def likelihood(baud_rate: float) -> float:
            # Gaussian likelihood that the candidate bit period matches the
            # proxy single-bit interval, with 10% tolerance.
            if baud_rate <= 0:
                return 0.0
            bit_period = 1.0 / baud_rate
            sigma = min_interval * 0.1  # 10% uncertainty
            log_likelihood = -0.5 * ((min_interval - bit_period) / sigma) ** 2
            # Clip to prevent extreme underflow: keep at least
            # exp(-700) ≈ 1e-304 so the result stays above zero.
            log_likelihood = max(log_likelihood, -700.0)
            return float(np.exp(log_likelihood))

        return self.update("baud_rate", likelihood, prior=prior)

    def infer_protocol_type(self, observations: dict[str, Any]) -> dict[str, float]:
        """Infer probability of each protocol type given observations.

        Uses a simple Bayesian classifier to compute posterior probabilities
        for different protocol types (UART, SPI, I2C, CAN) based on observed
        signal characteristics.

        Args:
            observations: Dictionary of observed signal characteristics:
                - "idle_level": "high" or "low"
                - "regularity": 0-1 (edge regularity)
                - "duty_cycle": 0-1 (fraction time high)
                - "symbol_rate": Hz (optional)
                - "transition_density": edges/sec (optional)

        Returns:
            Dictionary mapping protocol names to posterior probabilities.
            Probabilities sum to 1.0.

        Example:
            >>> observations = {
            ...     "idle_level": "high",
            ...     "regularity": 0.25,
            ...     "duty_cycle": 0.85,
            ...     "symbol_rate": 115200,
            ... }
            >>> probs = inference.infer_protocol_type(observations)
            >>> print(probs)  # {"UART": 0.85, "I2C": 0.10, "SPI": 0.03, "CAN": 0.02}
        """
        # Prior probabilities (uniform over protocols)
        protocols = ["UART", "SPI", "I2C", "CAN"]
        prior_prob = 1.0 / len(protocols)

        # Heuristic class-conditional likelihoods for each protocol.
        likelihoods = {
            "UART": self._likelihood_uart(observations),
            "SPI": self._likelihood_spi(observations),
            "I2C": self._likelihood_i2c(observations),
            "CAN": self._likelihood_can(observations),
        }

        # Bayes' theorem: posterior ∝ prior × likelihood.
        posteriors = {proto: prior_prob * likelihoods[proto] for proto in protocols}

        # Normalize to sum to 1; fall back to uniform when there is no evidence.
        total = sum(posteriors.values())
        if total > 0:
            posteriors = {proto: prob / total for proto, prob in posteriors.items()}
        else:
            posteriors = {proto: 1.0 / len(protocols) for proto in protocols}

        return posteriors

    def _likelihood_uart(self, obs: dict[str, Any]) -> float:
        """Compute likelihood of observations given UART protocol.

        UART signature: idle-high line, irregular edge timing (data-dependent),
        mostly-high duty cycle.
        """
        likelihood = 1.0

        if obs.get("idle_level") == "high":
            likelihood *= 0.9
        else:
            likelihood *= 0.2

        regularity = obs.get("regularity", 0.5)
        if regularity < 0.3:
            likelihood *= 0.8
        elif regularity > 0.7:
            likelihood *= 0.2

        duty_cycle = obs.get("duty_cycle", 0.5)
        if duty_cycle > 0.7:
            likelihood *= 0.8
        elif duty_cycle < 0.3:
            likelihood *= 0.3

        return likelihood

    def _likelihood_spi(self, obs: dict[str, Any]) -> float:
        """Compute likelihood of observations given SPI protocol.

        SPI signature: highly regular clocking with a duty cycle near 50%.
        """
        likelihood = 1.0

        regularity = obs.get("regularity", 0.5)
        if regularity > 0.7:
            likelihood *= 0.9
        elif regularity < 0.4:
            likelihood *= 0.2

        duty_cycle = obs.get("duty_cycle", 0.5)
        duty_error = abs(duty_cycle - 0.5)
        if duty_error < 0.1:
            likelihood *= 0.8
        elif duty_error > 0.3:
            likelihood *= 0.3

        return likelihood

    def _likelihood_i2c(self, obs: dict[str, Any]) -> float:
        """Compute likelihood of observations given I2C protocol.

        I2C signature: idle-high (open-drain pull-ups), moderate regularity.
        """
        likelihood = 1.0

        if obs.get("idle_level") == "high":
            likelihood *= 0.85
        else:
            likelihood *= 0.3

        regularity = obs.get("regularity", 0.5)
        if 0.4 < regularity < 0.8:
            likelihood *= 0.8
        else:
            likelihood *= 0.4

        return likelihood

    def _likelihood_can(self, obs: dict[str, Any]) -> float:
        """Compute likelihood of observations given CAN protocol.

        CAN signature: idle-high (recessive), moderate irregularity from bit
        stuffing, and standard baud rates (125k/250k/500k/1M).
        """
        likelihood = 1.0

        if obs.get("idle_level") == "high":
            likelihood *= 0.85
        else:
            likelihood *= 0.2

        regularity = obs.get("regularity", 0.5)
        if 0.3 < regularity < 0.7:
            likelihood *= 0.8
        else:
            likelihood *= 0.4

        # Boost when the symbol rate is within 10% of a standard CAN rate.
        symbol_rate = obs.get("symbol_rate", 0)
        if symbol_rate > 0:
            standard_rates = [125000, 250000, 500000, 1000000]
            for rate in standard_rates:
                if abs(symbol_rate - rate) / rate < 0.1:
                    likelihood *= 1.5
                    break

        return likelihood

    def infer_symbol_count(
        self,
        amplitude_histogram: NDArray[np.floating[Any]],
        *,
        prior: Prior | None = None,
        max_symbols: int = 16,
    ) -> Posterior:
        """Infer number of discrete symbols from amplitude distribution.

        Analyzes the amplitude histogram to determine how many discrete
        signal levels (symbols) are present. Useful for multi-level signaling
        (PAM-4, etc.).

        Args:
            amplitude_histogram: Histogram of signal amplitudes (bin counts).
            prior: Optional prior for symbol count (uses default if None).
            max_symbols: Maximum number of symbols to consider.

        Returns:
            Posterior distribution for number of symbols.

        Raises:
            InsufficientDataError: If histogram is empty or all zeros.

        Example:
            >>> # PAM-4 signal (4 levels)
            >>> amplitudes = np.random.choice([0.0, 0.33, 0.67, 1.0], size=1000)
            >>> hist, _ = np.histogram(amplitudes, bins=50)
            >>> posterior = inference.infer_symbol_count(hist)
            >>> print(f"Symbols: {int(posterior.mean)}")  # Should be close to 4
        """
        if len(amplitude_histogram) == 0 or np.sum(amplitude_histogram) == 0:
            raise InsufficientDataError("Amplitude histogram is empty or all zeros")

        # Peak detection happens once, outside the likelihood (it does not
        # depend on the candidate symbol count).
        prominence_threshold = np.max(amplitude_histogram) * 0.1
        detected_peaks, _ = find_peaks(amplitude_histogram, prominence=prominence_threshold)
        peak_indices = list(detected_peaks)

        # find_peaks never reports the first/last bin as a peak, but edge
        # bins can legitimately hold a signal level (e.g. rail-clipped data).
        if len(amplitude_histogram) > 1:
            if (
                amplitude_histogram[0] > prominence_threshold
                and amplitude_histogram[0] > amplitude_histogram[1]
            ):
                peak_indices.insert(0, 0)
            if (
                amplitude_histogram[-1] > prominence_threshold
                and amplitude_histogram[-1] > amplitude_histogram[-2]
            ):
                peak_indices.append(len(amplitude_histogram) - 1)

        num_peaks = len(peak_indices)

        def likelihood(num_symbols: float) -> float:
            # Discrete likelihood: the observed peak count should match the
            # candidate symbol count, with ±1 tolerance for noise.
            k = int(round(num_symbols))
            if k < 1 or k > max_symbols:
                return 0.0
            if abs(num_peaks - k) <= 1:
                return 0.8
            elif abs(num_peaks - k) == 2:
                return 0.3
            else:
                return 0.1

        return self.update(
            "num_symbols",
            likelihood,
            prior=prior,
            num_samples=max_symbols * 100,
        )
732
+
733
+
734
class SequentialBayesian:
    """Sequential Bayesian updating for streaming analysis.

    Maintains a posterior that is updated as new observations arrive.
    Useful for online/streaming signal analysis where data comes in
    incrementally.

    Attributes:
        param: Parameter name being inferred.
        current_posterior: Current posterior after all updates so far.

    Example:
        >>> from oscura.inference.bayesian import SequentialBayesian, Prior
        >>> prior = Prior("normal", {"mean": 115200, "std": 10000})
        >>> sequential = SequentialBayesian("baud_rate", prior)
        >>>
        >>> # Update with streaming observations
        >>> for _observation in streaming_data:
        ...     posterior = sequential.update(likelihood_fn)
        ...     print(f"Current estimate: {posterior.mean:.0f} (confidence: {sequential.get_confidence():.2%})")
        ...     if sequential.get_confidence() > 0.95:
        ...         break  # High confidence reached
    """

    def __init__(self, param: str, prior: Prior) -> None:
        """Initialize sequential Bayesian updater.

        Args:
            param: Parameter name being inferred.
            prior: Initial prior distribution.
        """
        self.param = param
        self.current_posterior: Prior | Posterior = prior
        # Per-update history of (samples, weights), kept for potential
        # resampling. NOTE(review): this grows without bound on long
        # streams; unbounded streaming callers may want to cap or clear it.
        self._samples: list[NDArray[np.floating[Any]]] = []
        self._weights: list[NDArray[np.floating[Any]]] = []

    def update(
        self,
        likelihood_fn: Callable[[float], float],
        *,
        num_samples: int = 5000,
    ) -> Posterior:
        """Update posterior with new observation.

        Performs one step of sequential Bayesian updating. The current
        posterior becomes the prior for the next update.

        Args:
            likelihood_fn: Likelihood function p(observation | param).
            num_samples: Number of samples for approximation.

        Returns:
            Updated posterior distribution.

        Raises:
            AnalysisError: If likelihood function fails.
        """
        # Draw samples from the current belief: the initial Prior, the
        # previous posterior's sample cloud, or (lacking samples) a normal
        # approximation of the previous posterior.
        if isinstance(self.current_posterior, Prior):
            samples = self.current_posterior.sample(num_samples)
        elif self.current_posterior.samples is not None:
            # Resample previous posterior samples with replacement.
            indices = np.random.choice(
                len(self.current_posterior.samples),
                size=num_samples,
                replace=True,
            )
            samples = self.current_posterior.samples[indices]
        else:
            samples = np.random.normal(
                self.current_posterior.mean,
                self.current_posterior.std,
                size=num_samples,
            )

        # Evaluate the likelihood at every sample.
        try:
            likelihoods = np.array([likelihood_fn(s) for s in samples])
        except Exception as e:
            raise AnalysisError(
                f"Likelihood function failed for '{self.param}'",
                details=str(e),
            ) from e

        # Degenerate observation (no sample has support): keep the current
        # posterior unchanged, or return a placeholder if only a Prior is held.
        if np.all(likelihoods == 0):
            if isinstance(self.current_posterior, Posterior):
                return self.current_posterior
            return Posterior(mean=0.0, std=1.0, ci_lower=-2.0, ci_upper=2.0)

        # Normalized importance weights.
        weights = likelihoods / np.sum(likelihoods)

        # Record history for potential resampling.
        self._samples.append(samples)
        self._weights.append(weights)

        # Weighted posterior moments.
        mean = float(np.sum(samples * weights))
        variance = float(np.sum(weights * (samples - mean) ** 2))
        std = float(np.sqrt(variance))

        # 95% credible interval via weighted percentiles.
        order = np.argsort(samples)
        sorted_samples = samples[order]
        cumsum = np.cumsum(weights[order])

        # Clamp indices: floating-point rounding in cumsum can push
        # searchsorted one past the last valid index.
        last = len(sorted_samples) - 1
        ci_lower = float(sorted_samples[min(np.searchsorted(cumsum, 0.025), last)])
        ci_upper = float(sorted_samples[min(np.searchsorted(cumsum, 0.975), last)])

        posterior = Posterior(
            mean=mean,
            std=std,
            ci_lower=ci_lower,
            ci_upper=ci_upper,
            samples=samples,
        )

        self.current_posterior = posterior
        return posterior

    def get_confidence(self) -> float:
        """Get current confidence in the running estimate.

        Returns:
            Confidence score between 0 and 1. Returns 0.0 while only the
            prior is held (i.e. before any successful update).
        """
        if isinstance(self.current_posterior, Posterior):
            return self.current_posterior.confidence
        # Still holding the prior — no evidence incorporated yet.
        return 0.0
873
+
874
+
875
def infer_with_uncertainty(
    measurements: list[float] | NDArray[np.floating[Any]],
    prior: Prior | None = None,
) -> Posterior:
    """Infer parameter value with full uncertainty quantification.

    Convenience function for simple parameter inference from repeated measurements.
    Assumes measurements are normally distributed around the true value.

    Args:
        measurements: List or array of independent measurements.
        prior: Optional prior distribution (uses a weakly informative normal
            centered on the sample mean if None).

    Returns:
        Posterior distribution combining prior and measurement likelihood.

    Raises:
        InsufficientDataError: If measurements list is empty.

    Example:
        >>> # Repeated measurements of signal frequency
        >>> measurements = [99.8, 100.2, 99.9, 100.1, 100.0]  # Hz
        >>> posterior = infer_with_uncertainty(measurements)
        >>> print(f"Frequency: {posterior.mean:.2f} ± {posterior.std:.2f} Hz")
        >>> print(f"95% CI: [{posterior.ci_lower:.2f}, {posterior.ci_upper:.2f}] Hz")
    """
    measurements_array = np.asarray(measurements)

    if len(measurements_array) == 0:
        raise InsufficientDataError(
            "Cannot infer from empty measurements",
            required=1,
            available=0,
        )

    # Sample spread, used both for the default prior width and as the
    # Gaussian likelihood's sigma. With a single measurement there is no
    # spread estimate, so use a nominal 1.0.
    spread = (
        float(np.std(measurements_array, ddof=1)) if len(measurements_array) > 1 else 1.0
    )
    if spread <= 0.0:
        # All measurements identical: std == 0 would divide by zero in the
        # likelihood below. Fall back to a nominal unit spread; the posterior
        # still concentrates at the (exact) sample mean.
        spread = 1.0

    if prior is None:
        # Weakly informative prior: centered at the sample mean, 10x wider
        # than the observed spread.
        sample_mean = float(np.mean(measurements_array))
        prior = Prior("normal", {"mean": sample_mean, "std": spread * 10})

    def likelihood(param_value: float) -> float:
        # Product of independent Gaussian likelihoods, computed in log-space.
        log_likelihood = -0.5 * np.sum(((measurements_array - param_value) / spread) ** 2)
        # Clip to prevent extreme underflow (consistent with
        # BayesianInference.infer_baud_rate): exp(-700) ≈ 1e-304 > 0.
        log_likelihood = max(log_likelihood, -700.0)
        return float(np.exp(log_likelihood))

    inference = BayesianInference()
    return inference.update(
        "parameter",
        likelihood,
        prior=prior,
    )
935
+
936
+
937
# Public API of this module (keep sorted alphabetically).
__all__ = [
    "BayesianInference",
    "Posterior",
    "Prior",
    "SequentialBayesian",
    "infer_with_uncertainty",
]