oscura-0.0.1-py3-none-any.whl → oscura-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (465)
  1. oscura/__init__.py +813 -8
  2. oscura/__main__.py +392 -0
  3. oscura/analyzers/__init__.py +37 -0
  4. oscura/analyzers/digital/__init__.py +177 -0
  5. oscura/analyzers/digital/bus.py +691 -0
  6. oscura/analyzers/digital/clock.py +805 -0
  7. oscura/analyzers/digital/correlation.py +720 -0
  8. oscura/analyzers/digital/edges.py +632 -0
  9. oscura/analyzers/digital/extraction.py +413 -0
  10. oscura/analyzers/digital/quality.py +878 -0
  11. oscura/analyzers/digital/signal_quality.py +877 -0
  12. oscura/analyzers/digital/thresholds.py +708 -0
  13. oscura/analyzers/digital/timing.py +1104 -0
  14. oscura/analyzers/eye/__init__.py +46 -0
  15. oscura/analyzers/eye/diagram.py +434 -0
  16. oscura/analyzers/eye/metrics.py +555 -0
  17. oscura/analyzers/jitter/__init__.py +83 -0
  18. oscura/analyzers/jitter/ber.py +333 -0
  19. oscura/analyzers/jitter/decomposition.py +759 -0
  20. oscura/analyzers/jitter/measurements.py +413 -0
  21. oscura/analyzers/jitter/spectrum.py +220 -0
  22. oscura/analyzers/measurements.py +40 -0
  23. oscura/analyzers/packet/__init__.py +171 -0
  24. oscura/analyzers/packet/daq.py +1077 -0
  25. oscura/analyzers/packet/metrics.py +437 -0
  26. oscura/analyzers/packet/parser.py +327 -0
  27. oscura/analyzers/packet/payload.py +2156 -0
  28. oscura/analyzers/packet/payload_analysis.py +1312 -0
  29. oscura/analyzers/packet/payload_extraction.py +236 -0
  30. oscura/analyzers/packet/payload_patterns.py +670 -0
  31. oscura/analyzers/packet/stream.py +359 -0
  32. oscura/analyzers/patterns/__init__.py +266 -0
  33. oscura/analyzers/patterns/clustering.py +1036 -0
  34. oscura/analyzers/patterns/discovery.py +539 -0
  35. oscura/analyzers/patterns/learning.py +797 -0
  36. oscura/analyzers/patterns/matching.py +1091 -0
  37. oscura/analyzers/patterns/periodic.py +650 -0
  38. oscura/analyzers/patterns/sequences.py +767 -0
  39. oscura/analyzers/power/__init__.py +116 -0
  40. oscura/analyzers/power/ac_power.py +391 -0
  41. oscura/analyzers/power/basic.py +383 -0
  42. oscura/analyzers/power/conduction.py +314 -0
  43. oscura/analyzers/power/efficiency.py +297 -0
  44. oscura/analyzers/power/ripple.py +356 -0
  45. oscura/analyzers/power/soa.py +372 -0
  46. oscura/analyzers/power/switching.py +479 -0
  47. oscura/analyzers/protocol/__init__.py +150 -0
  48. oscura/analyzers/protocols/__init__.py +150 -0
  49. oscura/analyzers/protocols/base.py +500 -0
  50. oscura/analyzers/protocols/can.py +620 -0
  51. oscura/analyzers/protocols/can_fd.py +448 -0
  52. oscura/analyzers/protocols/flexray.py +405 -0
  53. oscura/analyzers/protocols/hdlc.py +399 -0
  54. oscura/analyzers/protocols/i2c.py +368 -0
  55. oscura/analyzers/protocols/i2s.py +296 -0
  56. oscura/analyzers/protocols/jtag.py +393 -0
  57. oscura/analyzers/protocols/lin.py +445 -0
  58. oscura/analyzers/protocols/manchester.py +333 -0
  59. oscura/analyzers/protocols/onewire.py +501 -0
  60. oscura/analyzers/protocols/spi.py +334 -0
  61. oscura/analyzers/protocols/swd.py +325 -0
  62. oscura/analyzers/protocols/uart.py +393 -0
  63. oscura/analyzers/protocols/usb.py +495 -0
  64. oscura/analyzers/signal_integrity/__init__.py +63 -0
  65. oscura/analyzers/signal_integrity/embedding.py +294 -0
  66. oscura/analyzers/signal_integrity/equalization.py +370 -0
  67. oscura/analyzers/signal_integrity/sparams.py +484 -0
  68. oscura/analyzers/spectral/__init__.py +53 -0
  69. oscura/analyzers/spectral/chunked.py +273 -0
  70. oscura/analyzers/spectral/chunked_fft.py +571 -0
  71. oscura/analyzers/spectral/chunked_wavelet.py +391 -0
  72. oscura/analyzers/spectral/fft.py +92 -0
  73. oscura/analyzers/statistical/__init__.py +250 -0
  74. oscura/analyzers/statistical/checksum.py +923 -0
  75. oscura/analyzers/statistical/chunked_corr.py +228 -0
  76. oscura/analyzers/statistical/classification.py +778 -0
  77. oscura/analyzers/statistical/entropy.py +1113 -0
  78. oscura/analyzers/statistical/ngrams.py +614 -0
  79. oscura/analyzers/statistics/__init__.py +119 -0
  80. oscura/analyzers/statistics/advanced.py +885 -0
  81. oscura/analyzers/statistics/basic.py +263 -0
  82. oscura/analyzers/statistics/correlation.py +630 -0
  83. oscura/analyzers/statistics/distribution.py +298 -0
  84. oscura/analyzers/statistics/outliers.py +463 -0
  85. oscura/analyzers/statistics/streaming.py +93 -0
  86. oscura/analyzers/statistics/trend.py +520 -0
  87. oscura/analyzers/validation.py +598 -0
  88. oscura/analyzers/waveform/__init__.py +36 -0
  89. oscura/analyzers/waveform/measurements.py +943 -0
  90. oscura/analyzers/waveform/measurements_with_uncertainty.py +371 -0
  91. oscura/analyzers/waveform/spectral.py +1689 -0
  92. oscura/analyzers/waveform/wavelets.py +298 -0
  93. oscura/api/__init__.py +62 -0
  94. oscura/api/dsl.py +538 -0
  95. oscura/api/fluent.py +571 -0
  96. oscura/api/operators.py +498 -0
  97. oscura/api/optimization.py +392 -0
  98. oscura/api/profiling.py +396 -0
  99. oscura/automotive/__init__.py +73 -0
  100. oscura/automotive/can/__init__.py +52 -0
  101. oscura/automotive/can/analysis.py +356 -0
  102. oscura/automotive/can/checksum.py +250 -0
  103. oscura/automotive/can/correlation.py +212 -0
  104. oscura/automotive/can/discovery.py +355 -0
  105. oscura/automotive/can/message_wrapper.py +375 -0
  106. oscura/automotive/can/models.py +385 -0
  107. oscura/automotive/can/patterns.py +381 -0
  108. oscura/automotive/can/session.py +452 -0
  109. oscura/automotive/can/state_machine.py +300 -0
  110. oscura/automotive/can/stimulus_response.py +461 -0
  111. oscura/automotive/dbc/__init__.py +15 -0
  112. oscura/automotive/dbc/generator.py +156 -0
  113. oscura/automotive/dbc/parser.py +146 -0
  114. oscura/automotive/dtc/__init__.py +30 -0
  115. oscura/automotive/dtc/database.py +3036 -0
  116. oscura/automotive/j1939/__init__.py +14 -0
  117. oscura/automotive/j1939/decoder.py +745 -0
  118. oscura/automotive/loaders/__init__.py +35 -0
  119. oscura/automotive/loaders/asc.py +98 -0
  120. oscura/automotive/loaders/blf.py +77 -0
  121. oscura/automotive/loaders/csv_can.py +136 -0
  122. oscura/automotive/loaders/dispatcher.py +136 -0
  123. oscura/automotive/loaders/mdf.py +331 -0
  124. oscura/automotive/loaders/pcap.py +132 -0
  125. oscura/automotive/obd/__init__.py +14 -0
  126. oscura/automotive/obd/decoder.py +707 -0
  127. oscura/automotive/uds/__init__.py +48 -0
  128. oscura/automotive/uds/decoder.py +265 -0
  129. oscura/automotive/uds/models.py +64 -0
  130. oscura/automotive/visualization.py +369 -0
  131. oscura/batch/__init__.py +55 -0
  132. oscura/batch/advanced.py +627 -0
  133. oscura/batch/aggregate.py +300 -0
  134. oscura/batch/analyze.py +139 -0
  135. oscura/batch/logging.py +487 -0
  136. oscura/batch/metrics.py +556 -0
  137. oscura/builders/__init__.py +41 -0
  138. oscura/builders/signal_builder.py +1131 -0
  139. oscura/cli/__init__.py +14 -0
  140. oscura/cli/batch.py +339 -0
  141. oscura/cli/characterize.py +273 -0
  142. oscura/cli/compare.py +775 -0
  143. oscura/cli/decode.py +551 -0
  144. oscura/cli/main.py +247 -0
  145. oscura/cli/shell.py +350 -0
  146. oscura/comparison/__init__.py +66 -0
  147. oscura/comparison/compare.py +397 -0
  148. oscura/comparison/golden.py +487 -0
  149. oscura/comparison/limits.py +391 -0
  150. oscura/comparison/mask.py +434 -0
  151. oscura/comparison/trace_diff.py +30 -0
  152. oscura/comparison/visualization.py +481 -0
  153. oscura/compliance/__init__.py +70 -0
  154. oscura/compliance/advanced.py +756 -0
  155. oscura/compliance/masks.py +363 -0
  156. oscura/compliance/reporting.py +483 -0
  157. oscura/compliance/testing.py +298 -0
  158. oscura/component/__init__.py +38 -0
  159. oscura/component/impedance.py +365 -0
  160. oscura/component/reactive.py +598 -0
  161. oscura/component/transmission_line.py +312 -0
  162. oscura/config/__init__.py +191 -0
  163. oscura/config/defaults.py +254 -0
  164. oscura/config/loader.py +348 -0
  165. oscura/config/memory.py +271 -0
  166. oscura/config/migration.py +458 -0
  167. oscura/config/pipeline.py +1077 -0
  168. oscura/config/preferences.py +530 -0
  169. oscura/config/protocol.py +875 -0
  170. oscura/config/schema.py +713 -0
  171. oscura/config/settings.py +420 -0
  172. oscura/config/thresholds.py +599 -0
  173. oscura/convenience.py +457 -0
  174. oscura/core/__init__.py +299 -0
  175. oscura/core/audit.py +457 -0
  176. oscura/core/backend_selector.py +405 -0
  177. oscura/core/cache.py +590 -0
  178. oscura/core/cancellation.py +439 -0
  179. oscura/core/confidence.py +225 -0
  180. oscura/core/config.py +506 -0
  181. oscura/core/correlation.py +216 -0
  182. oscura/core/cross_domain.py +422 -0
  183. oscura/core/debug.py +301 -0
  184. oscura/core/edge_cases.py +541 -0
  185. oscura/core/exceptions.py +535 -0
  186. oscura/core/gpu_backend.py +523 -0
  187. oscura/core/lazy.py +832 -0
  188. oscura/core/log_query.py +540 -0
  189. oscura/core/logging.py +931 -0
  190. oscura/core/logging_advanced.py +952 -0
  191. oscura/core/memoize.py +171 -0
  192. oscura/core/memory_check.py +274 -0
  193. oscura/core/memory_guard.py +290 -0
  194. oscura/core/memory_limits.py +336 -0
  195. oscura/core/memory_monitor.py +453 -0
  196. oscura/core/memory_progress.py +465 -0
  197. oscura/core/memory_warnings.py +315 -0
  198. oscura/core/numba_backend.py +362 -0
  199. oscura/core/performance.py +352 -0
  200. oscura/core/progress.py +524 -0
  201. oscura/core/provenance.py +358 -0
  202. oscura/core/results.py +331 -0
  203. oscura/core/types.py +504 -0
  204. oscura/core/uncertainty.py +383 -0
  205. oscura/discovery/__init__.py +52 -0
  206. oscura/discovery/anomaly_detector.py +672 -0
  207. oscura/discovery/auto_decoder.py +415 -0
  208. oscura/discovery/comparison.py +497 -0
  209. oscura/discovery/quality_validator.py +528 -0
  210. oscura/discovery/signal_detector.py +769 -0
  211. oscura/dsl/__init__.py +73 -0
  212. oscura/dsl/commands.py +246 -0
  213. oscura/dsl/interpreter.py +455 -0
  214. oscura/dsl/parser.py +689 -0
  215. oscura/dsl/repl.py +172 -0
  216. oscura/exceptions.py +59 -0
  217. oscura/exploratory/__init__.py +111 -0
  218. oscura/exploratory/error_recovery.py +642 -0
  219. oscura/exploratory/fuzzy.py +513 -0
  220. oscura/exploratory/fuzzy_advanced.py +786 -0
  221. oscura/exploratory/legacy.py +831 -0
  222. oscura/exploratory/parse.py +358 -0
  223. oscura/exploratory/recovery.py +275 -0
  224. oscura/exploratory/sync.py +382 -0
  225. oscura/exploratory/unknown.py +707 -0
  226. oscura/export/__init__.py +25 -0
  227. oscura/export/wireshark/README.md +265 -0
  228. oscura/export/wireshark/__init__.py +47 -0
  229. oscura/export/wireshark/generator.py +312 -0
  230. oscura/export/wireshark/lua_builder.py +159 -0
  231. oscura/export/wireshark/templates/dissector.lua.j2 +92 -0
  232. oscura/export/wireshark/type_mapping.py +165 -0
  233. oscura/export/wireshark/validator.py +105 -0
  234. oscura/exporters/__init__.py +94 -0
  235. oscura/exporters/csv.py +303 -0
  236. oscura/exporters/exporters.py +44 -0
  237. oscura/exporters/hdf5.py +219 -0
  238. oscura/exporters/html_export.py +701 -0
  239. oscura/exporters/json_export.py +291 -0
  240. oscura/exporters/markdown_export.py +367 -0
  241. oscura/exporters/matlab_export.py +354 -0
  242. oscura/exporters/npz_export.py +219 -0
  243. oscura/exporters/spice_export.py +210 -0
  244. oscura/extensibility/__init__.py +131 -0
  245. oscura/extensibility/docs.py +752 -0
  246. oscura/extensibility/extensions.py +1125 -0
  247. oscura/extensibility/logging.py +259 -0
  248. oscura/extensibility/measurements.py +485 -0
  249. oscura/extensibility/plugins.py +414 -0
  250. oscura/extensibility/registry.py +346 -0
  251. oscura/extensibility/templates.py +913 -0
  252. oscura/extensibility/validation.py +651 -0
  253. oscura/filtering/__init__.py +89 -0
  254. oscura/filtering/base.py +563 -0
  255. oscura/filtering/convenience.py +564 -0
  256. oscura/filtering/design.py +725 -0
  257. oscura/filtering/filters.py +32 -0
  258. oscura/filtering/introspection.py +605 -0
  259. oscura/guidance/__init__.py +24 -0
  260. oscura/guidance/recommender.py +429 -0
  261. oscura/guidance/wizard.py +518 -0
  262. oscura/inference/__init__.py +251 -0
  263. oscura/inference/active_learning/README.md +153 -0
  264. oscura/inference/active_learning/__init__.py +38 -0
  265. oscura/inference/active_learning/lstar.py +257 -0
  266. oscura/inference/active_learning/observation_table.py +230 -0
  267. oscura/inference/active_learning/oracle.py +78 -0
  268. oscura/inference/active_learning/teachers/__init__.py +15 -0
  269. oscura/inference/active_learning/teachers/simulator.py +192 -0
  270. oscura/inference/adaptive_tuning.py +453 -0
  271. oscura/inference/alignment.py +653 -0
  272. oscura/inference/bayesian.py +943 -0
  273. oscura/inference/binary.py +1016 -0
  274. oscura/inference/crc_reverse.py +711 -0
  275. oscura/inference/logic.py +288 -0
  276. oscura/inference/message_format.py +1305 -0
  277. oscura/inference/protocol.py +417 -0
  278. oscura/inference/protocol_dsl.py +1084 -0
  279. oscura/inference/protocol_library.py +1230 -0
  280. oscura/inference/sequences.py +809 -0
  281. oscura/inference/signal_intelligence.py +1509 -0
  282. oscura/inference/spectral.py +215 -0
  283. oscura/inference/state_machine.py +634 -0
  284. oscura/inference/stream.py +918 -0
  285. oscura/integrations/__init__.py +59 -0
  286. oscura/integrations/llm.py +1827 -0
  287. oscura/jupyter/__init__.py +32 -0
  288. oscura/jupyter/display.py +268 -0
  289. oscura/jupyter/magic.py +334 -0
  290. oscura/loaders/__init__.py +526 -0
  291. oscura/loaders/binary.py +69 -0
  292. oscura/loaders/configurable.py +1255 -0
  293. oscura/loaders/csv.py +26 -0
  294. oscura/loaders/csv_loader.py +473 -0
  295. oscura/loaders/hdf5.py +9 -0
  296. oscura/loaders/hdf5_loader.py +510 -0
  297. oscura/loaders/lazy.py +370 -0
  298. oscura/loaders/mmap_loader.py +583 -0
  299. oscura/loaders/numpy_loader.py +436 -0
  300. oscura/loaders/pcap.py +432 -0
  301. oscura/loaders/preprocessing.py +368 -0
  302. oscura/loaders/rigol.py +287 -0
  303. oscura/loaders/sigrok.py +321 -0
  304. oscura/loaders/tdms.py +367 -0
  305. oscura/loaders/tektronix.py +711 -0
  306. oscura/loaders/validation.py +584 -0
  307. oscura/loaders/vcd.py +464 -0
  308. oscura/loaders/wav.py +233 -0
  309. oscura/math/__init__.py +45 -0
  310. oscura/math/arithmetic.py +824 -0
  311. oscura/math/interpolation.py +413 -0
  312. oscura/onboarding/__init__.py +39 -0
  313. oscura/onboarding/help.py +498 -0
  314. oscura/onboarding/tutorials.py +405 -0
  315. oscura/onboarding/wizard.py +466 -0
  316. oscura/optimization/__init__.py +19 -0
  317. oscura/optimization/parallel.py +440 -0
  318. oscura/optimization/search.py +532 -0
  319. oscura/pipeline/__init__.py +43 -0
  320. oscura/pipeline/base.py +338 -0
  321. oscura/pipeline/composition.py +242 -0
  322. oscura/pipeline/parallel.py +448 -0
  323. oscura/pipeline/pipeline.py +375 -0
  324. oscura/pipeline/reverse_engineering.py +1119 -0
  325. oscura/plugins/__init__.py +122 -0
  326. oscura/plugins/base.py +272 -0
  327. oscura/plugins/cli.py +497 -0
  328. oscura/plugins/discovery.py +411 -0
  329. oscura/plugins/isolation.py +418 -0
  330. oscura/plugins/lifecycle.py +959 -0
  331. oscura/plugins/manager.py +493 -0
  332. oscura/plugins/registry.py +421 -0
  333. oscura/plugins/versioning.py +372 -0
  334. oscura/py.typed +0 -0
  335. oscura/quality/__init__.py +65 -0
  336. oscura/quality/ensemble.py +740 -0
  337. oscura/quality/explainer.py +338 -0
  338. oscura/quality/scoring.py +616 -0
  339. oscura/quality/warnings.py +456 -0
  340. oscura/reporting/__init__.py +248 -0
  341. oscura/reporting/advanced.py +1234 -0
  342. oscura/reporting/analyze.py +448 -0
  343. oscura/reporting/argument_preparer.py +596 -0
  344. oscura/reporting/auto_report.py +507 -0
  345. oscura/reporting/batch.py +615 -0
  346. oscura/reporting/chart_selection.py +223 -0
  347. oscura/reporting/comparison.py +330 -0
  348. oscura/reporting/config.py +615 -0
  349. oscura/reporting/content/__init__.py +39 -0
  350. oscura/reporting/content/executive.py +127 -0
  351. oscura/reporting/content/filtering.py +191 -0
  352. oscura/reporting/content/minimal.py +257 -0
  353. oscura/reporting/content/verbosity.py +162 -0
  354. oscura/reporting/core.py +508 -0
  355. oscura/reporting/core_formats/__init__.py +17 -0
  356. oscura/reporting/core_formats/multi_format.py +210 -0
  357. oscura/reporting/engine.py +836 -0
  358. oscura/reporting/export.py +366 -0
  359. oscura/reporting/formatting/__init__.py +129 -0
  360. oscura/reporting/formatting/emphasis.py +81 -0
  361. oscura/reporting/formatting/numbers.py +403 -0
  362. oscura/reporting/formatting/standards.py +55 -0
  363. oscura/reporting/formatting.py +466 -0
  364. oscura/reporting/html.py +578 -0
  365. oscura/reporting/index.py +590 -0
  366. oscura/reporting/multichannel.py +296 -0
  367. oscura/reporting/output.py +379 -0
  368. oscura/reporting/pdf.py +373 -0
  369. oscura/reporting/plots.py +731 -0
  370. oscura/reporting/pptx_export.py +360 -0
  371. oscura/reporting/renderers/__init__.py +11 -0
  372. oscura/reporting/renderers/pdf.py +94 -0
  373. oscura/reporting/sections.py +471 -0
  374. oscura/reporting/standards.py +680 -0
  375. oscura/reporting/summary_generator.py +368 -0
  376. oscura/reporting/tables.py +397 -0
  377. oscura/reporting/template_system.py +724 -0
  378. oscura/reporting/templates/__init__.py +15 -0
  379. oscura/reporting/templates/definition.py +205 -0
  380. oscura/reporting/templates/index.html +649 -0
  381. oscura/reporting/templates/index.md +173 -0
  382. oscura/schemas/__init__.py +158 -0
  383. oscura/schemas/bus_configuration.json +322 -0
  384. oscura/schemas/device_mapping.json +182 -0
  385. oscura/schemas/packet_format.json +418 -0
  386. oscura/schemas/protocol_definition.json +363 -0
  387. oscura/search/__init__.py +16 -0
  388. oscura/search/anomaly.py +292 -0
  389. oscura/search/context.py +149 -0
  390. oscura/search/pattern.py +160 -0
  391. oscura/session/__init__.py +34 -0
  392. oscura/session/annotations.py +289 -0
  393. oscura/session/history.py +313 -0
  394. oscura/session/session.py +445 -0
  395. oscura/streaming/__init__.py +43 -0
  396. oscura/streaming/chunked.py +611 -0
  397. oscura/streaming/progressive.py +393 -0
  398. oscura/streaming/realtime.py +622 -0
  399. oscura/testing/__init__.py +54 -0
  400. oscura/testing/synthetic.py +808 -0
  401. oscura/triggering/__init__.py +68 -0
  402. oscura/triggering/base.py +229 -0
  403. oscura/triggering/edge.py +353 -0
  404. oscura/triggering/pattern.py +344 -0
  405. oscura/triggering/pulse.py +581 -0
  406. oscura/triggering/window.py +453 -0
  407. oscura/ui/__init__.py +48 -0
  408. oscura/ui/formatters.py +526 -0
  409. oscura/ui/progressive_display.py +340 -0
  410. oscura/utils/__init__.py +99 -0
  411. oscura/utils/autodetect.py +338 -0
  412. oscura/utils/buffer.py +389 -0
  413. oscura/utils/lazy.py +407 -0
  414. oscura/utils/lazy_imports.py +147 -0
  415. oscura/utils/memory.py +836 -0
  416. oscura/utils/memory_advanced.py +1326 -0
  417. oscura/utils/memory_extensions.py +465 -0
  418. oscura/utils/progressive.py +352 -0
  419. oscura/utils/windowing.py +362 -0
  420. oscura/visualization/__init__.py +321 -0
  421. oscura/visualization/accessibility.py +526 -0
  422. oscura/visualization/annotations.py +374 -0
  423. oscura/visualization/axis_scaling.py +305 -0
  424. oscura/visualization/colors.py +453 -0
  425. oscura/visualization/digital.py +337 -0
  426. oscura/visualization/eye.py +420 -0
  427. oscura/visualization/histogram.py +281 -0
  428. oscura/visualization/interactive.py +858 -0
  429. oscura/visualization/jitter.py +702 -0
  430. oscura/visualization/keyboard.py +394 -0
  431. oscura/visualization/layout.py +365 -0
  432. oscura/visualization/optimization.py +1028 -0
  433. oscura/visualization/palettes.py +446 -0
  434. oscura/visualization/plot.py +92 -0
  435. oscura/visualization/power.py +290 -0
  436. oscura/visualization/power_extended.py +626 -0
  437. oscura/visualization/presets.py +467 -0
  438. oscura/visualization/protocols.py +932 -0
  439. oscura/visualization/render.py +207 -0
  440. oscura/visualization/rendering.py +444 -0
  441. oscura/visualization/reverse_engineering.py +791 -0
  442. oscura/visualization/signal_integrity.py +808 -0
  443. oscura/visualization/specialized.py +553 -0
  444. oscura/visualization/spectral.py +811 -0
  445. oscura/visualization/styles.py +381 -0
  446. oscura/visualization/thumbnails.py +311 -0
  447. oscura/visualization/time_axis.py +351 -0
  448. oscura/visualization/waveform.py +367 -0
  449. oscura/workflow/__init__.py +13 -0
  450. oscura/workflow/dag.py +377 -0
  451. oscura/workflows/__init__.py +58 -0
  452. oscura/workflows/compliance.py +280 -0
  453. oscura/workflows/digital.py +272 -0
  454. oscura/workflows/multi_trace.py +502 -0
  455. oscura/workflows/power.py +178 -0
  456. oscura/workflows/protocol.py +492 -0
  457. oscura/workflows/reverse_engineering.py +639 -0
  458. oscura/workflows/signal_integrity.py +227 -0
  459. oscura-0.1.0.dist-info/METADATA +300 -0
  460. oscura-0.1.0.dist-info/RECORD +463 -0
  461. oscura-0.1.0.dist-info/entry_points.txt +2 -0
  462. {oscura-0.0.1.dist-info → oscura-0.1.0.dist-info}/licenses/LICENSE +1 -1
  463. oscura-0.0.1.dist-info/METADATA +0 -63
  464. oscura-0.0.1.dist-info/RECORD +0 -5
  465. {oscura-0.0.1.dist-info → oscura-0.1.0.dist-info}/WHEEL +0 -0
oscura/analyzers/statistics/advanced.py (new file)
@@ -0,0 +1,885 @@
+"""Advanced statistical analysis methods.
+
+This module provides advanced outlier detection and time series analysis
+methods for signal analysis.
+
+
+Example:
+    >>> from oscura.analyzers.statistics.advanced import (
+    ...     isolation_forest_outliers, local_outlier_factor,
+    ...     seasonal_decompose, detect_change_points,
+    ...     phase_coherence, kernel_density
+    ... )
+    >>> outliers = isolation_forest_outliers(trace)
+    >>> decomp = seasonal_decompose(trace, period=100)
+
+References:
+    Liu et al. (2008): Isolation Forest
+    Breunig et al. (2000): Local Outlier Factor
+    Cleveland et al. (1990): STL Decomposition
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Literal
+
+import numpy as np
+from scipy import signal
+from scipy import stats as sp_stats
+
+from oscura.core.types import WaveformTrace
+
+if TYPE_CHECKING:
+    from numpy.typing import NDArray
+
+
+@dataclass
+class IsolationForestResult:
+    """Result of Isolation Forest outlier detection.
+
+    Attributes:
+        indices: Array of outlier indices.
+        scores: Anomaly scores for all samples (-1 = outlier, 1 = normal).
+        decision_scores: Raw decision function scores.
+        mask: Boolean mask (True = outlier).
+        count: Number of outliers detected.
+        contamination: Contamination fraction used.
+
+    References:
+        STAT-011
+    """
+
+    indices: NDArray[np.intp]
+    scores: NDArray[np.int8]
+    decision_scores: NDArray[np.float64]
+    mask: NDArray[np.bool_]
+    count: int
+    contamination: float
+
+
+@dataclass
+class LOFResult:
+    """Result of Local Outlier Factor detection.
+
+    Attributes:
+        indices: Array of outlier indices.
+        scores: LOF scores for all samples (>1 = outlier).
+        mask: Boolean mask (True = outlier).
+        count: Number of outliers detected.
+        threshold: Threshold used for outlier classification.
+        n_neighbors: Number of neighbors used.
+
+    References:
+        STAT-012
+    """
+
+    indices: NDArray[np.intp]
+    scores: NDArray[np.float64]
+    mask: NDArray[np.bool_]
+    count: int
+    threshold: float
+    n_neighbors: int
+
+
+@dataclass
+class DecompositionResult:
+    """Result of seasonal decomposition.
+
+    Attributes:
+        trend: Trend component.
+        seasonal: Seasonal component.
+        residual: Residual (remainder) component.
+        period: Detected or specified period.
+        observed: Original signal.
+
+    References:
+        STAT-013
+    """
+
+    trend: NDArray[np.float64]
+    seasonal: NDArray[np.float64]
+    residual: NDArray[np.float64]
+    period: int
+    observed: NDArray[np.float64]
+
+
+@dataclass
+class ChangePointResult:
+    """Result of change point detection.
+
+    Attributes:
+        indices: Array of change point indices.
+        n_changes: Number of change points detected.
+        segments: List of (start, end) segment boundaries.
+        segment_means: Mean value for each segment.
+        segment_stds: Standard deviation for each segment.
+        cost: Total cost of the segmentation.
+
+    References:
+        STAT-014
+    """
+
+    indices: NDArray[np.intp]
+    n_changes: int
+    segments: list[tuple[int, int]]
+    segment_means: NDArray[np.float64]
+    segment_stds: NDArray[np.float64]
+    cost: float
+
+
+@dataclass
+class CoherenceResult:
+    """Result of phase coherence analysis.
+
+    Attributes:
+        coherence: Coherence spectrum (0 to 1).
+        frequencies: Frequency axis in Hz.
+        phase: Phase difference spectrum in radians.
+        mean_coherence: Average coherence across frequencies.
+        peak_frequency: Frequency of maximum coherence.
+        peak_coherence: Maximum coherence value.
+
+    References:
+        STAT-015
+    """
+
+    coherence: NDArray[np.float64]
+    frequencies: NDArray[np.float64]
+    phase: NDArray[np.float64]
+    mean_coherence: float
+    peak_frequency: float
+    peak_coherence: float
+
+
+@dataclass
+class KDEResult:
+    """Result of kernel density estimation.
+
+    Attributes:
+        x: Evaluation points.
+        density: Probability density at each point.
+        bandwidth: Bandwidth used for estimation.
+        peaks: Indices of density peaks (modes).
+        peak_values: X-values at density peaks.
+
+    References:
+        STAT-016
+    """
+
+    x: NDArray[np.float64]
+    density: NDArray[np.float64]
+    bandwidth: float
+    peaks: NDArray[np.intp]
+    peak_values: NDArray[np.float64]
+
+
+def isolation_forest_outliers(
+    trace: WaveformTrace | NDArray[np.floating[Any]],
+    *,
+    contamination: float = 0.05,
+    n_estimators: int = 100,
+    max_samples: int | str = "auto",
+    random_state: int | None = None,
+) -> IsolationForestResult:
+    """Detect outliers using Isolation Forest algorithm.
+
+    Isolation Forest isolates anomalies by randomly selecting features
+    and split values. Anomalies are isolated in fewer splits on average.
+
+    Args:
+        trace: Input trace or numpy array.
+        contamination: Expected proportion of outliers (0.0 to 0.5).
+        n_estimators: Number of isolation trees.
+        max_samples: Samples for each tree ("auto" = min(256, n_samples)).
+        random_state: Random seed for reproducibility.
+
+    Returns:
+        IsolationForestResult with outlier information.
+
+    Example:
+        >>> result = isolation_forest_outliers(trace, contamination=0.01)
+        >>> print(f"Found {result.count} outliers")
+        >>> clean_data = trace[~result.mask]
+
+    References:
+        Liu, Ting & Zhou (2008): Isolation Forest
+        STAT-011
+    """
+    data = trace.data if isinstance(trace, WaveformTrace) else np.asarray(trace)
+    n_samples = len(data)
+
+    if n_samples < 10:
+        return IsolationForestResult(
+            indices=np.array([], dtype=np.intp),
+            scores=np.ones(n_samples, dtype=np.int8),
+            decision_scores=np.zeros(n_samples, dtype=np.float64),
+            mask=np.zeros(n_samples, dtype=np.bool_),
+            count=0,
+            contamination=contamination,
+        )
+
+    # Set random state
+    rng = np.random.default_rng(random_state)
+
+    # Determine max_samples
+    max_samples_int: int
+    if max_samples == "auto":
+        max_samples_int = min(256, n_samples)
+    elif isinstance(max_samples, float):
+        max_samples_int = int(max_samples * n_samples)
+    elif isinstance(max_samples, int):
+        max_samples_int = max_samples
+    else:
+        # Fallback for any other string value
+        max_samples_int = min(256, n_samples)
+    max_samples_int = min(max_samples_int, n_samples)
+
+    # Build isolation forest
+    decision_scores = np.zeros(n_samples, dtype=np.float64)
+
+    for _ in range(n_estimators):
+        # Bootstrap sample
+        sample_idx = rng.choice(n_samples, size=max_samples_int, replace=False)
+        sample_data = data[sample_idx]
+
+        # Compute path lengths for all points
+        path_lengths = _isolation_tree_path_lengths(data, sample_data, rng)
+        decision_scores += path_lengths
+
+    # Average and normalize
+    decision_scores /= n_estimators
+
+    # Compute anomaly scores: shorter paths = anomalies
+    # Normalize using average path length formula
+    avg_path = _average_path_length(max_samples_int)
+    decision_scores = 2 ** (-decision_scores / avg_path)
+
+    # Threshold based on contamination
+    threshold = np.percentile(decision_scores, 100 * (1 - contamination))
+
+    # Classify
+    mask = decision_scores >= threshold
+    indices = np.where(mask)[0]
+    scores = np.where(mask, -1, 1).astype(np.int8)
+
+    return IsolationForestResult(
+        indices=indices.astype(np.intp),
+        scores=scores,
+        decision_scores=decision_scores,
+        mask=mask,
+        count=int(np.sum(mask)),
+        contamination=contamination,
+    )
+
+
+def _isolation_tree_path_lengths(
+    data: NDArray[Any], sample: NDArray[Any], rng: np.random.Generator
+) -> NDArray[np.float64]:
+    """Compute isolation path lengths for data points."""
+    n = len(data)
+    path_lengths = np.zeros(n, dtype=np.float64)
+
+    # Simple recursive isolation tree simulation
+    # For each point, estimate how many splits to isolate it
+    for i, point in enumerate(data):
+        path_lengths[i] = _compute_path_length(point, sample, rng, 0)
+
+    return path_lengths
+
+
+def _compute_path_length(
+    point: float,
+    sample: NDArray[Any],
+    rng: np.random.Generator,
+    depth: int,
+    max_depth: int = 20,
+) -> float:
+    """Recursively compute path length to isolate a point."""
+    if len(sample) <= 1 or depth >= max_depth:
+        return depth + _average_path_length(len(sample))
+
+    # Random split point
+    min_val, max_val = np.min(sample), np.max(sample)
+    if max_val == min_val:
+        return depth
+
+    split = rng.uniform(min_val, max_val)
+
+    if point < split:
+        left_sample = sample[sample < split]
+        return _compute_path_length(point, left_sample, rng, depth + 1, max_depth)
+    else:
+        right_sample = sample[sample >= split]
+        return _compute_path_length(point, right_sample, rng, depth + 1, max_depth)
+
+
+def _average_path_length(n: int) -> float:
+    """Compute average path length for n samples (H(n-1) formula)."""
+    if n <= 1:
+        return 0
+    if n == 2:
+        return 1
+    # Harmonic number approximation
+    return 2 * (np.log(n - 1) + 0.5772156649) - 2 * (n - 1) / n  # type: ignore[no-any-return]
+
+
+def local_outlier_factor(
+    trace: WaveformTrace | NDArray[np.floating[Any]],
+    *,
+    n_neighbors: int = 20,
+    threshold: float = 1.5,
+    metric: Literal["euclidean", "manhattan"] = "euclidean",
+) -> LOFResult:
+    """Detect outliers using Local Outlier Factor.
+
+    LOF measures local density deviation of a point with respect to
+    its neighbors. Points with substantially lower density than
+    their neighbors are considered outliers.
+
+    Args:
+        trace: Input trace or numpy array.
+        n_neighbors: Number of neighbors to use for density estimation.
+        threshold: LOF threshold for outlier classification (>1 = outlier).
+        metric: Distance metric ("euclidean" or "manhattan").
+
+    Returns:
+        LOFResult with outlier information.
+
+    Example:
+        >>> result = local_outlier_factor(trace, n_neighbors=10)
+        >>> print(f"Found {result.count} outliers")
+
+    References:
+        Breunig, Kriegel, Ng & Sander (2000): LOF Algorithm
+        STAT-012
+    """
+    data = trace.data if isinstance(trace, WaveformTrace) else np.asarray(trace)
+    n_samples = len(data)
+
+    if n_samples < n_neighbors + 1:
+        return LOFResult(
+            indices=np.array([], dtype=np.intp),
+            scores=np.ones(n_samples, dtype=np.float64),
+            mask=np.zeros(n_samples, dtype=np.bool_),
+            count=0,
+            threshold=threshold,
+            n_neighbors=n_neighbors,
+        )
+
+    # For 1D data, use index-based neighbors
+    # Reshape for compatibility
+    X = data.reshape(-1, 1)
+
+    # Compute k-distances and neighbors
+    k_distances = np.zeros(n_samples, dtype=np.float64)
+    k_neighbors = np.zeros((n_samples, n_neighbors), dtype=np.intp)
+
+    for i in range(n_samples):
+        # Compute distances to all other points
+        if metric == "euclidean":
+            distances = np.abs(X[:, 0] - X[i, 0])
+        else:  # manhattan
+            distances = np.abs(X[:, 0] - X[i, 0])
+
+        # Get k nearest neighbors (excluding self)
+        distances[i] = np.inf
+        neighbor_idx = np.argsort(distances)[:n_neighbors]
+        k_neighbors[i] = neighbor_idx
+        k_distances[i] = distances[neighbor_idx[-1]]
+
+    # Compute Local Reachability Density (LRD)
+    lrd = np.zeros(n_samples, dtype=np.float64)
+    for i in range(n_samples):
+        reach_dists = np.maximum(
+            np.abs(X[k_neighbors[i], 0] - X[i, 0]),
+            k_distances[k_neighbors[i]],
+        )
+        mean_reach_dist = np.mean(reach_dists)
+        lrd[i] = 1.0 / mean_reach_dist if mean_reach_dist > 0 else np.inf
+
+    # Compute LOF scores
+    lof_scores = np.zeros(n_samples, dtype=np.float64)
+    for i in range(n_samples):
+        neighbor_lrd = lrd[k_neighbors[i]]
+        lof_scores[i] = np.mean(neighbor_lrd) / lrd[i] if lrd[i] > 0 else 1.0
+
+    # Handle infinities
+    lof_scores = np.nan_to_num(lof_scores, nan=1.0, posinf=threshold * 2)
+
+    # Classify outliers
+    mask = lof_scores > threshold
+    indices = np.where(mask)[0]
+
+    return LOFResult(
+        indices=indices.astype(np.intp),
+        scores=lof_scores,
+        mask=mask,
+        count=int(np.sum(mask)),
+        threshold=threshold,
+        n_neighbors=n_neighbors,
+    )
+
+
+def seasonal_decompose(
+    trace: WaveformTrace | NDArray[np.floating[Any]],
+    *,
+    period: int | None = None,
+    model: Literal["additive", "multiplicative"] = "additive",
+) -> DecompositionResult:
+    """Decompose time series into trend, seasonal, and residual components.
+
+    Uses classical decomposition (moving average for trend extraction).
+
+    Args:
+        trace: Input trace or numpy array.
+        period: Period of seasonality. If None, auto-detected.
+        model: Decomposition model:
+            - "additive": y = trend + seasonal + residual
+            - "multiplicative": y = trend * seasonal * residual
+
+    Returns:
+        DecompositionResult with trend, seasonal, and residual components.
+
+    Example:
+        >>> result = seasonal_decompose(trace, period=100)
+        >>> plt.plot(result.trend, label="Trend")
+        >>> plt.plot(result.seasonal, label="Seasonal")
+
+    References:
+        Cleveland et al. (1990): STL Decomposition
+        STAT-013
+    """
+    data = trace.data if isinstance(trace, WaveformTrace) else np.asarray(trace)
+    n = len(data)
+
+    # Auto-detect period if not provided
+    if period is None:
+        period = _detect_period(data)
+        if period is None or period < 2:
+            period = min(n // 4, 10)  # Default fallback
+
+    period = max(2, min(period, n // 2))
+
+    # Extract trend using centered moving average
+    if period % 2 == 0:
+        # For even period, use 2-stage moving average
+        ma = np.convolve(data, np.ones(period) / period, mode="same")
+        trend = np.convolve(ma, np.ones(2) / 2, mode="same")
+    else:
+        trend = np.convolve(data, np.ones(period) / period, mode="same")
+
+    # Handle edges
+    half_period = period // 2
+    trend[:half_period] = trend[half_period]
+    trend[-half_period:] = trend[-half_period - 1]
+
+    # Detrend
+    if model == "multiplicative":
+        with np.errstate(divide="ignore", invalid="ignore"):
+            detrended = data / trend
+        detrended = np.nan_to_num(detrended, nan=1.0)
+    else:
+        detrended = data - trend
+
+    # Extract seasonal component (average for each phase)
+    seasonal = np.zeros_like(data)
+    for i in range(period):
+        indices = np.arange(i, n, period)
+        seasonal_mean = np.mean(detrended[indices])
+        seasonal[indices] = seasonal_mean
+
+    # Center seasonal component
+    if model == "multiplicative":
+        seasonal /= np.mean(seasonal)
+    else:
+        seasonal -= np.mean(seasonal)
+
+    # Compute residual
+    if model == "multiplicative":
+        with np.errstate(divide="ignore", invalid="ignore"):
+            residual = data / (trend * seasonal)
+        residual = np.nan_to_num(residual, nan=1.0)
+    else:
+        residual = data - trend - seasonal
+
+    return DecompositionResult(
+        trend=trend.astype(np.float64),
+        seasonal=seasonal.astype(np.float64),
+        residual=residual.astype(np.float64),
+        period=period,
+        observed=data.astype(np.float64),
+    )
+
+
+def _detect_period(data: NDArray[Any]) -> int | None:
+    """Auto-detect dominant period using autocorrelation."""
+    n = len(data)
+    if n < 20:
+        return None
+
+    # Compute autocorrelation
+    data_centered = data - np.mean(data)
+    acf = np.correlate(data_centered, data_centered, mode="full")
+    acf = acf[n - 1 :]  # Keep positive lags only
+    acf = acf / acf[0]  # Normalize
+
+    # Find first significant peak after lag 0
+    # Skip first few lags to avoid noise
+    min_lag = max(2, n // 100)
+    max_lag = n // 2
+
+    # Find peaks in autocorrelation
+    peaks, _ = signal.find_peaks(acf[min_lag:max_lag], height=0.1, distance=min_lag)
+
+    if len(peaks) > 0:
+        return peaks[0] + min_lag  # type: ignore[no-any-return]
+
+    return None
+
+
+def detect_change_points(
+    trace: WaveformTrace | NDArray[np.floating[Any]],
+    *,
+    n_changes: int | None = None,
+    min_size: int = 10,
+    penalty: float | None = None,
+    method: Literal["pelt", "binseg"] = "pelt",
+) -> ChangePointResult:
+    """Detect change points in time series.
+
+    Identifies points where the statistical properties of the signal
+    change significantly.
+
+    Args:
+        trace: Input trace or numpy array.
+        n_changes: Number of change points to find. If None, auto-detected.
+        min_size: Minimum segment length between change points.
+        penalty: Penalty for adding change points (higher = fewer changes).
+        method: Detection method:
+            - "pelt": Pruned Exact Linear Time (fast, optimal)
+            - "binseg": Binary Segmentation (fast, approximate)
+
+    Returns:
+        ChangePointResult with change point locations and segment info.
+
+    Example:
+        >>> result = detect_change_points(trace, n_changes=3)
+        >>> for start, end in result.segments:
+        ...     print(f"Segment: {start} to {end}")
+
+    References:
+        Killick et al. (2012): PELT Algorithm
+        STAT-014
+    """
+    data = trace.data if isinstance(trace, WaveformTrace) else np.asarray(trace)
+    n = len(data)
+
+    if n < min_size * 2:
+        return ChangePointResult(
+            indices=np.array([], dtype=np.intp),
+            n_changes=0,
+            segments=[(0, n)],
+            segment_means=np.array([np.mean(data)]),
+            segment_stds=np.array([np.std(data)]),
+            cost=0.0,
+        )
+
+    # Set default penalty based on BIC
+    if penalty is None:
+        penalty = np.log(n) * np.var(data)
+
+    if method == "pelt":
+        change_points = _pelt_change_points(data, min_size, penalty, n_changes)
+    else:
+        change_points = _binseg_change_points(data, min_size, penalty, n_changes)
+
+    # Build segments
+    all_points = [0, *list(change_points), n]
+    segments = [(all_points[i], all_points[i + 1]) for i in range(len(all_points) - 1)]
+
+    # Compute segment statistics
+    segment_means = np.array([np.mean(data[s:e]) for s, e in segments])
+    segment_stds = np.array([np.std(data[s:e]) for s, e in segments])
+
+    # Compute total cost
+    total_cost = sum(_segment_cost(data[s:e]) for s, e in segments) + penalty * len(change_points)
+
+    return ChangePointResult(
+        indices=np.array(change_points, dtype=np.intp),
+        n_changes=len(change_points),
+        segments=segments,
+        segment_means=segment_means,
+        segment_stds=segment_stds,
+        cost=float(total_cost),
+    )
+
+
+def _segment_cost(segment: NDArray[Any]) -> float:
+    """Compute cost of a segment (negative log-likelihood for normal)."""
+    n = len(segment)
+    if n < 2:
+        return 0.0
+    var = np.var(segment)
+    if var <= 0:
+        return 0.0
+    return n * np.log(var)  # type: ignore[no-any-return]
+
+
+def _pelt_change_points(
+    data: NDArray[Any],
+    min_size: int,
+    penalty: float,
+    n_changes: int | None,
+) -> list[int]:
+    """PELT algorithm for change point detection."""
+    len(data)
+
+    # Simple implementation: use binary segmentation as approximation
+    # Full PELT requires dynamic programming which is more complex
+    return _binseg_change_points(data, min_size, penalty, n_changes)
+
+
+def _binseg_change_points(
+    data: NDArray[Any],
+    min_size: int,
+    penalty: float,
+    n_changes: int | None,
+) -> list[int]:
+    """Binary segmentation for change point detection."""
+    n = len(data)
+    change_points: list[int] = []
+
+    def find_best_split(start: int, end: int) -> tuple[int, float]:
+        """Find best split point in segment."""
+        if end - start < 2 * min_size:
+            return -1, 0.0
+
+        best_idx = -1
+        best_gain = 0.0
+
+        for i in range(start + min_size, end - min_size + 1):
+            left = data[start:i]
+            right = data[i:end]
+            full = data[start:end]
+
+            cost_full = _segment_cost(full)
+            cost_split = _segment_cost(left) + _segment_cost(right)
+            gain = cost_full - cost_split - penalty
+
+            if gain > best_gain:
+                best_gain = gain
+                best_idx = i
+
+        return best_idx, best_gain
+
+    # Iteratively find change points
+    segments = [(0, n)]
+    max_iter = n_changes if n_changes is not None else n // min_size
+
+    for _ in range(max_iter):
+        best_segment_idx = -1
+        best_split_idx = -1
+        best_gain = 0.0
+
+        for seg_idx, (start, end) in enumerate(segments):
+            split_idx, gain = find_best_split(start, end)
+            if gain > best_gain:
+                best_gain = gain
+                best_split_idx = split_idx
+                best_segment_idx = seg_idx
+
+        if best_split_idx == -1:
+            break
+
+        # Add change point
+        change_points.append(best_split_idx)
+
+        # Update segments
+        start, end = segments[best_segment_idx]
+        segments[best_segment_idx] = (start, best_split_idx)
+        segments.insert(best_segment_idx + 1, (best_split_idx, end))
+
+    return sorted(change_points)
+
+
+def phase_coherence(
+    trace1: WaveformTrace | NDArray[np.floating[Any]],
+    trace2: WaveformTrace | NDArray[np.floating[Any]],
+    *,
+    sample_rate: float | None = None,
+    nperseg: int | None = None,
+) -> CoherenceResult:
+    """Compute phase coherence between two signals.
+
+    Coherence measures the linear correlation between two signals
+    as a function of frequency.
+
+    Args:
+        trace1: First input trace.
+        trace2: Second input trace.
+        sample_rate: Sample rate in Hz. Required if traces are arrays.
+        nperseg: Segment length for Welch method.
+
+    Returns:
+        CoherenceResult with coherence spectrum and phase.
+
+    Example:
+        >>> result = phase_coherence(signal1, signal2, sample_rate=1e6)
+        >>> print(f"Mean coherence: {result.mean_coherence:.3f}")
+
+    References:
+        STAT-015
+    """
+    data1 = trace1.data if isinstance(trace1, WaveformTrace) else np.asarray(trace1)
+    data2 = trace2.data if isinstance(trace2, WaveformTrace) else np.asarray(trace2)
+
+    # Get sample rate
+    if sample_rate is None:
+        sample_rate = trace1.metadata.sample_rate if isinstance(trace1, WaveformTrace) else 1.0
+
+    # Ensure same length
+    n = min(len(data1), len(data2))
+    data1 = data1[:n]
+    data2 = data2[:n]
+
+    if nperseg is None:
+        nperseg = min(256, n // 4)
+    nperseg = max(16, min(nperseg, n))
+
+    # Compute coherence
+    frequencies, coherence = signal.coherence(data1, data2, fs=sample_rate, nperseg=nperseg)
+
+    # Compute cross-spectral phase
+    _, Pxy = signal.csd(data1, data2, fs=sample_rate, nperseg=nperseg)
+    phase = np.angle(Pxy)
+
+    # Statistics
+    mean_coherence = float(np.mean(coherence))
+    peak_idx = np.argmax(coherence)
+    peak_frequency = float(frequencies[peak_idx])
+    peak_coherence = float(coherence[peak_idx])
+
+    return CoherenceResult(
+        coherence=coherence.astype(np.float64),
+        frequencies=frequencies.astype(np.float64),
+        phase=phase.astype(np.float64),
+        mean_coherence=mean_coherence,
+        peak_frequency=peak_frequency,
+        peak_coherence=peak_coherence,
+    )
+
+
+def kernel_density(
+    trace: WaveformTrace | NDArray[np.floating[Any]],
+    *,
+    n_points: int = 1000,
+    bandwidth: float | str = "scott",
+    kernel: Literal["gaussian", "tophat", "epanechnikov"] = "gaussian",
+) -> KDEResult:
+    """Estimate probability density using kernel density estimation.
+
+    Args:
+        trace: Input trace or numpy array.
+        n_points: Number of evaluation points.
+        bandwidth: Bandwidth for kernel ("scott", "silverman", or float).
+        kernel: Kernel function to use.
+
+    Returns:
+        KDEResult with density estimate and mode information.
+
+    Raises:
+        ValueError: If kernel is not one of the supported types.
+
+    Example:
+        >>> result = kernel_density(trace)
+        >>> plt.plot(result.x, result.density)
+        >>> print(f"Modes at: {result.peak_values}")
+
+    References:
+        Scott (1992): Multivariate Density Estimation
+        STAT-016
+    """
+    data = trace.data if isinstance(trace, WaveformTrace) else np.asarray(trace)
+    n = len(data)
+
+    if n < 2:
+        return KDEResult(
+            x=np.array([np.mean(data)]),
+            density=np.array([1.0]),
+            bandwidth=0.0,
+            peaks=np.array([0], dtype=np.intp),
+            peak_values=np.array([np.mean(data)]),
+        )
+
+    # Compute bandwidth
+    std = np.std(data)
+    iqr = np.percentile(data, 75) - np.percentile(data, 25)
+
+    if isinstance(bandwidth, str):
+        if bandwidth == "scott":
+            bw = 1.06 * std * n ** (-1 / 5)
+        elif bandwidth == "silverman":
+            bw = 0.9 * min(std, iqr / 1.34) * n ** (-1 / 5)
+        else:
+            bw = 1.06 * std * n ** (-1 / 5)
+    else:
+        bw = bandwidth
+
+    bw = max(bw, 1e-10)  # Prevent zero bandwidth
+
+    # Evaluation grid
+    margin = 3 * bw
+    x_min = np.min(data) - margin
+    x_max = np.max(data) + margin
+    x = np.linspace(x_min, x_max, n_points)
+
+    # Compute density
+    if kernel == "gaussian":
+        kde = sp_stats.gaussian_kde(data, bw_method=bw / std if std > 0 else 1.0)
+        density = kde(x)
+    elif kernel == "tophat":
+        density = np.zeros(n_points)
+        for xi in data:
+            mask = np.abs(x - xi) <= bw
+            density[mask] += 1.0
+        density /= n * 2 * bw
+    elif kernel == "epanechnikov":
+        density = np.zeros(n_points)
+        for xi in data:
+            u = (x - xi) / bw
+            mask = np.abs(u) <= 1
+            density[mask] += 0.75 * (1 - u[mask] ** 2)
+        density /= n * bw
+    else:
+        raise ValueError(f"Unknown kernel: {kernel}")
+
+    # Find peaks (modes)
+    peaks_idx, _ = signal.find_peaks(density)
+    if len(peaks_idx) == 0:
+        peaks_idx = np.array([np.argmax(density)])
+    peak_values = x[peaks_idx]
+
+    return KDEResult(
+        x=x.astype(np.float64),
+        density=density.astype(np.float64),
+        bandwidth=float(bw),
+        peaks=peaks_idx.astype(np.intp),
+        peak_values=peak_values.astype(np.float64),
+    )
+
+
+__all__ = [
+    "ChangePointResult",
+    "CoherenceResult",
+    "DecompositionResult",
+    "IsolationForestResult",
+    "KDEResult",
+    "LOFResult",
+    "detect_change_points",
+    "isolation_forest_outliers",
+    "kernel_density",
+    "local_outlier_factor",
+    "phase_coherence",
+    "seasonal_decompose",
+]
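
For orientation, a minimal usage sketch of the new module based on the signatures and docstring examples shown in the diff above. The synthetic numpy input is an assumption standing in for a real WaveformTrace, and the snippet has not been run against the published wheel.

import numpy as np

from oscura.analyzers.statistics.advanced import (
    detect_change_points,
    isolation_forest_outliers,
    kernel_density,
    seasonal_decompose,
)

# Synthetic 1-D signal: a slow sine "season", a mean shift halfway through,
# and a few injected spikes (assumed stand-in for a real WaveformTrace).
rng = np.random.default_rng(0)
t = np.arange(2000)
data = np.sin(2 * np.pi * t / 100) + 0.1 * rng.standard_normal(t.size)
data[1000:] += 2.0                 # step change for detect_change_points
data[[150, 900, 1700]] += 6.0      # spikes for the outlier detector

outliers = isolation_forest_outliers(data, contamination=0.01, random_state=0)
print(f"Isolation Forest flagged {outliers.count} samples")

decomp = seasonal_decompose(data, period=100)
print(f"Residual std: {decomp.residual.std():.3f}")

changes = detect_change_points(data, n_changes=1)
print(f"Change points at indices: {changes.indices}")

kde = kernel_density(data, bandwidth="silverman")
print(f"KDE modes near: {kde.peak_values}")

Each call returns one of the result dataclasses added in this file (IsolationForestResult, DecompositionResult, ChangePointResult, KDEResult), so downstream code can work with named fields rather than positional tuples.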