oscura-0.8.0-py3-none-any.whl → oscura-0.10.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (151)
  1. oscura/__init__.py +19 -19
  2. oscura/analyzers/__init__.py +2 -0
  3. oscura/analyzers/digital/extraction.py +2 -3
  4. oscura/analyzers/digital/quality.py +1 -1
  5. oscura/analyzers/digital/timing.py +1 -1
  6. oscura/analyzers/patterns/__init__.py +66 -0
  7. oscura/analyzers/power/basic.py +3 -3
  8. oscura/analyzers/power/soa.py +1 -1
  9. oscura/analyzers/power/switching.py +3 -3
  10. oscura/analyzers/signal_classification.py +529 -0
  11. oscura/analyzers/signal_integrity/sparams.py +3 -3
  12. oscura/analyzers/statistics/basic.py +10 -7
  13. oscura/analyzers/validation.py +1 -1
  14. oscura/analyzers/waveform/measurements.py +200 -156
  15. oscura/analyzers/waveform/measurements_with_uncertainty.py +91 -35
  16. oscura/analyzers/waveform/spectral.py +164 -73
  17. oscura/api/dsl/commands.py +15 -6
  18. oscura/api/server/templates/base.html +137 -146
  19. oscura/api/server/templates/export.html +84 -110
  20. oscura/api/server/templates/home.html +248 -267
  21. oscura/api/server/templates/protocols.html +44 -48
  22. oscura/api/server/templates/reports.html +27 -35
  23. oscura/api/server/templates/session_detail.html +68 -78
  24. oscura/api/server/templates/sessions.html +62 -72
  25. oscura/api/server/templates/waveforms.html +54 -64
  26. oscura/automotive/__init__.py +1 -1
  27. oscura/automotive/can/session.py +1 -1
  28. oscura/automotive/dbc/generator.py +638 -23
  29. oscura/automotive/uds/decoder.py +99 -6
  30. oscura/cli/analyze.py +8 -2
  31. oscura/cli/batch.py +36 -5
  32. oscura/cli/characterize.py +18 -4
  33. oscura/cli/export.py +47 -5
  34. oscura/cli/main.py +2 -0
  35. oscura/cli/onboarding/wizard.py +10 -6
  36. oscura/cli/pipeline.py +585 -0
  37. oscura/cli/visualize.py +6 -4
  38. oscura/convenience.py +400 -32
  39. oscura/core/measurement_result.py +286 -0
  40. oscura/core/progress.py +1 -1
  41. oscura/core/types.py +232 -239
  42. oscura/correlation/multi_protocol.py +1 -1
  43. oscura/export/legacy/__init__.py +11 -0
  44. oscura/export/legacy/wav.py +75 -0
  45. oscura/exporters/__init__.py +19 -0
  46. oscura/exporters/wireshark.py +809 -0
  47. oscura/hardware/acquisition/file.py +5 -19
  48. oscura/hardware/acquisition/saleae.py +10 -10
  49. oscura/hardware/acquisition/socketcan.py +4 -6
  50. oscura/hardware/acquisition/synthetic.py +1 -5
  51. oscura/hardware/acquisition/visa.py +6 -6
  52. oscura/hardware/security/side_channel_detector.py +5 -508
  53. oscura/inference/message_format.py +686 -1
  54. oscura/jupyter/display.py +2 -2
  55. oscura/jupyter/magic.py +3 -3
  56. oscura/loaders/__init__.py +17 -12
  57. oscura/loaders/binary.py +1 -1
  58. oscura/loaders/chipwhisperer.py +1 -2
  59. oscura/loaders/configurable.py +1 -1
  60. oscura/loaders/csv_loader.py +2 -2
  61. oscura/loaders/hdf5_loader.py +1 -1
  62. oscura/loaders/lazy.py +6 -1
  63. oscura/loaders/mmap_loader.py +0 -1
  64. oscura/loaders/numpy_loader.py +8 -7
  65. oscura/loaders/preprocessing.py +3 -5
  66. oscura/loaders/rigol.py +21 -7
  67. oscura/loaders/sigrok.py +2 -5
  68. oscura/loaders/tdms.py +3 -2
  69. oscura/loaders/tektronix.py +38 -32
  70. oscura/loaders/tss.py +20 -27
  71. oscura/loaders/vcd.py +13 -8
  72. oscura/loaders/wav.py +1 -6
  73. oscura/pipeline/__init__.py +76 -0
  74. oscura/pipeline/handlers/__init__.py +165 -0
  75. oscura/pipeline/handlers/analyzers.py +1045 -0
  76. oscura/pipeline/handlers/decoders.py +899 -0
  77. oscura/pipeline/handlers/exporters.py +1103 -0
  78. oscura/pipeline/handlers/filters.py +891 -0
  79. oscura/pipeline/handlers/loaders.py +640 -0
  80. oscura/pipeline/handlers/transforms.py +768 -0
  81. oscura/reporting/formatting/measurements.py +55 -14
  82. oscura/reporting/templates/enhanced/protocol_re.html +504 -503
  83. oscura/side_channel/__init__.py +38 -57
  84. oscura/utils/builders/signal_builder.py +5 -5
  85. oscura/utils/comparison/compare.py +7 -9
  86. oscura/utils/comparison/golden.py +1 -1
  87. oscura/utils/filtering/convenience.py +2 -2
  88. oscura/utils/math/arithmetic.py +38 -62
  89. oscura/utils/math/interpolation.py +20 -20
  90. oscura/utils/pipeline/__init__.py +4 -17
  91. oscura/utils/progressive.py +1 -4
  92. oscura/utils/triggering/edge.py +1 -1
  93. oscura/utils/triggering/pattern.py +2 -2
  94. oscura/utils/triggering/pulse.py +2 -2
  95. oscura/utils/triggering/window.py +3 -3
  96. oscura/validation/hil_testing.py +11 -11
  97. oscura/visualization/__init__.py +46 -284
  98. oscura/visualization/batch.py +72 -433
  99. oscura/visualization/plot.py +542 -53
  100. oscura/visualization/styles.py +184 -318
  101. oscura/workflows/batch/advanced.py +1 -1
  102. oscura/workflows/batch/aggregate.py +7 -8
  103. oscura/workflows/complete_re.py +251 -23
  104. oscura/workflows/digital.py +27 -4
  105. oscura/workflows/multi_trace.py +136 -17
  106. oscura/workflows/waveform.py +11 -6
  107. {oscura-0.8.0.dist-info → oscura-0.10.0.dist-info}/METADATA +59 -79
  108. {oscura-0.8.0.dist-info → oscura-0.10.0.dist-info}/RECORD +111 -136
  109. oscura/side_channel/dpa.py +0 -1025
  110. oscura/utils/optimization/__init__.py +0 -19
  111. oscura/utils/optimization/parallel.py +0 -443
  112. oscura/utils/optimization/search.py +0 -532
  113. oscura/utils/pipeline/base.py +0 -338
  114. oscura/utils/pipeline/composition.py +0 -248
  115. oscura/utils/pipeline/parallel.py +0 -449
  116. oscura/utils/pipeline/pipeline.py +0 -375
  117. oscura/utils/search/__init__.py +0 -16
  118. oscura/utils/search/anomaly.py +0 -424
  119. oscura/utils/search/context.py +0 -294
  120. oscura/utils/search/pattern.py +0 -288
  121. oscura/utils/storage/__init__.py +0 -61
  122. oscura/utils/storage/database.py +0 -1166
  123. oscura/visualization/accessibility.py +0 -526
  124. oscura/visualization/annotations.py +0 -371
  125. oscura/visualization/axis_scaling.py +0 -305
  126. oscura/visualization/colors.py +0 -451
  127. oscura/visualization/digital.py +0 -436
  128. oscura/visualization/eye.py +0 -571
  129. oscura/visualization/histogram.py +0 -281
  130. oscura/visualization/interactive.py +0 -1035
  131. oscura/visualization/jitter.py +0 -1042
  132. oscura/visualization/keyboard.py +0 -394
  133. oscura/visualization/layout.py +0 -400
  134. oscura/visualization/optimization.py +0 -1079
  135. oscura/visualization/palettes.py +0 -446
  136. oscura/visualization/power.py +0 -508
  137. oscura/visualization/power_extended.py +0 -955
  138. oscura/visualization/presets.py +0 -469
  139. oscura/visualization/protocols.py +0 -1246
  140. oscura/visualization/render.py +0 -223
  141. oscura/visualization/rendering.py +0 -444
  142. oscura/visualization/reverse_engineering.py +0 -838
  143. oscura/visualization/signal_integrity.py +0 -989
  144. oscura/visualization/specialized.py +0 -643
  145. oscura/visualization/spectral.py +0 -1226
  146. oscura/visualization/thumbnails.py +0 -340
  147. oscura/visualization/time_axis.py +0 -351
  148. oscura/visualization/waveform.py +0 -454
  149. {oscura-0.8.0.dist-info → oscura-0.10.0.dist-info}/WHEEL +0 -0
  150. {oscura-0.8.0.dist-info → oscura-0.10.0.dist-info}/entry_points.txt +0 -0
  151. {oscura-0.8.0.dist-info → oscura-0.10.0.dist-info}/licenses/LICENSE +0 -0
oscura/utils/pipeline/pipeline.py
@@ -1,375 +0,0 @@
- """Pipeline architecture for chaining trace transformations.
-
- This module implements sklearn-style pipeline composition for trace operations,
- enabling declarative, reusable analysis workflows.
- """
-
- from __future__ import annotations
-
- from typing import TYPE_CHECKING, Any
-
- from .base import TraceTransformer
-
- if TYPE_CHECKING:
-     from collections.abc import Sequence
-
-     from oscura.core.types import WaveformTrace
-
-
- class Pipeline(TraceTransformer):
-     """Chain multiple trace transformers into a single processing pipeline.
-
-     Pipeline applies transformers sequentially: each stage transforms the output
-     of the previous stage. Supports the fit/transform pattern and can be
-     serialized with pickle or joblib.
-
-     The pipeline is itself a TraceTransformer, so pipelines can be nested.
-
-     Attributes:
-         steps: List of (name, transformer) tuples defining the pipeline stages.
-         named_steps: Dictionary mapping step names to transformers.
-
-     Example:
-         >>> import oscura as osc
-         >>> pipeline = osc.Pipeline([
-         ...     ('lowpass', osc.LowPassFilter(cutoff=1e6)),
-         ...     ('resample', osc.Resample(rate=1e9)),
-         ...     ('normalize', osc.Normalize())
-         ... ])
-         >>> result = pipeline.transform(trace)
-
-     Advanced Example:
-         >>> # Create analysis pipeline with fit/transform
-         >>> pipeline = osc.Pipeline([
-         ...     ('filter', osc.BandPassFilter(low=1e5, high=1e6)),
-         ...     ('normalize', osc.Normalize(method='zscore')),
-         ...     ('fft', osc.FFT(nfft=8192, window='hann')),
-         ...     ('extract', osc.ExtractMeasurement('thd'))
-         ... ])
-         >>> # Fit on reference trace
-         >>> pipeline.fit(reference_trace)
-         >>> # Transform multiple traces
-         >>> results = [pipeline.transform(t) for t in traces]
-         >>> # Access intermediate results
-         >>> filtered = pipeline.named_steps['filter'].transform(trace)
-         >>> # Save for reuse
-         >>> import joblib
-         >>> joblib.dump(pipeline, 'analysis_pipeline.pkl')
-
-     References:
-         API-001: sklearn-style Pipeline Architecture
-         sklearn.pipeline.Pipeline
-         https://scikit-learn.org/stable/modules/compose.html
-     """
-
-     def __init__(self, steps: Sequence[tuple[str, TraceTransformer]]) -> None:
-         """Initialize pipeline with sequence of transformers.
-
-         Args:
-             steps: Sequence of (name, transformer) tuples. Each transformer
-                 must be a TraceTransformer instance.
-
-         Raises:
-             TypeError: If any step is not a TraceTransformer.
-             ValueError: If step names are not unique or empty.
-         """
-         if not steps:
-             raise ValueError("Pipeline steps cannot be empty")
-
-         # Validate steps
-         names = []
-         for name, transformer in steps:
-             if not name:
-                 raise ValueError("Step name cannot be empty")
-             if not isinstance(transformer, TraceTransformer):
-                 raise TypeError(
-                     f"All pipeline steps must be TraceTransformer instances. "
-                     f"Step '{name}' is {type(transformer).__name__}"
-                 )
-             names.append(name)
-
-         # Check for duplicate names
-         if len(names) != len(set(names)):
-             duplicates = [n for n in names if names.count(n) > 1]
-             raise ValueError(f"Duplicate step names: {set(duplicates)}")
-
-         self.steps = list(steps)
-         self.named_steps = dict(steps)
-         self._intermediate_results: dict[str, WaveformTrace] = {}
-
-     def fit(self, trace: WaveformTrace) -> Pipeline:
-         """Fit all transformers in the pipeline.
-
-         Fits each transformer sequentially on the output of the previous stage.
-         This allows stateful transformers to learn parameters from the trace.
-
-         Args:
-             trace: Reference WaveformTrace to fit to.
-
-         Returns:
-             Self for method chaining.
-
-         Example:
-             >>> pipeline = Pipeline([
-             ...     ('normalize', AdaptiveNormalizer()),
-             ...     ('filter', AdaptiveFilter())
-             ... ])
-             >>> pipeline.fit(reference_trace)
-         """
-         current = trace
-         for _name, transformer in self.steps:
-             # Fit transformer to current trace
-             transformer.fit(current)
-             # Transform for next stage
-             current = transformer.transform(current)
-         return self
-
-     def transform(self, trace: WaveformTrace) -> WaveformTrace:
-         """Transform trace through all pipeline stages.
-
-         Applies each transformer sequentially, passing the output of each
-         stage to the next. Optionally caches intermediate results.
-
-         Args:
-             trace: Input WaveformTrace to transform.
-
-         Returns:
-             Transformed WaveformTrace after passing through all stages.
-
-         Example:
-             >>> result = pipeline.transform(trace)
-         """
-         current = trace
-         self._intermediate_results.clear()
-
-         for name, transformer in self.steps:
-             current = transformer.transform(current)
-             # Cache intermediate result for introspection
-             self._intermediate_results[name] = current
-
-         return current
-
-     def get_intermediate(self, step_name: str, key: str | None = None) -> Any:
-         """Get intermediate result from a pipeline stage.
-
-         Retrieves the cached output from a specific pipeline stage after
-         transform() has been called. Can also access internal intermediate
-         results from transformers that cache them (e.g., FFT coefficients).
-
-
-         Args:
-             step_name: Name of the pipeline step.
-             key: Optional key for transformer-internal intermediate result.
-                 If None, returns the trace output from that stage.
-
-         Returns:
-             WaveformTrace output from that stage (if key=None), or
-             specific intermediate result from the transformer.
-
-         Raises:
-             KeyError: If step name not found or transform() not yet called.
-
-         Example:
-             >>> pipeline = Pipeline([
-             ...     ('filter', LowPassFilter(1e6)),
-             ...     ('fft', FFT(nfft=8192)),
-             ...     ('normalize', Normalize())
-             ... ])
-             >>> result = pipeline.transform(trace)
-             >>> # Get trace output from filter stage
-             >>> filtered = pipeline.get_intermediate('filter')
-             >>> # Get FFT coefficients from FFT stage
-             >>> fft_spectrum = pipeline.get_intermediate('fft', 'spectrum')
-             >>> fft_frequencies = pipeline.get_intermediate('fft', 'frequencies')
-
-         References:
-             API-005: Intermediate Result Access
-         """
-         if step_name not in self._intermediate_results:
-             if step_name not in self.named_steps:
-                 raise KeyError(f"Step '{step_name}' not found in pipeline")
-             raise KeyError(
-                 f"No intermediate result for step '{step_name}'. Call transform() first."
-             )
-
-         # If no key specified, return the trace output from that stage
-         if key is None:
-             return self._intermediate_results[step_name]
-
-         # Otherwise, try to get internal intermediate from the transformer
-         transformer = self.named_steps[step_name]
-         return transformer.get_intermediate_result(key)
-
-     def has_intermediate(self, step_name: str, key: str | None = None) -> bool:
-         """Check if intermediate result is available.
-
-         Args:
-             step_name: Name of the pipeline step.
-             key: Optional key for transformer-internal intermediate result.
-
-         Returns:
-             True if intermediate result exists.
-
-         Example:
-             >>> if pipeline.has_intermediate('fft', 'spectrum'):
-             ...     spectrum = pipeline.get_intermediate('fft', 'spectrum')
-
-         References:
-             API-005: Intermediate Result Access
-         """
-         if step_name not in self._intermediate_results:
-             return False
-
-         if key is None:
-             return True
-
-         transformer = self.named_steps[step_name]
-         return transformer.has_intermediate_result(key)
-
-     def list_intermediates(self, step_name: str | None = None) -> list[str] | dict[str, list[str]]:
-         """List available intermediate results.
-
-         Args:
-             step_name: If specified, list intermediates for that step only.
-                 If None, return dict of all steps with their intermediates.
-
-         Returns:
-             List of intermediate keys for a step, or dict mapping step names
-             to their available intermediates.
-
-         Raises:
-             KeyError: If step_name not found in pipeline.
-
-         Example:
-             >>> # List all intermediates
-             >>> all_intermediates = pipeline.list_intermediates()
-             >>> print(all_intermediates)
-             {'filter': ['transfer_function', 'impulse_response'],
-              'fft': ['spectrum', 'frequencies', 'power', 'phase']}
-             >>> # List intermediates for specific step
-             >>> fft_intermediates = pipeline.list_intermediates('fft')
-             >>> print(fft_intermediates)
-             ['spectrum', 'frequencies', 'power', 'phase']
-
-         References:
-             API-005: Intermediate Result Access
-         """
-         if step_name is not None:
-             if step_name not in self.named_steps:
-                 raise KeyError(f"Step '{step_name}' not found in pipeline")
-             transformer = self.named_steps[step_name]
-             return transformer.list_intermediate_results()
-
-         # Return all intermediates for all steps
-         result = {}
-         for name, transformer in self.steps:
-             intermediates = transformer.list_intermediate_results()
-             if intermediates:  # Only include steps with intermediates
-                 result[name] = intermediates
-         return result
-
-     def get_params(self, deep: bool = True) -> dict[str, Any]:
-         """Get parameters for all transformers in the pipeline.
-
-         Args:
-             deep: If True, returns parameters for all nested transformers.
-
-         Returns:
-             Dictionary of parameters with step names as prefixes.
-
-         Example:
-             >>> params = pipeline.get_params()
-             >>> print(params['filter__cutoff'])
-             1000000.0
-         """
-         params: dict[str, Any] = {"steps": self.steps}
-
-         if deep:
-             for name, transformer in self.steps:
-                 transformer_params = transformer.get_params(deep=True)
-                 for key, value in transformer_params.items():
-                     params[f"{name}__{key}"] = value
-
-         return params
-
-     def set_params(self, **params: Any) -> Pipeline:
-         """Set parameters for transformers in the pipeline.
-
-         Args:
-             **params: Parameters to set, using step__param syntax.
-
-         Returns:
-             Self for method chaining.
-
-         Raises:
-             ValueError: If parameter format is invalid.
-
-         Example:
-             >>> pipeline.set_params(filter__cutoff=2e6, normalize__method='peak')
-         """
-         # Special case: setting steps directly
-         if "steps" in params:
-             self.steps = params["steps"]
-             self.named_steps = dict(self.steps)
-             return self
-
-         # Parse step__param syntax
-         for param_name, value in params.items():
-             if "__" not in param_name:
-                 raise ValueError(
-                     f"Pipeline parameter must use 'step__param' syntax, got '{param_name}'"
-                 )
-
-             step_name, param = param_name.split("__", 1)
-             if step_name not in self.named_steps:
-                 raise ValueError(
-                     f"Step '{step_name}' not found in pipeline. "
-                     f"Available steps: {list(self.named_steps.keys())}"
-                 )
-
-             self.named_steps[step_name].set_params(**{param: value})
-
-         return self
-
-     def clone(self) -> Pipeline:
-         """Create a copy of this pipeline.
-
-         Returns:
-             New Pipeline instance with cloned transformers.
-
-         Example:
-             >>> pipeline_copy = pipeline.clone()
-         """
-         cloned_steps = [(name, transformer.clone()) for name, transformer in self.steps]
-         return Pipeline(cloned_steps)
-
-     def __len__(self) -> int:
-         """Return number of steps in the pipeline."""
-         return len(self.steps)
-
-     def __getitem__(self, index: int | str) -> TraceTransformer:
-         """Get transformer by index or name.
-
-         Args:
-             index: Integer index or string name.
-
-         Returns:
-             TraceTransformer at that position.
-
-         Example:
-             >>> first_step = pipeline[0]
-             >>> filter_step = pipeline['filter']
-         """
-         if isinstance(index, str):
-             return self.named_steps[index]
-         return self.steps[index][1]
-
-     def __repr__(self) -> str:
-         """String representation of the pipeline."""
-         step_strs = [
-             f"('{name}', {transformer.__class__.__name__})" for name, transformer in self.steps
-         ]
-         return "Pipeline([\n " + ",\n ".join(step_strs) + "\n])"
-
-
- __all__ = ["Pipeline"]
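
The removed module documents an sklearn-style fit/transform workflow with step__param parameter access and per-stage result caching. The sketch below restates that usage in one place; it is assembled only from the docstring examples above (Pipeline, LowPassFilter, Resample, Normalize, set_params with step__param keys, and get_intermediate), assumes oscura 0.8.0 is installed, and treats trace as a WaveformTrace obtained elsewhere. It is not taken from the 0.10.0 API.

# Usage sketch of the removed Pipeline class, based on its own docstrings above.
# `trace` is assumed to be a WaveformTrace loaded elsewhere (no loader call shown).
import oscura as osc

pipeline = osc.Pipeline([
    ("lowpass", osc.LowPassFilter(cutoff=1e6)),   # stages run in order
    ("resample", osc.Resample(rate=1e9)),
    ("normalize", osc.Normalize()),
])

pipeline.fit(trace)                                # stateful stages learn parameters first
result = pipeline.transform(trace)                 # each stage transforms the previous stage's output

pipeline.set_params(lowpass__cutoff=2e6)           # step__param syntax, as in set_params()
filtered = pipeline.get_intermediate("lowpass")    # cached per-stage output after transform()

The step__param convention mirrors sklearn.pipeline.Pipeline.set_params, which the removed docstring cites as its reference.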
oscura/utils/search/__init__.py
@@ -1,16 +0,0 @@
- """Pattern search and anomaly detection for Oscura.
-
-
- This module enables efficient pattern matching, anomaly detection, and
- context extraction for debugging and analysis workflows.
- """
-
- from oscura.utils.search.anomaly import find_anomalies
- from oscura.utils.search.context import extract_context
- from oscura.utils.search.pattern import find_pattern
-
- __all__ = [
-     "extract_context",
-     "find_anomalies",
-     "find_pattern",
- ]
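
The removed oscura.utils.search package is only re-exported here, so the signatures of find_pattern, find_anomalies, and extract_context do not appear in this diff. As a rough illustration of the kind of pattern search such helpers perform, the following is a plain NumPy sketch of normalized cross-correlation over a sampled trace; it is not the oscura API, and every name in it is illustrative.

# Illustrative only: cross-correlation pattern search over a 1-D sample array.
# Generic NumPy, not the removed oscura.utils.search implementation.
import numpy as np

def find_pattern_indices(samples: np.ndarray, pattern: np.ndarray, threshold: float = 0.9) -> np.ndarray:
    """Return start indices where `pattern` matches `samples` with normalized correlation >= threshold."""
    pattern = (pattern - pattern.mean()) / (pattern.std() + 1e-12)
    n = len(pattern)
    # Sliding windows over the trace (a strided view, no copy).
    windows = np.lib.stride_tricks.sliding_window_view(samples, n)
    w_mean = windows.mean(axis=1, keepdims=True)
    w_std = windows.std(axis=1, keepdims=True) + 1e-12
    # Mean of z-score products per window, i.e. a Pearson-style correlation score.
    scores = ((windows - w_mean) / w_std) @ pattern / n
    return np.flatnonzero(scores >= threshold)

# Example: locate a short pulse shape buried in a noisy trace.
rng = np.random.default_rng(0)
trace = rng.normal(0.0, 0.1, 10_000)
pulse = np.hanning(64)
trace[2_000:2_064] += pulse
print(find_pattern_indices(trace, pulse, threshold=0.8))

Normalized cross-correlation is one common way to score candidate matches; the removed oscura helpers may well have used a different approach internally.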