oscura 0.8.0-py3-none-any.whl → 0.11.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. oscura/__init__.py +19 -19
  2. oscura/__main__.py +4 -0
  3. oscura/analyzers/__init__.py +2 -0
  4. oscura/analyzers/digital/extraction.py +2 -3
  5. oscura/analyzers/digital/quality.py +1 -1
  6. oscura/analyzers/digital/timing.py +1 -1
  7. oscura/analyzers/ml/signal_classifier.py +6 -0
  8. oscura/analyzers/patterns/__init__.py +66 -0
  9. oscura/analyzers/power/basic.py +3 -3
  10. oscura/analyzers/power/soa.py +1 -1
  11. oscura/analyzers/power/switching.py +3 -3
  12. oscura/analyzers/signal_classification.py +529 -0
  13. oscura/analyzers/signal_integrity/sparams.py +3 -3
  14. oscura/analyzers/statistics/basic.py +10 -7
  15. oscura/analyzers/validation.py +1 -1
  16. oscura/analyzers/waveform/measurements.py +200 -156
  17. oscura/analyzers/waveform/measurements_with_uncertainty.py +91 -35
  18. oscura/analyzers/waveform/spectral.py +182 -84
  19. oscura/api/dsl/commands.py +15 -6
  20. oscura/api/server/templates/base.html +137 -146
  21. oscura/api/server/templates/export.html +84 -110
  22. oscura/api/server/templates/home.html +248 -267
  23. oscura/api/server/templates/protocols.html +44 -48
  24. oscura/api/server/templates/reports.html +27 -35
  25. oscura/api/server/templates/session_detail.html +68 -78
  26. oscura/api/server/templates/sessions.html +62 -72
  27. oscura/api/server/templates/waveforms.html +54 -64
  28. oscura/automotive/__init__.py +1 -1
  29. oscura/automotive/can/session.py +1 -1
  30. oscura/automotive/dbc/generator.py +638 -23
  31. oscura/automotive/dtc/data.json +17 -102
  32. oscura/automotive/flexray/fibex.py +9 -1
  33. oscura/automotive/uds/decoder.py +99 -6
  34. oscura/cli/analyze.py +8 -2
  35. oscura/cli/batch.py +36 -5
  36. oscura/cli/characterize.py +18 -4
  37. oscura/cli/export.py +47 -5
  38. oscura/cli/main.py +2 -0
  39. oscura/cli/onboarding/wizard.py +10 -6
  40. oscura/cli/pipeline.py +585 -0
  41. oscura/cli/visualize.py +6 -4
  42. oscura/convenience.py +400 -32
  43. oscura/core/measurement_result.py +286 -0
  44. oscura/core/progress.py +1 -1
  45. oscura/core/schemas/device_mapping.json +2 -8
  46. oscura/core/schemas/packet_format.json +4 -24
  47. oscura/core/schemas/protocol_definition.json +2 -12
  48. oscura/core/types.py +232 -239
  49. oscura/correlation/multi_protocol.py +1 -1
  50. oscura/export/legacy/__init__.py +11 -0
  51. oscura/export/legacy/wav.py +75 -0
  52. oscura/exporters/__init__.py +19 -0
  53. oscura/exporters/wireshark.py +809 -0
  54. oscura/hardware/acquisition/file.py +5 -19
  55. oscura/hardware/acquisition/saleae.py +10 -10
  56. oscura/hardware/acquisition/socketcan.py +4 -6
  57. oscura/hardware/acquisition/synthetic.py +1 -5
  58. oscura/hardware/acquisition/visa.py +6 -6
  59. oscura/hardware/security/side_channel_detector.py +5 -508
  60. oscura/inference/message_format.py +686 -1
  61. oscura/jupyter/display.py +2 -2
  62. oscura/jupyter/magic.py +3 -3
  63. oscura/loaders/__init__.py +17 -12
  64. oscura/loaders/binary.py +1 -1
  65. oscura/loaders/chipwhisperer.py +1 -2
  66. oscura/loaders/configurable.py +1 -1
  67. oscura/loaders/csv_loader.py +2 -2
  68. oscura/loaders/hdf5_loader.py +1 -1
  69. oscura/loaders/lazy.py +6 -1
  70. oscura/loaders/mmap_loader.py +0 -1
  71. oscura/loaders/numpy_loader.py +8 -7
  72. oscura/loaders/preprocessing.py +3 -5
  73. oscura/loaders/rigol.py +21 -7
  74. oscura/loaders/sigrok.py +2 -5
  75. oscura/loaders/tdms.py +3 -2
  76. oscura/loaders/tektronix.py +38 -32
  77. oscura/loaders/tss.py +20 -27
  78. oscura/loaders/validation.py +17 -10
  79. oscura/loaders/vcd.py +13 -8
  80. oscura/loaders/wav.py +1 -6
  81. oscura/pipeline/__init__.py +76 -0
  82. oscura/pipeline/handlers/__init__.py +165 -0
  83. oscura/pipeline/handlers/analyzers.py +1045 -0
  84. oscura/pipeline/handlers/decoders.py +899 -0
  85. oscura/pipeline/handlers/exporters.py +1103 -0
  86. oscura/pipeline/handlers/filters.py +891 -0
  87. oscura/pipeline/handlers/loaders.py +640 -0
  88. oscura/pipeline/handlers/transforms.py +768 -0
  89. oscura/reporting/formatting/measurements.py +55 -14
  90. oscura/reporting/templates/enhanced/protocol_re.html +504 -503
  91. oscura/sessions/legacy.py +49 -1
  92. oscura/side_channel/__init__.py +38 -57
  93. oscura/utils/builders/signal_builder.py +5 -5
  94. oscura/utils/comparison/compare.py +7 -9
  95. oscura/utils/comparison/golden.py +1 -1
  96. oscura/utils/filtering/convenience.py +2 -2
  97. oscura/utils/math/arithmetic.py +38 -62
  98. oscura/utils/math/interpolation.py +20 -20
  99. oscura/utils/pipeline/__init__.py +4 -17
  100. oscura/utils/progressive.py +1 -4
  101. oscura/utils/triggering/edge.py +1 -1
  102. oscura/utils/triggering/pattern.py +2 -2
  103. oscura/utils/triggering/pulse.py +2 -2
  104. oscura/utils/triggering/window.py +3 -3
  105. oscura/validation/hil_testing.py +11 -11
  106. oscura/visualization/__init__.py +46 -284
  107. oscura/visualization/batch.py +72 -433
  108. oscura/visualization/plot.py +542 -53
  109. oscura/visualization/styles.py +184 -318
  110. oscura/workflows/batch/advanced.py +1 -1
  111. oscura/workflows/batch/aggregate.py +12 -9
  112. oscura/workflows/complete_re.py +251 -23
  113. oscura/workflows/digital.py +27 -4
  114. oscura/workflows/multi_trace.py +136 -17
  115. oscura/workflows/waveform.py +11 -6
  116. oscura-0.11.0.dist-info/METADATA +460 -0
  117. {oscura-0.8.0.dist-info → oscura-0.11.0.dist-info}/RECORD +120 -145
  118. oscura/side_channel/dpa.py +0 -1025
  119. oscura/utils/optimization/__init__.py +0 -19
  120. oscura/utils/optimization/parallel.py +0 -443
  121. oscura/utils/optimization/search.py +0 -532
  122. oscura/utils/pipeline/base.py +0 -338
  123. oscura/utils/pipeline/composition.py +0 -248
  124. oscura/utils/pipeline/parallel.py +0 -449
  125. oscura/utils/pipeline/pipeline.py +0 -375
  126. oscura/utils/search/__init__.py +0 -16
  127. oscura/utils/search/anomaly.py +0 -424
  128. oscura/utils/search/context.py +0 -294
  129. oscura/utils/search/pattern.py +0 -288
  130. oscura/utils/storage/__init__.py +0 -61
  131. oscura/utils/storage/database.py +0 -1166
  132. oscura/visualization/accessibility.py +0 -526
  133. oscura/visualization/annotations.py +0 -371
  134. oscura/visualization/axis_scaling.py +0 -305
  135. oscura/visualization/colors.py +0 -451
  136. oscura/visualization/digital.py +0 -436
  137. oscura/visualization/eye.py +0 -571
  138. oscura/visualization/histogram.py +0 -281
  139. oscura/visualization/interactive.py +0 -1035
  140. oscura/visualization/jitter.py +0 -1042
  141. oscura/visualization/keyboard.py +0 -394
  142. oscura/visualization/layout.py +0 -400
  143. oscura/visualization/optimization.py +0 -1079
  144. oscura/visualization/palettes.py +0 -446
  145. oscura/visualization/power.py +0 -508
  146. oscura/visualization/power_extended.py +0 -955
  147. oscura/visualization/presets.py +0 -469
  148. oscura/visualization/protocols.py +0 -1246
  149. oscura/visualization/render.py +0 -223
  150. oscura/visualization/rendering.py +0 -444
  151. oscura/visualization/reverse_engineering.py +0 -838
  152. oscura/visualization/signal_integrity.py +0 -989
  153. oscura/visualization/specialized.py +0 -643
  154. oscura/visualization/spectral.py +0 -1226
  155. oscura/visualization/thumbnails.py +0 -340
  156. oscura/visualization/time_axis.py +0 -351
  157. oscura/visualization/waveform.py +0 -454
  158. oscura-0.8.0.dist-info/METADATA +0 -661
  159. {oscura-0.8.0.dist-info → oscura-0.11.0.dist-info}/WHEEL +0 -0
  160. {oscura-0.8.0.dist-info → oscura-0.11.0.dist-info}/entry_points.txt +0 -0
  161. {oscura-0.8.0.dist-info → oscura-0.11.0.dist-info}/licenses/LICENSE +0 -0
oscura/utils/pipeline/base.py (removed)
@@ -1,338 +0,0 @@
- """Base classes for trace transformations and pipeline stages.
-
- This module implements the foundational abstract base classes for creating
- custom trace transformations compatible with the Pipeline architecture.
- """
-
- from __future__ import annotations
-
- from abc import ABC, abstractmethod
- from typing import TYPE_CHECKING, Any
-
- if TYPE_CHECKING:
-     from oscura.core.types import WaveformTrace
-
-
- class TraceTransformer(ABC):
-     """Abstract base class for trace transformations.
-
-     All pipeline stages and custom transformations must inherit from this class.
-     Provides the fit/transform pattern similar to sklearn transformers.
-
-     The TraceTransformer enforces a consistent interface:
-     - transform(trace) -> trace: Required transformation method
-     - fit(trace) -> self: Optional learning/calibration method
-     - fit_transform(trace) -> trace: Convenience method
-     - get_params() / set_params(): Hyperparameter access
-     - clone(): Create a copy of the transformer
-
-     Example:
-         >>> class AmplitudeScaler(TraceTransformer):
-         ...     def __init__(self, scale_factor=1.0):
-         ...         self.scale_factor = scale_factor
-         ...
-         ...     def transform(self, trace):
-         ...         scaled_data = trace.data * self.scale_factor
-         ...         return WaveformTrace(
-         ...             data=scaled_data,
-         ...             metadata=trace.metadata
-         ...         )
-         ...
-         >>> scaler = AmplitudeScaler(scale_factor=2.0)
-         >>> result = scaler.transform(trace)
-
-     References:
-         API-004: TraceTransformer Base Class
-         sklearn.base.BaseEstimator, TransformerMixin
-     """
-
-     @abstractmethod
-     def transform(self, trace: WaveformTrace) -> WaveformTrace:
-         """Transform a trace.
-
-         Args:
-             trace: Input WaveformTrace to transform.
-
-         Returns:
-             Transformed WaveformTrace.
-
-         Raises:
-             NotImplementedError: If not implemented by subclass.
-         """
-         raise NotImplementedError(f"{self.__class__.__name__} must implement transform() method")
-
-     def fit(self, trace: WaveformTrace) -> TraceTransformer:
-         """Fit transformer to a reference trace (optional for stateful transformers).
-
-         This method is optional and should be overridden by stateful transformers
-         that need to learn parameters from a reference trace (e.g., normalization
-         statistics, adaptive filters).
-
-         Args:
-             trace: Reference WaveformTrace to fit to.
-
-         Returns:
-             Self for method chaining.
-
-         Example:
-             >>> class AdaptiveNormalizer(TraceTransformer):
-             ...     def __init__(self):
-             ...         self.mean_ = None
-             ...         self.std_ = None
-             ...
-             ...     def fit(self, trace):
-             ...         self.mean_ = trace.data.mean()
-             ...         self.std_ = trace.data.std()
-             ...         return self
-             ...
-             ...     def transform(self, trace):
-             ...         normalized = (trace.data - self.mean_) / self.std_
-             ...         return WaveformTrace(
-             ...             data=normalized,
-             ...             metadata=trace.metadata
-             ...         )
-         """
-         # Default implementation: no fitting required
-         return self
-
-     def fit_transform(self, trace: WaveformTrace) -> WaveformTrace:
-         """Fit to trace, then transform it.
-
-         Convenience method that calls fit() followed by transform().
-
-         Args:
-             trace: Input WaveformTrace to fit and transform.
-
-         Returns:
-             Transformed WaveformTrace.
-
-         Example:
-             >>> normalizer = AdaptiveNormalizer()
-             >>> result = normalizer.fit_transform(reference_trace)
-         """
-         return self.fit(trace).transform(trace)
-
-     def get_params(self, deep: bool = True) -> dict[str, Any]:
-         """Get parameters for this transformer.
-
-         Args:
-             deep: If True, will return parameters for nested objects.
-
-         Returns:
-             Dictionary of parameter names mapped to their values.
-
-         Example:
-             >>> scaler = AmplitudeScaler(scale_factor=2.0)
-             >>> params = scaler.get_params()
-             >>> print(params)
-             {'scale_factor': 2.0}
-         """
-         params = {}
-         for key in dir(self):
-             # Skip private/magic attributes and methods
-             if key.startswith("_") or callable(getattr(self, key)):
-                 continue
-             value = getattr(self, key)
-             params[key] = value
-
-             # Handle nested transformers if deep=True
-             if deep and hasattr(value, "get_params"):
-                 nested_params = value.get_params(deep=True)
-                 for nested_key, nested_value in nested_params.items():
-                     params[f"{key}__{nested_key}"] = nested_value
-
-         return params
-
-     def set_params(self, **params: Any) -> TraceTransformer:
-         """Set parameters for this transformer.
-
-         Args:
-             **params: Parameter names and values to set.
-
-         Returns:
-             Self for method chaining.
-
-         Raises:
-             ValueError: If parameter name is invalid.
-
-         Example:
-             >>> scaler = AmplitudeScaler(scale_factor=1.0)
-             >>> scaler.set_params(scale_factor=3.0)
-             >>> print(scaler.scale_factor)
-             3.0
-         """
-         if not params:
-             return self
-
-         valid_params = self.get_params(deep=False)
-
-         for key, value in params.items():
-             # Handle nested parameters (e.g., 'filter__cutoff')
-             if "__" in key:
-                 nested_obj, nested_key = key.split("__", 1)
-                 if nested_obj not in valid_params:
-                     raise ValueError(
-                         f"Invalid parameter {nested_obj} for transformer {self.__class__.__name__}"
-                     )
-                 nested = getattr(self, nested_obj)
-                 if hasattr(nested, "set_params"):
-                     nested.set_params(**{nested_key: value})
-                 else:
-                     raise ValueError(f"Parameter {nested_obj} does not support set_params")
-             else:
-                 if key not in valid_params:
-                     raise ValueError(
-                         f"Invalid parameter {key} for transformer "
-                         f"{self.__class__.__name__}. "
-                         f"Valid parameters: {list(valid_params.keys())}"
-                     )
-                 setattr(self, key, value)
-
-         return self
-
-     def clone(self) -> TraceTransformer:
-         """Create a copy of this transformer with the same parameters.
-
-         Returns:
-             New instance of the transformer with same parameters.
-
-         Example:
-             >>> scaler = AmplitudeScaler(scale_factor=2.0)
-             >>> scaler_copy = scaler.clone()
-             >>> scaler_copy.scale_factor
-             2.0
-         """
-         params = self.get_params(deep=False)
-         return self.__class__(**params)
-
-     def __getstate__(self) -> dict[str, Any]:
-         """Get state for pickling.
-
-         Returns:
-             Dictionary containing transformer state.
-         """
-         return self.__dict__.copy()
-
-     def __setstate__(self, state: dict[str, Any]) -> None:
-         """Set state from unpickling.
-
-         Args:
-             state: Dictionary containing transformer state.
-         """
-         self.__dict__.update(state)
-
-     def get_intermediate_result(self, key: str) -> Any:
-         """Get intermediate result from last transformation.
-
-         Some transformers cache intermediate results (e.g., FFT coefficients,
-         filter states) that can be accessed after transformation.
-
-         Args:
-             key: Name of intermediate result to retrieve.
-
-         Returns:
-             Intermediate result value.
-
-         Raises:
-             KeyError: If key not found or transformer doesn't support intermediates.
-
-         Example:
-             >>> filter = LowPassFilter(cutoff=1e6)
-             >>> result = filter.transform(trace)
-             >>> transfer_func = filter.get_intermediate_result('transfer_function')
-
-         References:
-             API-005: Intermediate Result Access
-         """
-         # Check if transformer has _intermediates cache
-         if not hasattr(self, "_intermediates"):
-             raise KeyError(f"{self.__class__.__name__} does not cache intermediate results")
-
-         intermediates = self._intermediates
-         if key not in intermediates:
-             available = list(intermediates.keys())
-             raise KeyError(
-                 f"Intermediate '{key}' not found in {self.__class__.__name__}. "
-                 f"Available: {available}"
-             )
-
-         return intermediates[key]
-
-     def has_intermediate_result(self, key: str) -> bool:
-         """Check if intermediate result is available.
-
-         Args:
-             key: Name of intermediate result.
-
-         Returns:
-             True if intermediate result exists.
-
-         Example:
-             >>> if filter.has_intermediate_result('impulse_response'):
-             ...     impulse = filter.get_intermediate_result('impulse_response')
-
-         References:
-             API-005: Intermediate Result Access
-         """
-         if not hasattr(self, "_intermediates"):
-             return False
-         return key in self._intermediates
-
-     def list_intermediate_results(self) -> list[str]:
-         """List all available intermediate result keys.
-
-         Returns:
-             List of intermediate result names, or empty list if none available.
-
-         Example:
-             >>> print(filter.list_intermediate_results())
-             ['transfer_function', 'impulse_response', 'frequency_response']
-
-         References:
-             API-005: Intermediate Result Access
-         """
-         if not hasattr(self, "_intermediates"):
-             return []
-         return list(self._intermediates.keys())
-
-     def _cache_intermediate(self, key: str, value: Any) -> None:
-         """Cache an intermediate result for later access.
-
-         This is a protected method for subclasses to use when storing
-         intermediate computation results.
-
-         Args:
-             key: Name of intermediate result.
-             value: Value to cache.
-
-         Example (in subclass):
-             >>> def transform(self, trace):
-             ...     fft_coeffs = compute_fft(trace)
-             ...     self._cache_intermediate('fft_coeffs', fft_coeffs)
-             ...     return processed_trace
-
-         References:
-             API-005: Intermediate Result Access
-         """
-         if not hasattr(self, "_intermediates"):
-             self._intermediates = {}
-         self._intermediates[key] = value
-
-     def _clear_intermediates(self) -> None:
-         """Clear all cached intermediate results.
-
-         Useful for freeing memory when intermediate results are no longer needed.
-
-         Example (in subclass):
-             >>> def transform(self, trace):
-             ...     self._clear_intermediates()  # Clear previous results
-             ...     # ... perform transformation ...
-
-         References:
-             API-005: Intermediate Result Access
-         """
-         if hasattr(self, "_intermediates"):
-             self._intermediates.clear()
-
-
- __all__ = ["TraceTransformer"]
oscura/utils/pipeline/composition.py (removed)
@@ -1,248 +0,0 @@
- """Functional composition operators for trace transformations.
-
- This module implements compose() and pipe() functions for functional-style
- trace processing, with support for operator overloading.
- """
-
- from collections.abc import Callable
- from functools import reduce, wraps
- from typing import Any, TypeVar
-
- from oscura.core.types import WaveformTrace
-
- # Type variables for generic composition
- T = TypeVar("T")
- TraceFunc = Callable[[WaveformTrace], WaveformTrace]
-
-
- def compose(*funcs: TraceFunc) -> TraceFunc:
-     """Compose functions right-to-left: compose(f, g, h)(x) == f(g(h(x))).
-
-     Creates a single function that applies the given functions in reverse order.
-     This follows mathematical function composition notation.
-
-     Args:
-         *funcs: Variable number of functions to compose. Each function should
-             take a WaveformTrace and return a WaveformTrace.
-
-     Returns:
-         Composite function that applies all functions in reverse order.
-
-     Raises:
-         ValueError: If no functions provided.
-
-     Example:
-         >>> import oscura as osc
-         >>> from functools import partial
-         >>> # Create composed analysis function
-         >>> analyze_signal = osc.compose(
-         ...     osc.extract_thd,
-         ...     partial(osc.fft, nfft=8192, window='hann'),
-         ...     partial(osc.normalize, method='peak'),
-         ...     partial(osc.low_pass, cutoff=5e6)
-         ... )
-         >>> # Apply to trace: low_pass -> normalize -> fft -> extract_thd
-         >>> thd = analyze_signal(trace)
-
-     References:
-         API-002: Function Composition Operators
-         toolz.functoolz
-         https://github.com/pytoolz/toolz
-     """
-     if not funcs:
-         raise ValueError("compose() requires at least one function")
-
-     if len(funcs) == 1:
-         return funcs[0]
-
-     def composed(x: WaveformTrace) -> WaveformTrace:
-         """Apply composed functions right-to-left."""
-         # Apply functions in reverse order (right to left)
-         return reduce(lambda val, func: func(val), reversed(funcs), x)
-
-     # Preserve function metadata (handle functools.partial which lacks __name__)
-     func_names = []
-     for f in funcs:
-         if hasattr(f, "__name__"):
-             func_names.append(f.__name__)
-         elif hasattr(f, "func"):  # functools.partial
-             func_names.append(f.func.__name__)
-         else:
-             func_names.append(repr(f))
-     composed.__name__ = "compose(" + ", ".join(func_names) + ")"
-     composed.__doc__ = f"Composition of {len(funcs)} functions"
-
-     return composed
-
-
- def pipe(data: WaveformTrace, *funcs: TraceFunc) -> WaveformTrace:
-     """Apply functions left-to-right: pipe(x, f, g, h) == h(g(f(x))).
-
-     Applies the given functions sequentially to the data, passing the output
-     of each function to the next. This is more intuitive for sequential
-     processing pipelines.
-
-     Args:
-         data: Initial WaveformTrace to process.
-         *funcs: Variable number of functions to apply sequentially.
-
-     Returns:
-         Transformed WaveformTrace after applying all functions.
-
-     Example:
-         >>> import oscura as osc
-         >>> # Apply operations left-to-right
-         >>> result = osc.pipe(
-         ...     trace,
-         ...     osc.low_pass(cutoff=1e6),
-         ...     osc.resample(rate=1e9),
-         ...     osc.fft(nfft=8192)
-         ... )
-         >>> # Equivalent to: fft(resample(low_pass(trace)))
-
-     Advanced Example:
-         >>> # Use with partial application
-         >>> from functools import partial
-         >>> result = osc.pipe(
-         ...     trace,
-         ...     partial(osc.low_pass, cutoff=1e6),
-         ...     partial(osc.normalize, method='zscore'),
-         ...     partial(osc.fft, nfft=8192, window='hann')
-         ... )
-
-     References:
-         API-002: Function Composition Operators
-         toolz.pipe
-     """
-     # Apply functions left-to-right
-     return reduce(lambda val, func: func(val), funcs, data)
-
-
- class Composable:
-     """Mixin class to enable >> operator for function composition.
-
-     This class provides the __rshift__ operator to enable pipe-style
-     composition using the >> syntax. Intended to be mixed into WaveformTrace
-     or used as a wrapper for transformer functions.
-
-     Example:
-         >>> # Enable >> operator on WaveformTrace
-         >>> result = trace >> low_pass(1e6) >> normalize() >> fft()
-         >>> # Equivalent to: fft(normalize(low_pass(trace)))
-
-     References:
-         API-002: Function Composition Operators
-     """
-
-     def __rshift__(self, func: Callable[[Any], Any]) -> Any:
-         """Enable >> operator for function application.
-
-         Args:
-             func: Function to apply to self.
-
-         Returns:
-             Result of applying func to self.
-
-         Example:
-             >>> result = trace >> low_pass(1e6)
-         """
-         return func(self)
-
-
- def make_composable(func: Callable[..., WaveformTrace]) -> Callable[..., TraceFunc]:
-     """Decorator to make a function support partial application and composition.
-
-     Wraps a function so it can be used in compose() and pipe() with
-     partial argument application.
-
-     Args:
-         func: Function to wrap.
-
-     Returns:
-         Wrapped function that returns a partially applied function when
-         called without a trace argument.
-
-     Example:
-         >>> @make_composable
-         ... def scale(trace, factor=1.0):
-         ...     return WaveformTrace(
-         ...         data=trace.data * factor,
-         ...         metadata=trace.metadata
-         ...     )
-         >>> # Use with partial application
-         >>> double = scale(factor=2.0)
-         >>> result = double(trace)
-         >>> # Or in pipe
-         >>> result = pipe(trace, scale(factor=2.0), scale(factor=0.5))
-
-     References:
-         API-002: Function Composition Operators
-     """
-
-     @wraps(func)
-     def wrapper(*args: Any, **kwargs: Any) -> TraceFunc | WaveformTrace:
-         # If first arg is a WaveformTrace, apply function immediately
-         if args and isinstance(args[0], WaveformTrace):
-             return func(*args, **kwargs)
-
-         # Otherwise, return a partially applied function
-         def partial_func(trace: WaveformTrace) -> WaveformTrace:
-             return func(trace, *args, **kwargs)
-
-         return partial_func
-
-     return wrapper  # type: ignore[return-value]
-
-
- def curry(func: Callable[..., WaveformTrace]) -> Callable[..., TraceFunc]:
-     """Curry a function for easier composition.
-
-     Transforms a multi-argument function into a series of single-argument
-     functions. Useful for creating reusable transformation functions.
-
-     Args:
-         func: Function to curry.
-
-     Returns:
-         Curried version of the function.
-
-     Example:
-         >>> @curry
-         ... def scale_and_offset(trace, scale, offset):
-         ...     return WaveformTrace(
-         ...         data=trace.data * scale + offset,
-         ...         metadata=trace.metadata
-         ...     )
-         >>> # Create specialized functions
-         >>> double_and_shift = scale_and_offset(scale=2.0, offset=1.0)
-         >>> result = double_and_shift(trace)
-
-     References:
-         API-002: Function Composition Operators
-         Functional programming currying
-     """
-
-     @wraps(func)
-     def curried(*args: Any, **kwargs: Any) -> TraceFunc | WaveformTrace:
-         # If we have a WaveformTrace as first arg, apply immediately
-         if args and isinstance(args[0], WaveformTrace):
-             return func(*args, **kwargs)
-
-         # Return a function that waits for the trace
-         def partial(*more_args: Any, **more_kwargs: Any) -> WaveformTrace:
-             all_args = args + more_args
-             all_kwargs = {**kwargs, **more_kwargs}
-             return func(*all_args, **all_kwargs)
-
-         return partial
-
-     return curried  # type: ignore[return-value]
-
-
- __all__ = [
-     "Composable",
-     "compose",
-     "curry",
-     "make_composable",
-     "pipe",
- ]
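
The two orderings documented in the removed module are worth keeping straight: compose() applies right-to-left (mathematical composition), while pipe() applies left-to-right (sequential pipelines). The following is a minimal generic sketch of those semantics using plain functions; it does not depend on oscura and is not the removed implementation, just an illustration of the contract its docstrings describe.

    # Sketch of compose/pipe ordering:
    #   compose(f, g, h)(x) == f(g(h(x)))   (right-to-left)
    #   pipe(x, f, g, h)    == h(g(f(x)))   (left-to-right)
    from functools import reduce
    from typing import Callable, TypeVar

    T = TypeVar("T")


    def compose(*funcs: Callable[[T], T]) -> Callable[[T], T]:
        if not funcs:
            raise ValueError("compose() requires at least one function")
        # Right-to-left: the last function listed is applied first.
        return lambda x: reduce(lambda val, f: f(val), reversed(funcs), x)


    def pipe(value: T, *funcs: Callable[[T], T]) -> T:
        # Left-to-right: each function receives the previous result.
        return reduce(lambda val, f: f(val), funcs, value)


    def double(x: int) -> int:
        return x * 2


    def increment(x: int) -> int:
        return x + 1


    assert compose(double, increment)(3) == 8  # double(increment(3))
    assert pipe(3, double, increment) == 7     # increment(double(3))
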