oscura-0.1.2-py3-none-any.whl → oscura-0.4.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. oscura/__init__.py +1 -7
  2. oscura/acquisition/__init__.py +147 -0
  3. oscura/acquisition/file.py +255 -0
  4. oscura/acquisition/hardware.py +186 -0
  5. oscura/acquisition/saleae.py +340 -0
  6. oscura/acquisition/socketcan.py +315 -0
  7. oscura/acquisition/streaming.py +38 -0
  8. oscura/acquisition/synthetic.py +229 -0
  9. oscura/acquisition/visa.py +376 -0
  10. oscura/analyzers/__init__.py +3 -0
  11. oscura/analyzers/digital/clock.py +9 -1
  12. oscura/analyzers/digital/edges.py +1 -1
  13. oscura/analyzers/digital/timing.py +41 -11
  14. oscura/analyzers/packet/payload_extraction.py +2 -4
  15. oscura/analyzers/packet/stream.py +5 -5
  16. oscura/analyzers/patterns/__init__.py +4 -3
  17. oscura/analyzers/patterns/clustering.py +3 -1
  18. oscura/analyzers/power/ac_power.py +0 -2
  19. oscura/analyzers/power/basic.py +0 -2
  20. oscura/analyzers/power/ripple.py +0 -2
  21. oscura/analyzers/side_channel/__init__.py +52 -0
  22. oscura/analyzers/side_channel/power.py +690 -0
  23. oscura/analyzers/side_channel/timing.py +369 -0
  24. oscura/analyzers/signal_integrity/embedding.py +0 -2
  25. oscura/analyzers/signal_integrity/sparams.py +28 -206
  26. oscura/analyzers/spectral/fft.py +0 -2
  27. oscura/analyzers/statistical/__init__.py +3 -3
  28. oscura/analyzers/statistical/checksum.py +2 -0
  29. oscura/analyzers/statistical/classification.py +2 -0
  30. oscura/analyzers/statistical/entropy.py +11 -9
  31. oscura/analyzers/statistical/ngrams.py +4 -2
  32. oscura/api/fluent.py +2 -2
  33. oscura/automotive/__init__.py +4 -4
  34. oscura/automotive/can/__init__.py +0 -2
  35. oscura/automotive/can/patterns.py +3 -1
  36. oscura/automotive/can/session.py +277 -78
  37. oscura/automotive/can/state_machine.py +5 -2
  38. oscura/automotive/dbc/__init__.py +0 -2
  39. oscura/automotive/dtc/__init__.py +0 -2
  40. oscura/automotive/dtc/data.json +2763 -0
  41. oscura/automotive/dtc/database.py +37 -2769
  42. oscura/automotive/j1939/__init__.py +0 -2
  43. oscura/automotive/loaders/__init__.py +0 -2
  44. oscura/automotive/loaders/asc.py +0 -2
  45. oscura/automotive/loaders/blf.py +0 -2
  46. oscura/automotive/loaders/csv_can.py +0 -2
  47. oscura/automotive/obd/__init__.py +0 -2
  48. oscura/automotive/uds/__init__.py +0 -2
  49. oscura/automotive/uds/models.py +0 -2
  50. oscura/builders/__init__.py +9 -11
  51. oscura/builders/signal_builder.py +99 -191
  52. oscura/cli/main.py +0 -2
  53. oscura/cli/shell.py +0 -2
  54. oscura/config/loader.py +0 -2
  55. oscura/core/backend_selector.py +1 -1
  56. oscura/core/correlation.py +0 -2
  57. oscura/core/exceptions.py +61 -3
  58. oscura/core/lazy.py +5 -3
  59. oscura/core/memory_limits.py +0 -2
  60. oscura/core/numba_backend.py +5 -7
  61. oscura/core/uncertainty.py +3 -3
  62. oscura/dsl/interpreter.py +2 -0
  63. oscura/dsl/parser.py +8 -6
  64. oscura/exploratory/error_recovery.py +3 -3
  65. oscura/exploratory/parse.py +2 -0
  66. oscura/exploratory/recovery.py +2 -0
  67. oscura/exploratory/sync.py +2 -0
  68. oscura/export/wireshark/generator.py +1 -1
  69. oscura/export/wireshark/type_mapping.py +2 -0
  70. oscura/exporters/hdf5.py +1 -3
  71. oscura/extensibility/templates.py +0 -8
  72. oscura/inference/active_learning/lstar.py +2 -4
  73. oscura/inference/active_learning/observation_table.py +0 -2
  74. oscura/inference/active_learning/oracle.py +3 -1
  75. oscura/inference/active_learning/teachers/simulator.py +1 -3
  76. oscura/inference/alignment.py +2 -0
  77. oscura/inference/message_format.py +2 -0
  78. oscura/inference/protocol_dsl.py +7 -5
  79. oscura/inference/sequences.py +12 -14
  80. oscura/inference/state_machine.py +2 -0
  81. oscura/integrations/llm.py +3 -1
  82. oscura/jupyter/display.py +0 -2
  83. oscura/loaders/__init__.py +68 -51
  84. oscura/loaders/chipwhisperer.py +393 -0
  85. oscura/loaders/pcap.py +1 -1
  86. oscura/loaders/touchstone.py +221 -0
  87. oscura/math/arithmetic.py +0 -2
  88. oscura/optimization/parallel.py +9 -6
  89. oscura/pipeline/composition.py +0 -2
  90. oscura/plugins/cli.py +0 -2
  91. oscura/reporting/comparison.py +0 -2
  92. oscura/reporting/config.py +1 -1
  93. oscura/reporting/formatting/emphasis.py +2 -0
  94. oscura/reporting/formatting/numbers.py +0 -2
  95. oscura/reporting/output.py +1 -3
  96. oscura/reporting/sections.py +0 -2
  97. oscura/search/anomaly.py +2 -0
  98. oscura/session/session.py +91 -16
  99. oscura/sessions/__init__.py +70 -0
  100. oscura/sessions/base.py +323 -0
  101. oscura/sessions/blackbox.py +640 -0
  102. oscura/sessions/generic.py +189 -0
  103. oscura/testing/synthetic.py +2 -0
  104. oscura/ui/formatters.py +4 -2
  105. oscura/utils/buffer.py +2 -2
  106. oscura/utils/lazy.py +5 -5
  107. oscura/utils/memory_advanced.py +2 -2
  108. oscura/utils/memory_extensions.py +2 -2
  109. oscura/visualization/colors.py +0 -2
  110. oscura/visualization/power.py +2 -0
  111. oscura/workflows/multi_trace.py +2 -0
  112. {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/METADATA +122 -20
  113. {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/RECORD +116 -98
  114. {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/WHEEL +0 -0
  115. {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/entry_points.txt +0 -0
  116. {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/licenses/LICENSE +0 -0
oscura/analyzers/side_channel/timing.py (new file)
@@ -0,0 +1,369 @@
+ """Timing side-channel analysis.
+
+ This module implements timing attack analysis for extracting secrets from
+ execution time variations in cryptographic implementations.
+
+ Example:
+     >>> from oscura.analyzers.side_channel.timing import TimingAnalyzer
+     >>> analyzer = TimingAnalyzer(confidence_level=0.95)
+     >>> result = analyzer.analyze(timings, inputs)
+     >>> if result.has_leak:
+     ...     print(f"Timing leak detected with {result.confidence:.2%} confidence")
+
+ References:
+     Kocher "Timing Attacks on Implementations of Diffie-Hellman, RSA, DSS" (CRYPTO 1996)
+     Brumley & Boneh "Remote Timing Attacks are Practical" (USENIX Security 2003)
+ """
+
+ from __future__ import annotations
+
+ from collections.abc import Callable
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING, Any
+
+ import numpy as np
+ from scipy import stats
+
+ if TYPE_CHECKING:
+     from numpy.typing import NDArray
+
+ __all__ = [
+     "TimingAnalyzer",
+     "TimingAttackResult",
+     "TimingLeak",
+ ]
+
+
+ @dataclass
+ class TimingLeak:
+     """Detected timing leak information.
+
+     Attributes:
+         input_bit: Bit position causing leak (if bit-level analysis).
+         input_byte: Byte position causing leak (if byte-level analysis).
+         mean_difference: Mean timing difference between groups.
+         t_statistic: T-test statistic.
+         p_value: Statistical p-value.
+         confidence: Confidence level (1 - p_value).
+         effect_size: Cohen's d effect size.
+     """
+
+     input_bit: int | None
+     input_byte: int | None
+     mean_difference: float
+     t_statistic: float
+     p_value: float
+     confidence: float
+     effect_size: float
+
+     @property
+     def is_significant(self) -> bool:
+         """Check if leak is statistically significant (p < 0.05)."""
+         return self.p_value < 0.05
+
+
+ @dataclass
+ class TimingAttackResult:
+     """Result of timing attack analysis.
+
+     Attributes:
+         has_leak: Whether timing leak detected.
+         leaks: List of detected leaks.
+         confidence: Overall confidence in leak detection.
+         timing_statistics: Summary statistics of timings.
+     """
+
+     has_leak: bool
+     leaks: list[TimingLeak]
+     confidence: float
+     timing_statistics: dict[str, float]
+
+
+ class TimingAnalyzer:
+     """Timing side-channel attack analyzer.
+
+     Detects timing leaks in cryptographic implementations by analyzing
+     execution time variations correlated with input data.
+
+     Args:
+         confidence_level: Confidence level for leak detection (default 0.95).
+         min_samples: Minimum samples required for analysis (default 100).
+
+     Example:
+         >>> analyzer = TimingAnalyzer(confidence_level=0.99)
+         >>> # Measure RSA decryption times
+         >>> timings = measure_decryption_times(ciphertexts)
+         >>> result = analyzer.analyze(timings, ciphertexts)
+         >>> for leak in result.leaks:
+         ...     print(f"Leak at bit {leak.input_bit}: p={leak.p_value:.6f}")
+
+     References:
+         Kocher "Timing Attacks on Implementations of Diffie-Hellman, RSA, DSS"
+     """
+
+     def __init__(self, confidence_level: float = 0.95, min_samples: int = 100) -> None:
+         """Initialize timing analyzer.
+
+         Args:
+             confidence_level: Confidence level (0.0-1.0).
+             min_samples: Minimum number of samples.
+
+         Raises:
+             ValueError: If parameters out of range.
+         """
+         if not 0.0 < confidence_level < 1.0:
+             raise ValueError(f"confidence_level must be in (0, 1), got {confidence_level}")
+         if min_samples < 10:
+             raise ValueError(f"min_samples must be >= 10, got {min_samples}")
+
+         self.confidence_level = confidence_level
+         self.min_samples = min_samples
+
+     def analyze(
+         self,
+         timings: NDArray[np.floating[Any]],
+         inputs: NDArray[np.integer[Any]],
+     ) -> TimingAttackResult:
+         """Analyze timing measurements for leaks.
+
+         Performs statistical analysis (t-tests) to detect correlations
+         between input bits/bytes and execution time.
+
+         Args:
+             timings: Execution times (n_samples,).
+             inputs: Input values (n_samples, input_size) or (n_samples,).
+
+         Returns:
+             TimingAttackResult with detected leaks.
+
+         Raises:
+             ValueError: If inputs invalid or insufficient samples.
+
+         Example:
+             >>> timings = np.array([...]) # Measured execution times
+             >>> inputs = np.random.randint(0, 256, (1000, 16), dtype=np.uint8)
+             >>> result = analyzer.analyze(timings, inputs)
+             >>> print(f"Leaks detected: {len(result.leaks)}")
+         """
+         if len(timings) < self.min_samples:
+             raise ValueError(f"Insufficient samples: {len(timings)} < {self.min_samples}")
+
+         if len(timings) != len(inputs):
+             raise ValueError(f"Timings ({len(timings)}) and inputs ({len(inputs)}) length mismatch")
+
+         # Compute timing statistics
+         timing_stats = {
+             "mean": float(np.mean(timings)),
+             "std": float(np.std(timings)),
+             "min": float(np.min(timings)),
+             "max": float(np.max(timings)),
+             "median": float(np.median(timings)),
+         }
+
+         # Detect leaks
+         leaks: list[TimingLeak] = []
+
+         if inputs.ndim == 1:
+             # Single byte per input
+             byte_leaks = self._analyze_byte_leaks(timings, inputs, byte_pos=0)
+             leaks.extend(byte_leaks)
+         else:
+             # Multiple bytes per input
+             for byte_pos in range(inputs.shape[1]):
+                 byte_leaks = self._analyze_byte_leaks(timings, inputs[:, byte_pos], byte_pos)
+                 leaks.extend(byte_leaks)
+
+         # Overall confidence is max of individual leak confidences
+         overall_confidence = max([leak.confidence for leak in leaks], default=0.0)
+
+         return TimingAttackResult(
+             has_leak=len(leaks) > 0,
+             leaks=leaks,
+             confidence=overall_confidence,
+             timing_statistics=timing_stats,
+         )
+
+     def _analyze_byte_leaks(
+         self,
+         timings: NDArray[np.floating[Any]],
+         byte_values: NDArray[np.integer[Any]],
+         byte_pos: int,
+     ) -> list[TimingLeak]:
+         """Analyze timing leaks for specific byte position.
+
+         Args:
+             timings: Execution times.
+             byte_values: Byte values at this position.
+             byte_pos: Byte position index.
+
+         Returns:
+             List of detected leaks for this byte.
+         """
+         leaks: list[TimingLeak] = []
+
+         # Test each bit position in the byte
+         for bit_pos in range(8):
+             # Partition timings by bit value
+             bit_mask = 1 << bit_pos
+             bit_set = (byte_values & bit_mask) != 0
+
+             timings_0 = timings[~bit_set]
+             timings_1 = timings[bit_set]
+
+             # Need sufficient samples in each group
+             if len(timings_0) < 10 or len(timings_1) < 10:
+                 continue
+
+             # Welch's t-test (unequal variances)
+             t_stat, p_value = stats.ttest_ind(timings_0, timings_1, equal_var=False)
+
+             # Check for significance
+             alpha = 1.0 - self.confidence_level
+             if p_value < alpha:
+                 # Calculate effect size (Cohen's d)
+                 mean_diff = float(np.mean(timings_1) - np.mean(timings_0))
+                 pooled_std = np.sqrt((np.var(timings_0) + np.var(timings_1)) / 2)
+                 effect_size = mean_diff / pooled_std if pooled_std > 0 else 0.0
+
+                 leak = TimingLeak(
+                     input_bit=bit_pos,
+                     input_byte=byte_pos,
+                     mean_difference=mean_diff,
+                     t_statistic=float(t_stat),
+                     p_value=float(p_value),
+                     confidence=1.0 - float(p_value),
+                     effect_size=float(effect_size),
+                 )
+                 leaks.append(leak)
+
+         return leaks
+
+     def analyze_with_partitioning(
+         self,
+         timings: NDArray[np.floating[Any]],
+         inputs: NDArray[np.integer[Any]],
+         partition_func: Callable[[NDArray[np.integer[Any]]], NDArray[np.bool_]],
+     ) -> tuple[float, float, float]:
+         """Analyze timing with custom partitioning function.
+
+         Allows custom grouping criteria beyond bit values.
+
+         Args:
+             timings: Execution times.
+             inputs: Input values.
+             partition_func: Function mapping inputs to boolean partition.
+
+         Returns:
+             Tuple of (mean_difference, t_statistic, p_value).
+
+         Example:
+             >>> # Partition by high/low byte value
+             >>> def partition(x):
+             ...     return x >= 128
+             >>> diff, t_stat, p_val = analyzer.analyze_with_partitioning(
+             ...     timings, inputs, partition
+             ... )
+         """
+         partition = partition_func(inputs)
+
+         timings_0 = timings[~partition]
+         timings_1 = timings[partition]
+
+         if len(timings_0) < 10 or len(timings_1) < 10:
+             return 0.0, 0.0, 1.0
+
+         t_stat, p_value = stats.ttest_ind(timings_0, timings_1, equal_var=False)
+         mean_diff = float(np.mean(timings_1) - np.mean(timings_0))
+
+         return mean_diff, float(t_stat), float(p_value)
+
+     def detect_outliers(
+         self,
+         timings: NDArray[np.floating[Any]],
+         threshold: float = 3.0,
+     ) -> NDArray[np.bool_]:
+         """Detect outlier measurements using modified Z-score.
+
+         Outliers may indicate measurement errors or specific attack scenarios.
+
+         Args:
+             timings: Execution times.
+             threshold: Z-score threshold for outliers (default 3.0).
+
+         Returns:
+             Boolean array marking outliers.
+
+         Example:
+             >>> outliers = analyzer.detect_outliers(timings)
+             >>> clean_timings = timings[~outliers]
+         """
+         median = np.median(timings)
+         mad = np.median(np.abs(timings - median))
+
+         # Modified Z-score (more robust than standard Z-score)
+         if mad == 0:
+             # All values identical
+             return np.zeros(len(timings), dtype=bool)
+
+         modified_z_scores = 0.6745 * (timings - median) / mad
+         outliers: NDArray[np.bool_] = np.abs(modified_z_scores) > threshold
+
+         return outliers
+
+     def compute_mutual_information(
+         self,
+         timings: NDArray[np.floating[Any]],
+         inputs: NDArray[np.integer[Any]],
+         n_bins: int = 10,
+     ) -> float:
+         """Compute mutual information between timings and inputs.
+
+         Measures information leakage in bits.
+
+         Args:
+             timings: Execution times.
+             inputs: Input values.
+             n_bins: Number of bins for discretization.
+
+         Returns:
+             Mutual information in bits.
+
+         Example:
+             >>> mi = analyzer.compute_mutual_information(timings, inputs)
+             >>> print(f"Information leakage: {mi:.4f} bits")
+         """
+         # Discretize timings into bins
+         timing_bins = np.digitize(
+             timings,
+             bins=np.linspace(np.min(timings), np.max(timings), n_bins),
+         )
+
+         # Discretize inputs
+         if inputs.ndim == 1:
+             input_bins = inputs
+         else:
+             # Hash multi-dimensional inputs
+             input_bins = np.sum(inputs * np.arange(inputs.shape[1]), axis=1)
+
+         # Compute joint histogram
+         joint_hist, _, _ = np.histogram2d(
+             timing_bins,
+             input_bins,
+             bins=[n_bins, len(np.unique(input_bins))],
+         )
+         joint_prob = joint_hist / np.sum(joint_hist)
+
+         # Marginal distributions
+         timing_prob = np.sum(joint_prob, axis=1)
+         input_prob = np.sum(joint_prob, axis=0)
+
+         # Mutual information: I(X; Y) = sum p(x,y) * log(p(x,y) / (p(x) * p(y)))
+         mi = 0.0
+         for i in range(len(timing_prob)):
+             for j in range(len(input_prob)):
+                 if joint_prob[i, j] > 0:
+                     mi += joint_prob[i, j] * np.log2(
+                         joint_prob[i, j] / (timing_prob[i] * input_prob[j])
+                     )
+
+         return float(mi)
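
For orientation, a minimal sketch of driving the new TimingAnalyzer end to end. The synthetic leak model (a fixed extra delay whenever bit 0 of the input byte is set) and all constants are illustrative assumptions, not part of the package:

    import numpy as np
    from oscura.analyzers.side_channel.timing import TimingAnalyzer

    rng = np.random.default_rng(0)
    inputs = rng.integers(0, 256, size=2000, dtype=np.uint8)  # one byte per measurement
    # Hypothetical leak: ~50 ns extra whenever bit 0 is set
    timings = rng.normal(1000.0, 10.0, size=2000) + 50.0 * (inputs & 1)

    analyzer = TimingAnalyzer(confidence_level=0.99)
    keep = ~analyzer.detect_outliers(timings)  # drop measurement glitches first
    result = analyzer.analyze(timings[keep], inputs[keep])

    for leak in result.leaks:
        print(f"byte {leak.input_byte} bit {leak.input_bit}: d={leak.effect_size:.2f}, p={leak.p_value:.2e}")
    print(f"leakage estimate: {analyzer.compute_mutual_information(timings, inputs):.3f} bits")

With this strong a simulated effect, analyze() should flag bit 0 with a large Cohen's d while the other seven bits stay below the significance threshold.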
oscura/analyzers/signal_integrity/embedding.py
@@ -12,8 +12,6 @@ References:
      IEEE 370-2020: Standard for Electrical Characterization of PCBs
  """
  
- from __future__ import annotations
- 
  import numpy as np
  
  from oscura.analyzers.signal_integrity.sparams import (
oscura/analyzers/signal_integrity/sparams.py
@@ -1,11 +1,14 @@
- """S-Parameter handling and Touchstone file support.
+ """S-Parameter handling and analysis.
  
- This module provides Touchstone file loading and S-parameter
- calculations including return loss and insertion loss.
+ This module provides S-parameter calculations including return loss and
+ insertion loss for signal integrity analysis.
  
+ For loading Touchstone files, use:
+     >>> from oscura.loaders import load_touchstone
  
  Example:
-     >>> from oscura.analyzers.signal_integrity.sparams import load_touchstone
+     >>> from oscura.loaders import load_touchstone
+     >>> from oscura.analyzers.signal_integrity.sparams import return_loss
      >>> s_params = load_touchstone("cable.s2p")
      >>> rl = return_loss(s_params, frequency=1e9)
  
@@ -16,16 +19,11 @@ References:
  
  from __future__ import annotations
  
- import contextlib
- import re
  from dataclasses import dataclass, field
- from pathlib import Path
- from typing import TYPE_CHECKING
+ from typing import TYPE_CHECKING, Any
  
  import numpy as np
  
- from oscura.core.exceptions import FormatError, LoaderError
- 
  if TYPE_CHECKING:
      from numpy.typing import NDArray
  
@@ -94,201 +92,6 @@ class SParameterData:
          )
  
  
- def load_touchstone(path: str | Path) -> SParameterData:
-     """Load S-parameter data from Touchstone file.
- 
-     Supports .s1p through .s8p formats and both Touchstone 1.0
-     and 2.0 file formats.
- 
-     Args:
-         path: Path to Touchstone file.
- 
-     Returns:
-         SParameterData with loaded S-parameters.
- 
-     Raises:
-         LoaderError: If file cannot be read.
-         FormatError: If file format is invalid.
- 
-     Example:
-         >>> s_params = load_touchstone("cable.s2p")
-         >>> print(f"Loaded {s_params.n_ports}-port, {len(s_params.frequencies)} points")
- 
-     References:
-         Touchstone 2.0 File Format Specification
-     """
-     path = Path(path)
- 
-     if not path.exists():
-         raise LoaderError(f"File not found: {path}")
- 
-     # Determine number of ports from extension
-     suffix = path.suffix.lower()
-     match = re.match(r"\.s(\d+)p", suffix)
-     if not match:
-         raise FormatError(f"Unsupported file extension: {suffix}")
- 
-     n_ports = int(match.group(1))
- 
-     try:
-         with open(path) as f:
-             lines = f.readlines()
-     except Exception as e:
-         raise LoaderError(f"Failed to read file: {e}") # noqa: B904
- 
-     return _parse_touchstone(lines, n_ports, str(path))
- 
- 
- def _parse_touchstone(
-     lines: list[str],
-     n_ports: int,
-     source_file: str,
- ) -> SParameterData:
-     """Parse Touchstone file content.
- 
-     Args:
-         lines: File lines.
-         n_ports: Number of ports.
-         source_file: Source file path.
- 
-     Returns:
-         Parsed SParameterData.
- 
-     Raises:
-         FormatError: If file format is invalid.
-     """
-     comments = []
-     option_line = None
-     data_lines = []
- 
-     for line in lines:
-         line = line.strip()
- 
-         if not line:
-             continue
- 
-         if line.startswith("!"):
-             comments.append(line[1:].strip())
-         elif line.startswith("#"):
-             option_line = line
-         else:
-             data_lines.append(line)
- 
-     # Parse option line
-     freq_unit = 1e9 # Default GHz
-     format_type = "ma" # Default MA (magnitude/angle)
-     z0 = 50.0
- 
-     if option_line:
-         option_line = option_line.lower()
-         parts = option_line.split()
- 
-         for i, part in enumerate(parts):
-             if part in ("hz", "khz", "mhz", "ghz"):
-                 freq_unit = {
-                     "hz": 1.0,
-                     "khz": 1e3,
-                     "mhz": 1e6,
-                     "ghz": 1e9,
-                 }[part]
-             elif part in ("db", "ma", "ri"):
-                 format_type = part
-             elif part == "r":
-                 # Reference impedance follows
-                 if i + 1 < len(parts):
-                     with contextlib.suppress(ValueError):
-                         z0 = float(parts[i + 1])
- 
-     # Parse data
-     frequencies = []
-     s_data = []
- 
-     # Number of S-parameters per frequency
-     n_s_params = n_ports * n_ports
- 
-     i = 0
-     while i < len(data_lines):
-         # First line has frequency and first S-parameters
-         parts = data_lines[i].split()
- 
-         if len(parts) < 1:
-             i += 1
-             continue
- 
-         freq = float(parts[0]) * freq_unit
-         frequencies.append(freq)
- 
-         # Collect all S-parameter values for this frequency
-         s_values = []
- 
-         # Add values from first line
-         for j in range(1, len(parts), 2):
-             if j + 1 < len(parts):
-                 val1 = float(parts[j])
-                 val2 = float(parts[j + 1])
-                 s_values.append((val1, val2))
- 
-         i += 1
- 
-         # Continue collecting from subsequent lines if needed
-         while len(s_values) < n_s_params and i < len(data_lines):
-             parts = data_lines[i].split()
- 
-             # Check if this is a new frequency (has odd number of values)
-             try:
-                 float(parts[0])
-                 if len(parts) % 2 == 1:
-                     break # New frequency line
-             except (ValueError, IndexError):
-                 pass
- 
-             for j in range(0, len(parts), 2):
-                 if j + 1 < len(parts):
-                     val1 = float(parts[j])
-                     val2 = float(parts[j + 1])
-                     s_values.append((val1, val2))
- 
-             i += 1
- 
-         # Convert to complex based on format
-         s_complex = []
-         for val1, val2 in s_values:
-             if format_type == "ri":
-                 # Real/Imaginary
-                 s_complex.append(complex(val1, val2))
-             elif format_type == "ma":
-                 # Magnitude/Angle (degrees)
-                 mag = val1
-                 angle_rad = np.radians(val2)
-                 s_complex.append(mag * np.exp(1j * angle_rad))
-             elif format_type == "db":
-                 # dB/Angle (degrees)
-                 mag = 10 ** (val1 / 20)
-                 angle_rad = np.radians(val2)
-                 s_complex.append(mag * np.exp(1j * angle_rad))
- 
-         # Reshape into matrix
-         if len(s_complex) == n_s_params:
-             s_matrix = np.array(s_complex).reshape(n_ports, n_ports)
-             s_data.append(s_matrix)
- 
-     if len(frequencies) == 0:
-         raise FormatError("No valid frequency points found")
- 
-     frequencies_arr = np.array(frequencies, dtype=np.float64)
-     s_matrix_arr = np.array(s_data, dtype=np.complex128)
- 
-     return SParameterData(
-         frequencies=frequencies_arr,
-         s_matrix=s_matrix_arr,
-         n_ports=n_ports,
-         z0=z0,
-         format=format_type,
-         source_file=source_file,
-         comments=comments,
-     )
- 
- 
  def return_loss(
      s_params: SParameterData,
      frequency: float | None = None,
@@ -474,11 +277,30 @@ def _abcd_to_s_single(abcd: NDArray[np.complex128], z0: float) -> NDArray[np.complex128]:
      return np.array([[S11, S12], [S21, S22]], dtype=np.complex128)
  
  
+ # Backward compatibility: load_touchstone moved to loaders module
+ # Import with deprecation warning
+ def __getattr__(name: str) -> Any:
+     """Provide backward compatibility for load_touchstone."""
+     if name == "load_touchstone":
+         import warnings
+ 
+         from oscura.loaders.touchstone import load_touchstone
+ 
+         warnings.warn(
+             "Importing load_touchstone from oscura.analyzers.signal_integrity.sparams "
+             "is deprecated. Use 'from oscura.loaders import load_touchstone' instead.",
+             DeprecationWarning,
+             stacklevel=2,
+         )
+         return load_touchstone
+     raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
+ 
+ 
  __all__ = [
      "SParameterData",
      "abcd_to_s",
      "insertion_loss",
-     "load_touchstone",
+     # Note: load_touchstone moved to oscura.loaders module (backward compat via __getattr__)
      "return_loss",
      "s_to_abcd",
  ]
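
The __getattr__ shim above relies on PEP 562 (module-level __getattr__, Python 3.7+): the hook fires only for names the module does not otherwise define, so the old import path keeps resolving while emitting a DeprecationWarning. A sketch of what a caller of the old path should observe (behavior inferred from the shim above):

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        from oscura.analyzers.signal_integrity.sparams import load_touchstone  # old path, via the shim
    print([str(w.message) for w in caught])  # expect the deprecation notice

    from oscura.loaders import load_touchstone  # new, warning-free path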
oscura/analyzers/spectral/fft.py
@@ -4,8 +4,6 @@ This module provides FFT-based spectral analysis including chunked processing
  for large datasets.
  """
  
- from __future__ import annotations
- 
  from typing import Any
  
  import numpy as np
oscura/analyzers/statistical/entropy.py
@@ -17,6 +17,8 @@ Requirements:
      - RE-ENT-002: Byte Frequency Distribution
  """
  
+ from __future__ import annotations
+ 
  from typing import TYPE_CHECKING, Union
  
  import numpy as np
@@ -134,9 +136,7 @@ entropy = shannon_entropy
  DataType = Union[bytes, bytearray, "NDArray[np.uint8]"]
  
  
- def entropy_windowed(
-     data: DataType, window_size: int = 256, step: int = 1
- ) -> "NDArray[np.float64]":
+ def entropy_windowed(data: DataType, window_size: int = 256, step: int = 1) -> NDArray[np.float64]:
      """Windowed entropy calculation (alias for sliding_entropy)."""
      return sliding_entropy(data, window_size=window_size, step=step)
  
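
The one-line signature above is the whole public change here; usage is unchanged. A minimal call, assuming sliding_entropy returns one entropy value per window position as its name suggests:

    from oscura.analyzers.statistical.entropy import entropy_windowed

    data = b"\x00" * 64 + bytes(range(192))  # low-entropy run followed by varied bytes
    profile = entropy_windowed(data, window_size=32, step=8)
    print(profile.round(2))  # entropy should rise where the varied bytes begin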
oscura/analyzers/statistical/checksum.py
@@ -5,6 +5,8 @@ This module provides tools for detecting checksum and CRC fields in binary
  messages by analyzing field correlations and testing common algorithms.
  """
  
+ from __future__ import annotations
+ 
  from dataclasses import dataclass, field
  from typing import TYPE_CHECKING, Any, Literal, Union
  
@@ -6,6 +6,8 @@ binary, compressed, encrypted, or padding using multiple statistical tests
6
6
  and heuristics.
7
7
  """
8
8
 
9
+ from __future__ import annotations
10
+
9
11
  from dataclasses import dataclass, field
10
12
  from typing import Any, Literal, Union
11
13