oscura-0.3.0-py3-none-any.whl → oscura-0.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. oscura/__init__.py +1 -7
  2. oscura/acquisition/__init__.py +147 -0
  3. oscura/acquisition/file.py +255 -0
  4. oscura/acquisition/hardware.py +186 -0
  5. oscura/acquisition/saleae.py +340 -0
  6. oscura/acquisition/socketcan.py +315 -0
  7. oscura/acquisition/streaming.py +38 -0
  8. oscura/acquisition/synthetic.py +229 -0
  9. oscura/acquisition/visa.py +376 -0
  10. oscura/analyzers/__init__.py +3 -0
  11. oscura/analyzers/digital/__init__.py +48 -0
  12. oscura/analyzers/digital/clock.py +9 -1
  13. oscura/analyzers/digital/edges.py +1 -1
  14. oscura/analyzers/digital/extraction.py +195 -0
  15. oscura/analyzers/digital/ic_database.py +498 -0
  16. oscura/analyzers/digital/timing.py +41 -11
  17. oscura/analyzers/digital/timing_paths.py +339 -0
  18. oscura/analyzers/digital/vintage.py +377 -0
  19. oscura/analyzers/digital/vintage_result.py +148 -0
  20. oscura/analyzers/protocols/__init__.py +22 -1
  21. oscura/analyzers/protocols/parallel_bus.py +449 -0
  22. oscura/analyzers/side_channel/__init__.py +52 -0
  23. oscura/analyzers/side_channel/power.py +690 -0
  24. oscura/analyzers/side_channel/timing.py +369 -0
  25. oscura/analyzers/signal_integrity/sparams.py +1 -1
  26. oscura/automotive/__init__.py +4 -2
  27. oscura/automotive/can/patterns.py +3 -1
  28. oscura/automotive/can/session.py +277 -78
  29. oscura/automotive/can/state_machine.py +5 -2
  30. oscura/builders/__init__.py +9 -11
  31. oscura/builders/signal_builder.py +99 -191
  32. oscura/core/exceptions.py +5 -1
  33. oscura/export/__init__.py +12 -0
  34. oscura/export/wavedrom.py +430 -0
  35. oscura/exporters/json_export.py +47 -0
  36. oscura/exporters/vintage_logic_csv.py +247 -0
  37. oscura/loaders/__init__.py +1 -0
  38. oscura/loaders/chipwhisperer.py +393 -0
  39. oscura/loaders/touchstone.py +1 -1
  40. oscura/reporting/__init__.py +7 -0
  41. oscura/reporting/vintage_logic_report.py +523 -0
  42. oscura/session/session.py +54 -46
  43. oscura/sessions/__init__.py +70 -0
  44. oscura/sessions/base.py +323 -0
  45. oscura/sessions/blackbox.py +640 -0
  46. oscura/sessions/generic.py +189 -0
  47. oscura/utils/autodetect.py +5 -1
  48. oscura/visualization/digital_advanced.py +718 -0
  49. oscura/visualization/figure_manager.py +156 -0
  50. {oscura-0.3.0.dist-info → oscura-0.5.0.dist-info}/METADATA +86 -5
  51. {oscura-0.3.0.dist-info → oscura-0.5.0.dist-info}/RECORD +54 -33
  52. oscura/automotive/dtc/data.json +0 -2763
  53. oscura/schemas/bus_configuration.json +0 -322
  54. oscura/schemas/device_mapping.json +0 -182
  55. oscura/schemas/packet_format.json +0 -418
  56. oscura/schemas/protocol_definition.json +0 -363
  57. {oscura-0.3.0.dist-info → oscura-0.5.0.dist-info}/WHEEL +0 -0
  58. {oscura-0.3.0.dist-info → oscura-0.5.0.dist-info}/entry_points.txt +0 -0
  59. {oscura-0.3.0.dist-info → oscura-0.5.0.dist-info}/licenses/LICENSE +0 -0
oscura/analyzers/side_channel/timing.py
@@ -0,0 +1,369 @@
+ """Timing side-channel analysis.
+
+ This module implements timing attack analysis for extracting secrets from
+ execution time variations in cryptographic implementations.
+
+ Example:
+     >>> from oscura.analyzers.side_channel.timing import TimingAnalyzer
+     >>> analyzer = TimingAnalyzer(confidence_level=0.95)
+     >>> result = analyzer.analyze(timings, inputs)
+     >>> if result.has_leak:
+     ...     print(f"Timing leak detected with {result.confidence:.2%} confidence")
+
+ References:
+     Kocher "Timing Attacks on Implementations of Diffie-Hellman, RSA, DSS" (CRYPTO 1996)
+     Brumley & Boneh "Remote Timing Attacks are Practical" (USENIX Security 2003)
+ """
+
+ from __future__ import annotations
+
+ from collections.abc import Callable
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING, Any
+
+ import numpy as np
+ from scipy import stats
+
+ if TYPE_CHECKING:
+     from numpy.typing import NDArray
+
+ __all__ = [
+     "TimingAnalyzer",
+     "TimingAttackResult",
+     "TimingLeak",
+ ]
+
+
+ @dataclass
+ class TimingLeak:
+     """Detected timing leak information.
+
+     Attributes:
+         input_bit: Bit position causing leak (if bit-level analysis).
+         input_byte: Byte position causing leak (if byte-level analysis).
+         mean_difference: Mean timing difference between groups.
+         t_statistic: T-test statistic.
+         p_value: Statistical p-value.
+         confidence: Confidence level (1 - p_value).
+         effect_size: Cohen's d effect size.
+     """
+
+     input_bit: int | None
+     input_byte: int | None
+     mean_difference: float
+     t_statistic: float
+     p_value: float
+     confidence: float
+     effect_size: float
+
+     @property
+     def is_significant(self) -> bool:
+         """Check if leak is statistically significant (p < 0.05)."""
+         return self.p_value < 0.05
+
+
+ @dataclass
+ class TimingAttackResult:
+     """Result of timing attack analysis.
+
+     Attributes:
+         has_leak: Whether timing leak detected.
+         leaks: List of detected leaks.
+         confidence: Overall confidence in leak detection.
+         timing_statistics: Summary statistics of timings.
+     """
+
+     has_leak: bool
+     leaks: list[TimingLeak]
+     confidence: float
+     timing_statistics: dict[str, float]
+
+
+ class TimingAnalyzer:
+     """Timing side-channel attack analyzer.
+
+     Detects timing leaks in cryptographic implementations by analyzing
+     execution time variations correlated with input data.
+
+     Args:
+         confidence_level: Confidence level for leak detection (default 0.95).
+         min_samples: Minimum samples required for analysis (default 100).
+
+     Example:
+         >>> analyzer = TimingAnalyzer(confidence_level=0.99)
+         >>> # Measure RSA decryption times
+         >>> timings = measure_decryption_times(ciphertexts)
+         >>> result = analyzer.analyze(timings, ciphertexts)
+         >>> for leak in result.leaks:
+         ...     print(f"Leak at bit {leak.input_bit}: p={leak.p_value:.6f}")
+
+     References:
+         Kocher "Timing Attacks on Implementations of Diffie-Hellman, RSA, DSS"
+     """
+
+     def __init__(self, confidence_level: float = 0.95, min_samples: int = 100) -> None:
+         """Initialize timing analyzer.
+
+         Args:
+             confidence_level: Confidence level (0.0-1.0).
+             min_samples: Minimum number of samples.
+
+         Raises:
+             ValueError: If parameters out of range.
+         """
+         if not 0.0 < confidence_level < 1.0:
+             raise ValueError(f"confidence_level must be in (0, 1), got {confidence_level}")
+         if min_samples < 10:
+             raise ValueError(f"min_samples must be >= 10, got {min_samples}")
+
+         self.confidence_level = confidence_level
+         self.min_samples = min_samples
+
+     def analyze(
+         self,
+         timings: NDArray[np.floating[Any]],
+         inputs: NDArray[np.integer[Any]],
+     ) -> TimingAttackResult:
+         """Analyze timing measurements for leaks.
+
+         Performs statistical analysis (t-tests) to detect correlations
+         between input bits/bytes and execution time.
+
+         Args:
+             timings: Execution times (n_samples,).
+             inputs: Input values (n_samples, input_size) or (n_samples,).
+
+         Returns:
+             TimingAttackResult with detected leaks.
+
+         Raises:
+             ValueError: If inputs invalid or insufficient samples.
+
+         Example:
+             >>> timings = np.array([...])  # Measured execution times
+             >>> inputs = np.random.randint(0, 256, (1000, 16), dtype=np.uint8)
+             >>> result = analyzer.analyze(timings, inputs)
+             >>> print(f"Leaks detected: {len(result.leaks)}")
+         """
+         if len(timings) < self.min_samples:
+             raise ValueError(f"Insufficient samples: {len(timings)} < {self.min_samples}")
+
+         if len(timings) != len(inputs):
+             raise ValueError(f"Timings ({len(timings)}) and inputs ({len(inputs)}) length mismatch")
+
+         # Compute timing statistics
+         timing_stats = {
+             "mean": float(np.mean(timings)),
+             "std": float(np.std(timings)),
+             "min": float(np.min(timings)),
+             "max": float(np.max(timings)),
+             "median": float(np.median(timings)),
+         }
+
+         # Detect leaks
+         leaks: list[TimingLeak] = []
+
+         if inputs.ndim == 1:
+             # Single byte per input
+             byte_leaks = self._analyze_byte_leaks(timings, inputs, byte_pos=0)
+             leaks.extend(byte_leaks)
+         else:
+             # Multiple bytes per input
+             for byte_pos in range(inputs.shape[1]):
+                 byte_leaks = self._analyze_byte_leaks(timings, inputs[:, byte_pos], byte_pos)
+                 leaks.extend(byte_leaks)
+
+         # Overall confidence is max of individual leak confidences
+         overall_confidence = max([leak.confidence for leak in leaks], default=0.0)
+
+         return TimingAttackResult(
+             has_leak=len(leaks) > 0,
+             leaks=leaks,
+             confidence=overall_confidence,
+             timing_statistics=timing_stats,
+         )
+
+     def _analyze_byte_leaks(
+         self,
+         timings: NDArray[np.floating[Any]],
+         byte_values: NDArray[np.integer[Any]],
+         byte_pos: int,
+     ) -> list[TimingLeak]:
+         """Analyze timing leaks for specific byte position.
+
+         Args:
+             timings: Execution times.
+             byte_values: Byte values at this position.
+             byte_pos: Byte position index.
+
+         Returns:
+             List of detected leaks for this byte.
+         """
+         leaks: list[TimingLeak] = []
+
+         # Test each bit position in the byte
+         for bit_pos in range(8):
+             # Partition timings by bit value
+             bit_mask = 1 << bit_pos
+             bit_set = (byte_values & bit_mask) != 0
+
+             timings_0 = timings[~bit_set]
+             timings_1 = timings[bit_set]
+
+             # Need sufficient samples in each group
+             if len(timings_0) < 10 or len(timings_1) < 10:
+                 continue
+
+             # Welch's t-test (unequal variances)
+             t_stat, p_value = stats.ttest_ind(timings_0, timings_1, equal_var=False)
+
+             # Check for significance
+             alpha = 1.0 - self.confidence_level
+             if p_value < alpha:
+                 # Calculate effect size (Cohen's d)
+                 mean_diff = float(np.mean(timings_1) - np.mean(timings_0))
+                 pooled_std = np.sqrt((np.var(timings_0) + np.var(timings_1)) / 2)
+                 effect_size = mean_diff / pooled_std if pooled_std > 0 else 0.0
+
+                 leak = TimingLeak(
+                     input_bit=bit_pos,
+                     input_byte=byte_pos,
+                     mean_difference=mean_diff,
+                     t_statistic=float(t_stat),
+                     p_value=float(p_value),
+                     confidence=1.0 - float(p_value),
+                     effect_size=float(effect_size),
+                 )
+                 leaks.append(leak)
+
+         return leaks
+
+     def analyze_with_partitioning(
+         self,
+         timings: NDArray[np.floating[Any]],
+         inputs: NDArray[np.integer[Any]],
+         partition_func: Callable[[NDArray[np.integer[Any]]], NDArray[np.bool_]],
+     ) -> tuple[float, float, float]:
+         """Analyze timing with custom partitioning function.
+
+         Allows custom grouping criteria beyond bit values.
+
+         Args:
+             timings: Execution times.
+             inputs: Input values.
+             partition_func: Function mapping inputs to boolean partition.
+
+         Returns:
+             Tuple of (mean_difference, t_statistic, p_value).
+
+         Example:
+             >>> # Partition by high/low byte value
+             >>> def partition(x):
+             ...     return x >= 128
+             >>> diff, t_stat, p_val = analyzer.analyze_with_partitioning(
+             ...     timings, inputs, partition
+             ... )
+         """
+         partition = partition_func(inputs)
+
+         timings_0 = timings[~partition]
+         timings_1 = timings[partition]
+
+         if len(timings_0) < 10 or len(timings_1) < 10:
+             return 0.0, 0.0, 1.0
+
+         t_stat, p_value = stats.ttest_ind(timings_0, timings_1, equal_var=False)
+         mean_diff = float(np.mean(timings_1) - np.mean(timings_0))
+
+         return mean_diff, float(t_stat), float(p_value)
+
+     def detect_outliers(
+         self,
+         timings: NDArray[np.floating[Any]],
+         threshold: float = 3.0,
+     ) -> NDArray[np.bool_]:
+         """Detect outlier measurements using modified Z-score.
+
+         Outliers may indicate measurement errors or specific attack scenarios.
+
+         Args:
+             timings: Execution times.
+             threshold: Z-score threshold for outliers (default 3.0).
+
+         Returns:
+             Boolean array marking outliers.
+
+         Example:
+             >>> outliers = analyzer.detect_outliers(timings)
+             >>> clean_timings = timings[~outliers]
+         """
+         median = np.median(timings)
+         mad = np.median(np.abs(timings - median))
+
+         # Modified Z-score (more robust than standard Z-score)
+         if mad == 0:
+             # All values identical
+             return np.zeros(len(timings), dtype=bool)
+
+         modified_z_scores = 0.6745 * (timings - median) / mad
+         outliers: NDArray[np.bool_] = np.abs(modified_z_scores) > threshold
+
+         return outliers
+
+     def compute_mutual_information(
+         self,
+         timings: NDArray[np.floating[Any]],
+         inputs: NDArray[np.integer[Any]],
+         n_bins: int = 10,
+     ) -> float:
+         """Compute mutual information between timings and inputs.
+
+         Measures information leakage in bits.
+
+         Args:
+             timings: Execution times.
+             inputs: Input values.
+             n_bins: Number of bins for discretization.
+
+         Returns:
+             Mutual information in bits.
+
+         Example:
+             >>> mi = analyzer.compute_mutual_information(timings, inputs)
+             >>> print(f"Information leakage: {mi:.4f} bits")
+         """
+         # Discretize timings into bins
+         timing_bins = np.digitize(
+             timings,
+             bins=np.linspace(np.min(timings), np.max(timings), n_bins),
+         )
+
+         # Discretize inputs
+         if inputs.ndim == 1:
+             input_bins = inputs
+         else:
+             # Hash multi-dimensional inputs
+             input_bins = np.sum(inputs * np.arange(inputs.shape[1]), axis=1)
+
+         # Compute joint histogram
+         joint_hist, _, _ = np.histogram2d(
+             timing_bins,
+             input_bins,
+             bins=[n_bins, len(np.unique(input_bins))],
+         )
+         joint_prob = joint_hist / np.sum(joint_hist)
+
+         # Marginal distributions
+         timing_prob = np.sum(joint_prob, axis=1)
+         input_prob = np.sum(joint_prob, axis=0)
+
+         # Mutual information: I(X; Y) = sum p(x,y) * log(p(x,y) / (p(x) * p(y)))
+         mi = 0.0
+         for i in range(len(timing_prob)):
+             for j in range(len(input_prob)):
+                 if joint_prob[i, j] > 0:
+                     mi += joint_prob[i, j] * np.log2(
+                         joint_prob[i, j] / (timing_prob[i] * input_prob[j])
+                     )
+
+         return float(mi)
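Since this new module is the bulk of the 0.5.0 release, a quick end-to-end smoke test is worth sketching. The snippet below is illustrative only: it assumes the module exactly as added in the hunk above, and it synthesizes timing data with a deliberate delay planted on bit 0 so that analyze() has a real leak to find. (Background on one constant above: the 0.6745 in detect_outliers is the inverse normal CDF at 0.75, which rescales the MAD so modified Z-scores are comparable to ordinary Z-scores on Gaussian data.)

import numpy as np

from oscura.analyzers.side_channel.timing import TimingAnalyzer

rng = np.random.default_rng(seed=42)
n = 5000

# Random single-byte inputs and baseline timing jitter.
inputs = rng.integers(0, 256, size=n, dtype=np.uint8)
timings = rng.normal(loc=1000.0, scale=10.0, size=n)

# Planted leak: bit 0 of the input adds a constant 50-unit delay.
timings += np.where(inputs & 1, 50.0, 0.0)

analyzer = TimingAnalyzer(confidence_level=0.99)

# Optional pre-filtering with the module's MAD-based outlier detector.
outliers = analyzer.detect_outliers(timings)
result = analyzer.analyze(timings[~outliers], inputs[~outliers])

print(f"has_leak={result.has_leak}, confidence={result.confidence:.4f}")
for leak in result.leaks:
    print(f"byte {leak.input_byte} bit {leak.input_bit}: "
          f"d={leak.effect_size:.2f}, p={leak.p_value:.2e}")

# The same data feeds the histogram-based leakage estimate.
mi = analyzer.compute_mutual_information(timings, inputs)
print(f"mutual information: {mi:.4f} bits")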
oscura/analyzers/signal_integrity/sparams.py
@@ -300,7 +300,7 @@ __all__ = [
      "SParameterData",
      "abcd_to_s",
      "insertion_loss",
-     "load_touchstone",  # Backward compatibility via __getattr__  # noqa: F822
+     # Note: load_touchstone moved to oscura.loaders module (backward compat via __getattr__)
      "return_loss",
      "s_to_abcd",
  ]
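The replacement comment above points at a module-level __getattr__ hook (PEP 562) that keeps the old import path alive after load_touchstone moved to oscura.loaders. A minimal sketch of that pattern, hypothetical rather than oscura's verbatim shim, would sit at the bottom of sparams.py:

from typing import Any


def __getattr__(name: str) -> Any:
    # PEP 562: called for attributes (including `from ... import` names)
    # not found in the module namespace.
    if name == "load_touchstone":
        # Lazy import defers the dependency on oscura.loaders until first use.
        from oscura.loaders import load_touchstone
        return load_touchstone
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")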
oscura/automotive/__init__.py
@@ -17,8 +17,10 @@ Key features:
 
  Example:
      >>> from oscura.automotive.can import CANSession
+     >>> from oscura.automotive.sources import FileSource
      >>> # Load automotive log file
-     >>> session = CANSession.from_log("capture.blf")
+     >>> session = CANSession(name="Analysis")
+     >>> session.add_recording("main", FileSource("capture.blf"))
      >>> # View message inventory
      >>> inventory = session.inventory()
      >>> # Analyze specific message
@@ -40,7 +42,7 @@ Example:
      P0420: Catalyst System Efficiency Below Threshold (Bank 1)
  """
 
- __version__ = "0.3.0"  # pragma: no cover
+ __version__ = "0.5.0"  # pragma: no cover
 
  __all__ = [
      "CANMessage",
oscura/automotive/can/patterns.py
@@ -113,7 +113,9 @@ class PatternAnalyzer:
      CAN messages, useful for understanding message dependencies and control flows.
 
      Example - Find message pairs:
-         >>> session = CANSession.from_log("capture.blf")
+         >>> from oscura.automotive.sources import FileSource
+         >>> session = CANSession(name="Analysis")
+         >>> session.add_recording("main", FileSource("capture.blf"))
          >>> pairs = PatternAnalyzer.find_message_pairs(session, time_window_ms=100)
          >>> for pair in pairs:
          ...     print(pair)
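The same construction change lands in PatternAnalyzer's doctest. As a hedged sketch, sweeping the correlation window is a natural follow-on; the import path for PatternAnalyzer and the window values are assumptions here, and only the calls shown in the hunk are confirmed by the diff:

from oscura.automotive.can import CANSession
from oscura.automotive.can.patterns import PatternAnalyzer
from oscura.automotive.sources import FileSource

session = CANSession(name="Analysis")
session.add_recording("main", FileSource("capture.blf"))

# Wider windows admit more coincidental pairings; compare a few sizes.
for window_ms in (10, 50, 100):
    pairs = PatternAnalyzer.find_message_pairs(session, time_window_ms=window_ms)
    print(f"{window_ms} ms window: {len(pairs)} candidate message pairs")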