oscura 0.3.0-py3-none-any.whl → 0.4.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. oscura/__init__.py +1 -7
  2. oscura/acquisition/__init__.py +147 -0
  3. oscura/acquisition/file.py +255 -0
  4. oscura/acquisition/hardware.py +186 -0
  5. oscura/acquisition/saleae.py +340 -0
  6. oscura/acquisition/socketcan.py +315 -0
  7. oscura/acquisition/streaming.py +38 -0
  8. oscura/acquisition/synthetic.py +229 -0
  9. oscura/acquisition/visa.py +376 -0
  10. oscura/analyzers/__init__.py +3 -0
  11. oscura/analyzers/digital/clock.py +9 -1
  12. oscura/analyzers/digital/edges.py +1 -1
  13. oscura/analyzers/digital/timing.py +41 -11
  14. oscura/analyzers/side_channel/__init__.py +52 -0
  15. oscura/analyzers/side_channel/power.py +690 -0
  16. oscura/analyzers/side_channel/timing.py +369 -0
  17. oscura/analyzers/signal_integrity/sparams.py +1 -1
  18. oscura/automotive/__init__.py +4 -2
  19. oscura/automotive/can/patterns.py +3 -1
  20. oscura/automotive/can/session.py +277 -78
  21. oscura/automotive/can/state_machine.py +5 -2
  22. oscura/builders/__init__.py +9 -11
  23. oscura/builders/signal_builder.py +99 -191
  24. oscura/core/exceptions.py +5 -1
  25. oscura/loaders/__init__.py +1 -0
  26. oscura/loaders/chipwhisperer.py +393 -0
  27. oscura/loaders/touchstone.py +1 -1
  28. oscura/session/session.py +54 -46
  29. oscura/sessions/__init__.py +70 -0
  30. oscura/sessions/base.py +323 -0
  31. oscura/sessions/blackbox.py +640 -0
  32. oscura/sessions/generic.py +189 -0
  33. {oscura-0.3.0.dist-info → oscura-0.4.0.dist-info}/METADATA +86 -5
  34. {oscura-0.3.0.dist-info → oscura-0.4.0.dist-info}/RECORD +37 -21
  35. {oscura-0.3.0.dist-info → oscura-0.4.0.dist-info}/WHEEL +0 -0
  36. {oscura-0.3.0.dist-info → oscura-0.4.0.dist-info}/entry_points.txt +0 -0
  37. {oscura-0.3.0.dist-info → oscura-0.4.0.dist-info}/licenses/LICENSE +0 -0
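Most of 0.4.0 is new surface area: acquisition sources, side-channel analyzers, the sessions package, and the ChipWhisperer loader whose full text appears below. As a quick orientation before the per-file hunks, here is a minimal usage sketch assembled only from the docstrings and signatures in this diff; the file name and sample rate are illustrative.

    from oscura.loaders.chipwhisperer import load_chipwhisperer, to_waveform_trace

    # Auto-detects .npy vs .trs by extension and returns a ChipWhispererTraceSet
    traceset = load_chipwhisperer("capture_data.npy", sample_rate=5e6)
    print(f"Loaded {traceset.n_traces} traces of {traceset.n_samples} samples")

    # Sibling textin.npy / textout.npy / keys.npy files are picked up automatically
    if traceset.plaintexts is not None:
        print("Plaintexts available")

    # Convert one power trace into oscura's core WaveformTrace type
    trace = to_waveform_trace(traceset, trace_index=0)
    print(f"Sample rate: {trace.metadata.sample_rate} Hz")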
oscura/loaders/chipwhisperer.py ADDED
@@ -0,0 +1,393 @@
+"""ChipWhisperer trace loader.
+
+This module loads power/EM traces from ChipWhisperer capture files (.npy, .trs).
+
+ChipWhisperer is a widely-used open-source platform for side-channel analysis
+and hardware security testing.
+
+Example:
+    >>> from oscura.loaders.chipwhisperer import load_chipwhisperer
+    >>> traces, metadata = load_chipwhisperer("capture_data.npy")
+    >>> print(f"Loaded {len(traces)} traces")
+
+References:
+    ChipWhisperer Project: https://github.com/newaetech/chipwhisperer
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+
+import numpy as np
+
+from oscura.core.exceptions import FormatError, LoaderError
+from oscura.core.types import TraceMetadata, WaveformTrace
+
+if TYPE_CHECKING:
+    from os import PathLike
+
+    from numpy.typing import NDArray
+
+__all__ = [
+    "ChipWhispererTraceSet",
+    "load_chipwhisperer",
+    "load_chipwhisperer_npy",
+    "load_chipwhisperer_trs",
+]
+
+
+@dataclass
+class ChipWhispererTraceSet:
+    """ChipWhisperer trace set container.
+
+    Attributes:
+        traces: Power/EM traces (n_traces, n_samples).
+        plaintexts: Input plaintexts (n_traces, plaintext_size).
+        ciphertexts: Output ciphertexts (n_traces, ciphertext_size).
+        keys: Encryption keys if known (n_traces, key_size).
+        sample_rate: Sample rate in Hz.
+        metadata: Additional metadata.
+    """
+
+    traces: NDArray[np.floating[Any]]
+    plaintexts: NDArray[np.integer[Any]] | None = None
+    ciphertexts: NDArray[np.integer[Any]] | None = None
+    keys: NDArray[np.integer[Any]] | None = None
+    sample_rate: float = 1e6
+    metadata: dict[str, object] | None = None
+
+    @property
+    def n_traces(self) -> int:
+        """Number of traces."""
+        return int(self.traces.shape[0])
+
+    @property
+    def n_samples(self) -> int:
+        """Number of samples per trace."""
+        return int(self.traces.shape[1])
+
+
+def load_chipwhisperer(
+    path: str | PathLike[str],
+    *,
+    sample_rate: float | None = None,
+) -> ChipWhispererTraceSet:
+    """Load ChipWhisperer traces from file.
+
+    Auto-detects file format (.npy, .trs) and delegates to appropriate loader.
+
+    Args:
+        path: Path to ChipWhisperer trace file.
+        sample_rate: Override sample rate (if not in file).
+
+    Returns:
+        ChipWhispererTraceSet with traces and metadata.
+
+    Raises:
+        LoaderError: If file cannot be loaded.
+        FormatError: If file format invalid.
+
+    Example:
+        >>> traceset = load_chipwhisperer("traces.npy")
+        >>> print(f"Loaded {traceset.n_traces} traces")
+        >>> print(f"Samples per trace: {traceset.n_samples}")
+    """
+    path = Path(path)
+
+    if not path.exists():
+        raise LoaderError("File not found", file_path=str(path))
+
+    ext = path.suffix.lower()
+
+    if ext == ".npy":
+        return load_chipwhisperer_npy(path, sample_rate=sample_rate)
+    elif ext == ".trs":
+        return load_chipwhisperer_trs(path, sample_rate=sample_rate)
+    else:
+        raise FormatError(
+            f"Unsupported ChipWhisperer format: {ext}",
+            file_path=str(path),
+            expected=".npy or .trs",
+            got=ext,
+        )
+
+
+def load_chipwhisperer_npy(
+    path: str | PathLike[str],
+    *,
+    sample_rate: float | None = None,
+) -> ChipWhispererTraceSet:
+    """Load ChipWhisperer traces from .npy file.
+
+    ChipWhisperer often saves trace data as numpy .npy files with
+    associated metadata in .npy files (textin.npy, textout.npy, etc.).
+
+    Args:
+        path: Path to traces .npy file.
+        sample_rate: Override sample rate.
+
+    Returns:
+        ChipWhispererTraceSet with traces and metadata.
+
+    Raises:
+        LoaderError: If file cannot be loaded.
+
+    Example:
+        >>> traceset = load_chipwhisperer_npy("traces.npy")
+        >>> # Look for associated files
+        >>> if traceset.plaintexts is not None:
+        ...     print("Plaintexts available")
+    """
+    path = Path(path)
+    base_path = path.parent
+    base_name = path.stem
+
+    try:
+        # Load main trace data
+        traces = np.load(path)
+
+        # Ensure 2D array (n_traces, n_samples)
+        if traces.ndim == 1:
+            traces = traces.reshape(1, -1)
+        elif traces.ndim > 2:
+            raise FormatError(
+                f"Expected 1D or 2D trace array, got {traces.ndim}D",
+                file_path=str(path),
+            )
+
+    except (OSError, ValueError) as e:
+        # Catch file I/O errors, but let FormatError propagate
+        raise LoaderError(
+            "Failed to load trace file",
+            file_path=str(path),
+            details=str(e),
+        ) from e
+
+    # Try to load associated files (common ChipWhisperer naming)
+    plaintexts = None
+    ciphertexts = None
+    keys = None
+
+    # Look for textin.npy (plaintexts)
+    textin_path = base_path / f"{base_name}_textin.npy"
+    if not textin_path.exists():
+        textin_path = base_path / "textin.npy"
+    if textin_path.exists():
+        try:
+            plaintexts = np.load(textin_path)
+        except Exception:
+            pass  # Optional metadata file, silently ignore if missing or corrupt
+
+    # Look for textout.npy (ciphertexts)
+    textout_path = base_path / f"{base_name}_textout.npy"
+    if not textout_path.exists():
+        textout_path = base_path / "textout.npy"
+    if textout_path.exists():
+        try:
+            ciphertexts = np.load(textout_path)
+        except Exception:
+            pass
+
+    # Look for keys.npy
+    keys_path = base_path / f"{base_name}_keys.npy"
+    if not keys_path.exists():
+        keys_path = base_path / "keys.npy"
+    if keys_path.exists():
+        try:
+            keys = np.load(keys_path)
+        except Exception:
+            pass  # Optional metadata file, silently ignore if corrupt
+
+    # Use default sample rate if not specified
+    if sample_rate is None:
+        sample_rate = 1e6  # Default 1 MS/s
+
+    return ChipWhispererTraceSet(
+        traces=traces.astype(np.float64),
+        plaintexts=plaintexts.astype(np.uint8) if plaintexts is not None else None,
+        ciphertexts=ciphertexts.astype(np.uint8) if ciphertexts is not None else None,
+        keys=keys.astype(np.uint8) if keys is not None else None,
+        sample_rate=sample_rate,
+        metadata={
+            "source_file": str(path),
+            "format": "chipwhisperer_npy",
+        },
+    )
+
+
+def load_chipwhisperer_trs(
+    path: str | PathLike[str],
+    *,
+    sample_rate: float | None = None,
+) -> ChipWhispererTraceSet:
+    """Load ChipWhisperer traces from Inspector .trs file.
+
+    The .trs format is used by Riscure Inspector and supported by ChipWhisperer.
+
+    TRS file structure:
+    - Header with metadata
+    - Trace data (interleaved with trace-specific data)
+
+    Args:
+        path: Path to .trs file.
+        sample_rate: Override sample rate.
+
+    Returns:
+        ChipWhispererTraceSet with traces and metadata.
+
+    Raises:
+        LoaderError: If file cannot be loaded.
+        FormatError: If TRS format invalid.
+
+    Example:
+        >>> traceset = load_chipwhisperer_trs("capture.trs")
+        >>> print(f"Loaded {traceset.n_traces} traces")
+
+    References:
+        Inspector Trace Set (.trs) file format specification
+    """
+    path = Path(path)
+
+    try:
+        with open(path, "rb") as f:
+            # Read TRS header
+            # Tag-Length-Value structure
+            tags = {}
+
+            while True:
+                tag_byte = f.read(1)
+                if not tag_byte or tag_byte == b"\x5f":  # End of header
+                    break
+
+                tag = tag_byte[0]
+                length = int.from_bytes(f.read(1), byteorder="little")
+
+                # Extended length for large values
+                if length == 0xFF:
+                    length = int.from_bytes(f.read(4), byteorder="little")
+
+                value = f.read(length)
+                tags[tag] = value
+
+            # Parse critical tags
+            # 0x41: Number of traces
+            n_traces = int.from_bytes(tags.get(0x41, b"\x00\x00"), byteorder="little")
+
+            # 0x42: Number of samples per trace
+            n_samples = int.from_bytes(tags.get(0x42, b"\x00\x00"), byteorder="little")
+
+            # 0x43: Sample coding (1=byte, 2=short, 4=float)
+            sample_coding = tags.get(0x43, b"\x01")[0]
+
+            # 0x44: Data length (plaintext/ciphertext)
+            data_length = int.from_bytes(tags.get(0x44, b"\x00\x00"), byteorder="little")
+
+            if n_traces == 0 or n_samples == 0:
+                raise FormatError(
+                    "Invalid TRS file: zero traces or samples",
+                    file_path=str(path),
+                )
+
+            # Determine numpy dtype from sample coding
+            dtype: type[np.int8] | type[np.int16] | type[np.float32]
+            if sample_coding == 1:
+                dtype = np.int8
+            elif sample_coding == 2:
+                dtype = np.int16
+            elif sample_coding == 4:
+                dtype = np.float32
+            else:
+                raise FormatError(
+                    f"Unsupported sample coding: {sample_coding}",
+                    file_path=str(path),
+                )
+
+            # Read traces
+            traces = np.zeros((n_traces, n_samples), dtype=np.float64)
+            plaintexts = (
+                np.zeros((n_traces, data_length), dtype=np.uint8) if data_length > 0 else None
+            )
+            ciphertexts = None  # Not typically in TRS files
+
+            for trace_idx in range(n_traces):
+                # Read trace-specific data (plaintext/key)
+                if data_length > 0:
+                    trace_data = np.frombuffer(f.read(data_length), dtype=np.uint8)
+                    if plaintexts is not None:
+                        plaintexts[trace_idx] = trace_data
+
+                # Read trace samples
+                trace_samples = np.frombuffer(f.read(n_samples * dtype(0).itemsize), dtype=dtype)
+                traces[trace_idx] = trace_samples.astype(np.float64)
+
+    except OSError as e:
+        raise LoaderError(
+            "Failed to read TRS file",
+            file_path=str(path),
+            details=str(e),
+        ) from e
+    except Exception as e:
+        if isinstance(e, (LoaderError, FormatError)):
+            raise
+        raise LoaderError(
+            "Failed to parse TRS file",
+            file_path=str(path),
+            details=str(e),
+        ) from e
+
+    # Use default sample rate if not specified
+    if sample_rate is None:
+        sample_rate = 1e6  # Default 1 MS/s
+
+    return ChipWhispererTraceSet(
+        traces=traces,
+        plaintexts=plaintexts,
+        ciphertexts=ciphertexts,
+        keys=None,
+        sample_rate=sample_rate,
+        metadata={
+            "source_file": str(path),
+            "format": "chipwhisperer_trs",
+            "n_traces": n_traces,
+            "n_samples": n_samples,
+            "sample_coding": sample_coding,
+        },
+    )
+
+
+def to_waveform_trace(
+    traceset: ChipWhispererTraceSet,
+    trace_index: int = 0,
+) -> WaveformTrace:
+    """Convert ChipWhisperer trace to WaveformTrace.
+
+    Args:
+        traceset: ChipWhisperer trace set.
+        trace_index: Index of trace to convert.
+
+    Returns:
+        WaveformTrace for single trace.
+
+    Raises:
+        IndexError: If trace_index out of range.
+
+    Example:
+        >>> traceset = load_chipwhisperer("traces.npy")
+        >>> trace = to_waveform_trace(traceset, trace_index=0)
+        >>> print(f"Sample rate: {trace.metadata.sample_rate} Hz")
+    """
+    if not 0 <= trace_index < traceset.n_traces:
+        raise IndexError(f"trace_index {trace_index} out of range [0, {traceset.n_traces})")
+
+    metadata = TraceMetadata(
+        sample_rate=traceset.sample_rate,
+        source_file=str(traceset.metadata.get("source_file", "")) if traceset.metadata else "",
+        channel_name=f"trace_{trace_index}",
+    )
+
+    return WaveformTrace(
+        data=traceset.traces[trace_index],
+        metadata=metadata,
+    )
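The TRS reader above consumes a small tag-length-value header (0x41 = trace count, 0x42 = samples per trace, 0x43 = sample coding, 0x44 = per-trace data length, 0x5F = end of header) followed by, for each trace, the trace-specific data and then the samples. For reviewers who want to exercise it, here is a hedged sketch of a writer that emits exactly the layout this reader accepts; the helper name write_minimal_trs is hypothetical, and because this reader stops at the 0x5F byte itself, the sketch writes no length byte after the end marker even though the full Inspector spec carries one.

    import struct

    import numpy as np


    def write_minimal_trs(path, traces, plaintexts=None):
        """Write a tiny Inspector-style .trs file in the layout read by load_chipwhisperer_trs."""
        traces = np.asarray(traces, dtype=np.float32)  # sample coding 4 = float32
        n_traces, n_samples = traces.shape
        data_len = plaintexts.shape[1] if plaintexts is not None else 0

        with open(path, "wb") as f:
            # Tag-length-value header (little-endian values, single length byte each)
            f.write(bytes([0x41, 4]) + struct.pack("<I", n_traces))
            f.write(bytes([0x42, 4]) + struct.pack("<I", n_samples))
            f.write(bytes([0x43, 1, 4]))  # sample coding: 4 = float
            f.write(bytes([0x44, 2]) + struct.pack("<H", data_len))
            f.write(bytes([0x5F]))  # end-of-header marker; the reader above breaks here

            # Interleave per-trace data (plaintext) and samples, as the reader expects
            for i in range(n_traces):
                if data_len:
                    f.write(plaintexts[i].astype(np.uint8).tobytes())
                f.write(traces[i].tobytes())

Round-tripping a few random traces through write_minimal_trs and load_chipwhisperer_trs is a cheap way to pin these header assumptions down in a unit test.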
oscura/loaders/touchstone.py CHANGED
@@ -169,7 +169,7 @@ def _parse_touchstone(
             if len(parts) % 2 == 1:
                 break  # New frequency line
         except (ValueError, IndexError):
-            pass
+            pass  # Skip lines that can't be parsed as numeric data
 
         for j in range(0, len(parts), 2):
             if j + 1 < len(parts):
oscura/session/session.py CHANGED
@@ -18,7 +18,6 @@ import gzip
 import hashlib
 import hmac
 import pickle
-import warnings
 from dataclasses import dataclass, field
 from datetime import datetime
 from pathlib import Path
@@ -421,19 +420,21 @@ class Session:
         return "\n".join(lines)
 
 
-def load_session(path: str | Path, *, verify_signature: bool = True) -> Session:
-    """Load session from file with optional signature verification.
+def load_session(path: str | Path) -> Session:
+    """Load session from file with HMAC signature verification.
+
+    Session files must be in the current OSC1 format with HMAC signature.
+    Legacy session files without signatures are not supported.
 
     Args:
         path: Path to session file (.tks).
-        verify_signature: Verify HMAC signature (default: True). Set to False
-            only when loading legacy session files without signatures.
 
     Returns:
         Loaded Session object.
 
     Raises:
-        SecurityError: If signature verification fails.
+        SecurityError: If signature verification fails or file is not in OSC1 format.
+        gzip.BadGzipFile: If file is neither valid gzip nor uncompressed session.
 
     Example:
        >>> session = load_session('debug_session.tks')
@@ -444,61 +445,68 @@ def load_session(path: str | Path, *, verify_signature: bool = True) -> Session:
         trusted sources. Loading a malicious .tks file could execute arbitrary
         code. Never load session files from untrusted or unknown sources.
 
-        New session files include HMAC signatures for integrity verification.
-        Legacy files without signatures will trigger a warning.
-
-        For secure data exchange, consider exporting to JSON or HDF5 formats
-        instead of using pickle-based session files.
+        All session files must include HMAC signatures for integrity verification.
+        For secure data exchange with untrusted parties, consider exporting to
+        JSON or HDF5 formats instead of using pickle-based session files.
     """
     path = Path(path)
 
-    # Helper to load with signature verification
-    def _load_with_verification(f: Any, is_compressed: bool = False) -> dict[str, Any]:
-        # Read magic bytes to detect format
+    def _load_with_verification(f: Any) -> dict[str, Any]:
+        """Load and verify session file with HMAC signature.
+
+        Args:
+            f: File object (gzip or regular).
+
+        Returns:
+            Deserialized session dictionary.
+
+        Raises:
+            SecurityError: If magic bytes or signature verification fails.
+        """
+        # Read magic bytes
         magic = f.read(len(_SESSION_MAGIC))
 
-        if magic == _SESSION_MAGIC:
-            # New format with signature
-            signature = f.read(_SESSION_SIGNATURE_SIZE)
-            serialized = f.read()
-
-            if verify_signature:
-                # Verify HMAC signature
-                expected = hmac.new(_SECURITY_KEY, serialized, hashlib.sha256).digest()
-                if not hmac.compare_digest(signature, expected):
-                    raise SecurityError(
-                        "Session file signature verification failed",
-                        file_path=str(path),
-                        check_type="HMAC signature",
-                        details="File may be corrupted or tampered with",
-                    )
+        if magic != _SESSION_MAGIC:
+            raise SecurityError(
+                "This is a legacy session file. Please re-save with current version.",
+                file_path=str(path),
+                check_type="Session format",
+                details="Expected OSC1 format with HMAC signature",
+            )
 
-            # Deserialize verified data
-            data = cast("dict[str, Any]", pickle.loads(serialized))
-        else:
-            # Legacy format without signature
-            warnings.warn(
-                f"Loading legacy session file without signature verification: {path}. "
-                "Re-save the session to enable security features.",
-                UserWarning,
-                stacklevel=3,
+        # Read signature and payload
+        signature = f.read(_SESSION_SIGNATURE_SIZE)
+        serialized = f.read()
+
+        if not signature or not serialized:
+            raise SecurityError(
+                "This is a legacy session file. Please re-save with current version.",
+                file_path=str(path),
+                check_type="Session format",
+                details="File is incomplete or corrupted",
             )
 
-            # Rewind and load as legacy pickle
-            f.seek(0)
-            data = cast("dict[str, Any]", pickle.load(f))
+        # Verify HMAC signature
+        expected = hmac.new(_SECURITY_KEY, serialized, hashlib.sha256).digest()
+        if not hmac.compare_digest(signature, expected):
+            raise SecurityError(
+                "Session file signature verification failed",
+                file_path=str(path),
+                check_type="HMAC signature",
+                details="File may be corrupted or tampered with",
+            )
 
+        # Deserialize verified data
+        data = cast("dict[str, Any]", pickle.loads(serialized))
         return data
 
-    # Try loading (compressed or uncompressed)
+    # Try loading (compressed first, then uncompressed)
     try:
-        # Try gzip compressed first
         with gzip.open(path, "rb") as f:
-            data = _load_with_verification(f, is_compressed=True)
+            data = _load_with_verification(f)
     except gzip.BadGzipFile:
-        # Fall back to uncompressed
         with open(path, "rb") as f:  # type: ignore[assignment]
-            data = _load_with_verification(f, is_compressed=False)
+            data = _load_with_verification(f)
 
     session = Session._from_dict(data)
     session._file_path = path
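The net effect of this change is a single accepted on-disk layout for .tks files: an optional gzip wrapper around magic bytes, an HMAC-SHA256 signature, and the pickled session dict, with no unsigned fallback. Below is a minimal sketch of that layout; MAGIC and KEY are illustrative stand-ins because _SESSION_MAGIC and _SECURITY_KEY are module internals whose values do not appear in this diff, and the 32-byte signature size is inferred from the use of hashlib.sha256.

    import hashlib
    import hmac
    import pickle

    MAGIC = b"OSC1"        # stand-in for _SESSION_MAGIC
    KEY = b"example-key"   # stand-in for _SECURITY_KEY
    SIG_SIZE = hashlib.sha256().digest_size  # 32 bytes


    def sign_payload(obj) -> bytes:
        """Produce bytes in the shape load_session now requires: magic || signature || pickle."""
        serialized = pickle.dumps(obj)
        signature = hmac.new(KEY, serialized, hashlib.sha256).digest()
        return MAGIC + signature + serialized


    def verify_payload(blob: bytes):
        """Mirror of _load_with_verification, operating on raw bytes."""
        if not blob.startswith(MAGIC):
            raise ValueError("legacy or corrupted session file")
        signature = blob[len(MAGIC):len(MAGIC) + SIG_SIZE]
        serialized = blob[len(MAGIC) + SIG_SIZE:]
        expected = hmac.new(KEY, serialized, hashlib.sha256).digest()
        if not hmac.compare_digest(signature, expected):
            raise ValueError("signature verification failed")
        return pickle.loads(serialized)


    # The on-disk .tks file is these bytes, optionally gzip-compressed
    # (load_session tries gzip first, then falls back to a plain file).
    blob = sign_payload({"example": 1})
    assert verify_payload(blob) == {"example": 1}

Because the key is a constant baked into the module, the signature guards against accidental corruption and casual tampering rather than a determined attacker, which is consistent with the docstring's insistence on loading only trusted files.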
oscura/sessions/__init__.py ADDED
@@ -0,0 +1,70 @@
+"""Unified session management for Oscura.
+
+This module provides the AnalysisSession hierarchy - a unified pattern for
+interactive signal analysis across different domains.
+
+All analysis sessions (CAN, Serial, BlackBox, RF, etc.) inherit from
+AnalysisSession and provide consistent interfaces for:
+- Recording management (add, list, compare)
+- Differential analysis
+- Result export
+- Domain-specific analysis methods
+
+Example - Generic Session:
+    >>> from oscura.sessions import GenericSession
+    >>> from oscura.acquisition import FileSource
+    >>>
+    >>> session = GenericSession()
+    >>> session.add_recording("test", FileSource("capture.wfm"))
+    >>> results = session.analyze()
+    >>> print(results["summary"]["test"]["mean"])
+
+Example - Domain-Specific Session:
+    >>> from oscura.sessions import AnalysisSession
+    >>> from oscura.acquisition import FileSource
+    >>>
+    >>> class CANSession(AnalysisSession):
+    ...     def analyze(self):
+    ...         # CAN-specific signal discovery
+    ...         return self.discover_signals()
+    ...
+    ...     def discover_signals(self):
+    ...         # Extract CAN signals from recordings
+    ...         pass
+    >>>
+    >>> session = CANSession()
+    >>> session.add_recording("baseline", FileSource("idle.blf"))
+    >>> signals = session.analyze()
+
+Pattern Decision Table:
+    - Use GenericSession for general waveform analysis
+    - Extend AnalysisSession for domain-specific workflows
+    - Use existing session.Session for backward compatibility
+
+Architecture:
+    Layer 3 (High-Level API) - User-Facing
+    ├── AnalysisSession (ABC)
+    │   ├── GenericSession
+    │   ├── CANSession (Phase 1)
+    │   ├── SerialSession (Phase 1)
+    │   ├── BlackBoxSession (Phase 1)
+    │   └── [Future domain sessions]
+    └── [Workflows wrapping sessions]
+
+References:
+    Architecture Plan Phase 0.3: AnalysisSession Base Class
+    docs/architecture/api-patterns.md: When to use Sessions vs Workflows
+"""
+
+from oscura.sessions.base import AnalysisSession, ComparisonResult
+from oscura.sessions.blackbox import BlackBoxSession, FieldHypothesis, ProtocolSpec
+from oscura.sessions.generic import GenericSession
+
+__all__ = [
+    "AnalysisSession",
+    "BlackBoxSession",
+    "ComparisonResult",
+    "FieldHypothesis",
+    "GenericSession",
+    "ProtocolSpec",
+]