oscura 0.1.2__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oscura/__init__.py +1 -7
- oscura/acquisition/__init__.py +147 -0
- oscura/acquisition/file.py +255 -0
- oscura/acquisition/hardware.py +186 -0
- oscura/acquisition/saleae.py +340 -0
- oscura/acquisition/socketcan.py +315 -0
- oscura/acquisition/streaming.py +38 -0
- oscura/acquisition/synthetic.py +229 -0
- oscura/acquisition/visa.py +376 -0
- oscura/analyzers/__init__.py +3 -0
- oscura/analyzers/digital/clock.py +9 -1
- oscura/analyzers/digital/edges.py +1 -1
- oscura/analyzers/digital/timing.py +41 -11
- oscura/analyzers/packet/payload_extraction.py +2 -4
- oscura/analyzers/packet/stream.py +5 -5
- oscura/analyzers/patterns/__init__.py +4 -3
- oscura/analyzers/patterns/clustering.py +3 -1
- oscura/analyzers/power/ac_power.py +0 -2
- oscura/analyzers/power/basic.py +0 -2
- oscura/analyzers/power/ripple.py +0 -2
- oscura/analyzers/side_channel/__init__.py +52 -0
- oscura/analyzers/side_channel/power.py +690 -0
- oscura/analyzers/side_channel/timing.py +369 -0
- oscura/analyzers/signal_integrity/embedding.py +0 -2
- oscura/analyzers/signal_integrity/sparams.py +28 -206
- oscura/analyzers/spectral/fft.py +0 -2
- oscura/analyzers/statistical/__init__.py +3 -3
- oscura/analyzers/statistical/checksum.py +2 -0
- oscura/analyzers/statistical/classification.py +2 -0
- oscura/analyzers/statistical/entropy.py +11 -9
- oscura/analyzers/statistical/ngrams.py +4 -2
- oscura/api/fluent.py +2 -2
- oscura/automotive/__init__.py +4 -4
- oscura/automotive/can/__init__.py +0 -2
- oscura/automotive/can/patterns.py +3 -1
- oscura/automotive/can/session.py +277 -78
- oscura/automotive/can/state_machine.py +5 -2
- oscura/automotive/dbc/__init__.py +0 -2
- oscura/automotive/dtc/__init__.py +0 -2
- oscura/automotive/dtc/data.json +2763 -0
- oscura/automotive/dtc/database.py +37 -2769
- oscura/automotive/j1939/__init__.py +0 -2
- oscura/automotive/loaders/__init__.py +0 -2
- oscura/automotive/loaders/asc.py +0 -2
- oscura/automotive/loaders/blf.py +0 -2
- oscura/automotive/loaders/csv_can.py +0 -2
- oscura/automotive/obd/__init__.py +0 -2
- oscura/automotive/uds/__init__.py +0 -2
- oscura/automotive/uds/models.py +0 -2
- oscura/builders/__init__.py +9 -11
- oscura/builders/signal_builder.py +99 -191
- oscura/cli/main.py +0 -2
- oscura/cli/shell.py +0 -2
- oscura/config/loader.py +0 -2
- oscura/core/backend_selector.py +1 -1
- oscura/core/correlation.py +0 -2
- oscura/core/exceptions.py +61 -3
- oscura/core/lazy.py +5 -3
- oscura/core/memory_limits.py +0 -2
- oscura/core/numba_backend.py +5 -7
- oscura/core/uncertainty.py +3 -3
- oscura/dsl/interpreter.py +2 -0
- oscura/dsl/parser.py +8 -6
- oscura/exploratory/error_recovery.py +3 -3
- oscura/exploratory/parse.py +2 -0
- oscura/exploratory/recovery.py +2 -0
- oscura/exploratory/sync.py +2 -0
- oscura/export/wireshark/generator.py +1 -1
- oscura/export/wireshark/type_mapping.py +2 -0
- oscura/exporters/hdf5.py +1 -3
- oscura/extensibility/templates.py +0 -8
- oscura/inference/active_learning/lstar.py +2 -4
- oscura/inference/active_learning/observation_table.py +0 -2
- oscura/inference/active_learning/oracle.py +3 -1
- oscura/inference/active_learning/teachers/simulator.py +1 -3
- oscura/inference/alignment.py +2 -0
- oscura/inference/message_format.py +2 -0
- oscura/inference/protocol_dsl.py +7 -5
- oscura/inference/sequences.py +12 -14
- oscura/inference/state_machine.py +2 -0
- oscura/integrations/llm.py +3 -1
- oscura/jupyter/display.py +0 -2
- oscura/loaders/__init__.py +68 -51
- oscura/loaders/chipwhisperer.py +393 -0
- oscura/loaders/pcap.py +1 -1
- oscura/loaders/touchstone.py +221 -0
- oscura/math/arithmetic.py +0 -2
- oscura/optimization/parallel.py +9 -6
- oscura/pipeline/composition.py +0 -2
- oscura/plugins/cli.py +0 -2
- oscura/reporting/comparison.py +0 -2
- oscura/reporting/config.py +1 -1
- oscura/reporting/formatting/emphasis.py +2 -0
- oscura/reporting/formatting/numbers.py +0 -2
- oscura/reporting/output.py +1 -3
- oscura/reporting/sections.py +0 -2
- oscura/search/anomaly.py +2 -0
- oscura/session/session.py +91 -16
- oscura/sessions/__init__.py +70 -0
- oscura/sessions/base.py +323 -0
- oscura/sessions/blackbox.py +640 -0
- oscura/sessions/generic.py +189 -0
- oscura/testing/synthetic.py +2 -0
- oscura/ui/formatters.py +4 -2
- oscura/utils/buffer.py +2 -2
- oscura/utils/lazy.py +5 -5
- oscura/utils/memory_advanced.py +2 -2
- oscura/utils/memory_extensions.py +2 -2
- oscura/visualization/colors.py +0 -2
- oscura/visualization/power.py +2 -0
- oscura/workflows/multi_trace.py +2 -0
- {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/METADATA +122 -20
- {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/RECORD +116 -98
- {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/WHEEL +0 -0
- {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/entry_points.txt +0 -0
- {oscura-0.1.2.dist-info → oscura-0.4.0.dist-info}/licenses/LICENSE +0 -0
oscura/reporting/comparison.py
CHANGED
oscura/reporting/config.py
CHANGED
@@ -594,7 +594,7 @@ def get_available_analyses(input_type: InputType) -> list[AnalysisDomain]:
 
 
 # Type alias for progress callbacks
-ProgressCallback = Callable[[ProgressInfo], None]
+ProgressCallback = Callable[["ProgressInfo"], None]
 
 
 __all__ = [
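The only functional change in this hunk is quoting the callback's argument type, turning it into a string forward reference so the alias does not need ProgressInfo to be resolvable at that point in the module. A minimal sketch of the pattern, assuming ProgressInfo is declared later in the same module or only imported for type checking (the surrounding definitions are not part of this hunk):

    from collections.abc import Callable
    from dataclasses import dataclass

    # The quoted name is stored as a string and resolved lazily by type
    # checkers, so the alias can precede the class definition.
    ProgressCallback = Callable[["ProgressInfo"], None]

    @dataclass
    class ProgressInfo:
        step: str
        fraction: float  # 0.0 .. 1.0

    def run_with_progress(callback: ProgressCallback) -> None:
        callback(ProgressInfo(step="load", fraction=0.5))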
oscura/reporting/output.py
CHANGED
@@ -4,8 +4,6 @@ This module provides directory structure and file management for analysis
 report outputs, including plots, JSON/YAML data exports, and logs.
 """
 
-from __future__ import annotations
-
 import json
 from datetime import datetime
 from pathlib import Path
@@ -14,7 +12,7 @@ from typing import Any
 import numpy as np
 import yaml
 
-from oscura.reporting.config import AnalysisDomain
+from oscura.reporting.config import AnalysisDomain
 
 
 def _sanitize_for_serialization(obj: Any, max_depth: int = 10) -> Any:
oscura/reporting/sections.py
CHANGED
oscura/search/anomaly.py
CHANGED
oscura/session/session.py
CHANGED
@@ -15,17 +15,25 @@ Example:
 from __future__ import annotations
 
 import gzip
+import hashlib
+import hmac
 import pickle
 from dataclasses import dataclass, field
 from datetime import datetime
 from pathlib import Path
-from typing import Any
+from typing import Any, cast
 
 import numpy as np
 
+from oscura.core.exceptions import SecurityError
 from oscura.session.annotations import AnnotationLayer
 from oscura.session.history import OperationHistory
 
+# Session file format constants
+_SESSION_MAGIC = b"OSC1"  # Magic bytes for new format with signature
+_SESSION_SIGNATURE_SIZE = 32  # SHA256 hash size in bytes
+_SECURITY_KEY = hashlib.sha256(b"oscura-session-v1").digest()
+
 
 @dataclass
 class Session:
@@ -273,7 +281,7 @@
         include_traces: bool = True,
         compress: bool = True,
     ) -> Path:
-        """Save session to file.
+        """Save session to file with HMAC signature for integrity verification.
 
         Args:
             path: Output path (default: use existing or generate).
@@ -287,9 +295,10 @@
             >>> session.save('analysis.tks')
 
         Security Note:
-            Session files
-
-            with untrusted parties, use JSON or HDF5
+            Session files now include HMAC signatures for integrity verification.
+            Files are still pickle-based - only load from trusted sources.
+            For secure data exchange with untrusted parties, use JSON or HDF5
+            export formats instead.
         """
         if path is None:
             path = self._file_path or Path(f"{self.name.replace(' ', '_')}.tks")
@@ -302,13 +311,23 @@
         # Build session data
         data = self._to_dict(include_traces=include_traces)
 
-        # Serialize
+        # Serialize with pickle
+        serialized = pickle.dumps(data, protocol=pickle.HIGHEST_PROTOCOL)
+
+        # Compute HMAC signature
+        signature = hmac.new(_SECURITY_KEY, serialized, hashlib.sha256).digest()
+
+        # Write: magic bytes + signature + pickled data
         if compress:
             with gzip.open(path, "wb") as f:
-
+                f.write(_SESSION_MAGIC)
+                f.write(signature)
+                f.write(serialized)
         else:
             with open(path, "wb") as f:
-
+                f.write(_SESSION_MAGIC)
+                f.write(signature)
+                f.write(serialized)
 
         self.history.record("save", {"path": str(path)})
 
@@ -402,7 +421,10 @@
 
 
 def load_session(path: str | Path) -> Session:
-    """Load session from file.
+    """Load session from file with HMAC signature verification.
+
+    Session files must be in the current OSC1 format with HMAC signature.
+    Legacy session files without signatures are not supported.
 
     Args:
         path: Path to session file (.tks).
@@ -410,6 +432,10 @@ def load_session(path: str | Path) -> Session:
     Returns:
         Loaded Session object.
 
+    Raises:
+        SecurityError: If signature verification fails or file is not in OSC1 format.
+        gzip.BadGzipFile: If file is neither valid gzip nor uncompressed session.
+
     Example:
         >>> session = load_session('debug_session.tks')
         >>> print(session.list_traces())
@@ -419,19 +445,68 @@ def load_session(path: str | Path) -> Session:
        trusted sources. Loading a malicious .tks file could execute arbitrary
        code. Never load session files from untrusted or unknown sources.
 
-
-
+        All session files must include HMAC signatures for integrity verification.
+        For secure data exchange with untrusted parties, consider exporting to
+        JSON or HDF5 formats instead of using pickle-based session files.
     """
     path = Path(path)
 
+    def _load_with_verification(f: Any) -> dict[str, Any]:
+        """Load and verify session file with HMAC signature.
+
+        Args:
+            f: File object (gzip or regular).
+
+        Returns:
+            Deserialized session dictionary.
+
+        Raises:
+            SecurityError: If magic bytes or signature verification fails.
+        """
+        # Read magic bytes
+        magic = f.read(len(_SESSION_MAGIC))
+
+        if magic != _SESSION_MAGIC:
+            raise SecurityError(
+                "This is a legacy session file. Please re-save with current version.",
+                file_path=str(path),
+                check_type="Session format",
+                details="Expected OSC1 format with HMAC signature",
+            )
+
+        # Read signature and payload
+        signature = f.read(_SESSION_SIGNATURE_SIZE)
+        serialized = f.read()
+
+        if not signature or not serialized:
+            raise SecurityError(
+                "This is a legacy session file. Please re-save with current version.",
+                file_path=str(path),
+                check_type="Session format",
+                details="File is incomplete or corrupted",
+            )
+
+        # Verify HMAC signature
+        expected = hmac.new(_SECURITY_KEY, serialized, hashlib.sha256).digest()
+        if not hmac.compare_digest(signature, expected):
+            raise SecurityError(
+                "Session file signature verification failed",
+                file_path=str(path),
+                check_type="HMAC signature",
+                details="File may be corrupted or tampered with",
+            )
+
+        # Deserialize verified data
+        data = cast("dict[str, Any]", pickle.loads(serialized))
+        return data
+
+    # Try loading (compressed first, then uncompressed)
     try:
-        # Try gzip compressed first
        with gzip.open(path, "rb") as f:
-            data =
+            data = _load_with_verification(f)
     except gzip.BadGzipFile:
-
-
-            data = pickle.load(f)
+        with open(path, "rb") as f:  # type: ignore[assignment]
+            data = _load_with_verification(f)
 
     session = Session._from_dict(data)
     session._file_path = path
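Put together, the new save path writes a fixed header in front of the pickled payload: the four magic bytes b"OSC1", a 32-byte HMAC-SHA256 over the serialized data, then the payload itself, optionally all gzip-compressed. Below is a minimal standalone sketch of checking that layout; the constants mirror the hunk above, while the helper name verify_session_file is hypothetical and not part of the package:

    import gzip
    import hashlib
    import hmac

    _SESSION_MAGIC = b"OSC1"
    _SESSION_SIGNATURE_SIZE = 32  # SHA-256 digest length
    _SECURITY_KEY = hashlib.sha256(b"oscura-session-v1").digest()

    def verify_session_file(path: str) -> bool:
        """Return True if the file has an OSC1 header and a matching HMAC (hypothetical helper)."""
        try:
            with gzip.open(path, "rb") as f:
                blob = f.read()
        except gzip.BadGzipFile:
            with open(path, "rb") as f:
                blob = f.read()

        if not blob.startswith(_SESSION_MAGIC):
            return False  # legacy or foreign file

        start = len(_SESSION_MAGIC)
        signature = blob[start:start + _SESSION_SIGNATURE_SIZE]
        payload = blob[start + _SESSION_SIGNATURE_SIZE:]
        expected = hmac.new(_SECURITY_KEY, payload, hashlib.sha256).digest()
        return hmac.compare_digest(signature, expected)

Because the key is derived from a constant, public string, the signature guards against corruption and accidental tampering rather than providing authentication; the updated docstrings make the same point by still restricting loads to trusted sources.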
oscura/sessions/__init__.py
ADDED
@@ -0,0 +1,70 @@
+"""Unified session management for Oscura.
+
+This module provides the AnalysisSession hierarchy - a unified pattern for
+interactive signal analysis across different domains.
+
+All analysis sessions (CAN, Serial, BlackBox, RF, etc.) inherit from
+AnalysisSession and provide consistent interfaces for:
+- Recording management (add, list, compare)
+- Differential analysis
+- Result export
+- Domain-specific analysis methods
+
+Example - Generic Session:
+    >>> from oscura.sessions import GenericSession
+    >>> from oscura.acquisition import FileSource
+    >>>
+    >>> session = GenericSession()
+    >>> session.add_recording("test", FileSource("capture.wfm"))
+    >>> results = session.analyze()
+    >>> print(results["summary"]["test"]["mean"])
+
+Example - Domain-Specific Session:
+    >>> from oscura.sessions import AnalysisSession
+    >>> from oscura.acquisition import FileSource
+    >>>
+    >>> class CANSession(AnalysisSession):
+    ...     def analyze(self):
+    ...         # CAN-specific signal discovery
+    ...         return self.discover_signals()
+    ...
+    ...     def discover_signals(self):
+    ...         # Extract CAN signals from recordings
+    ...         pass
+    >>>
+    >>> session = CANSession()
+    >>> session.add_recording("baseline", FileSource("idle.blf"))
+    >>> signals = session.analyze()
+
+Pattern Decision Table:
+    - Use GenericSession for general waveform analysis
+    - Extend AnalysisSession for domain-specific workflows
+    - Use existing session.Session for backward compatibility
+
+Architecture:
+    Layer 3 (High-Level API) - User-Facing
+    ├── AnalysisSession (ABC)
+    │   ├── GenericSession
+    │   ├── CANSession (Phase 1)
+    │   ├── SerialSession (Phase 1)
+    │   ├── BlackBoxSession (Phase 1)
+    │   └── [Future domain sessions]
+    └── [Workflows wrapping sessions]
+
+References:
+    Architecture Plan Phase 0.3: AnalysisSession Base Class
+    docs/architecture/api-patterns.md: When to use Sessions vs Workflows
+"""
+
+from oscura.sessions.base import AnalysisSession, ComparisonResult
+from oscura.sessions.blackbox import BlackBoxSession, FieldHypothesis, ProtocolSpec
+from oscura.sessions.generic import GenericSession
+
+__all__ = [
+    "AnalysisSession",
+    "BlackBoxSession",
+    "ComparisonResult",
+    "FieldHypothesis",
+    "GenericSession",
+    "ProtocolSpec",
+]
oscura/sessions/base.py
ADDED
@@ -0,0 +1,323 @@
+"""Base class for analysis sessions.
+
+This module defines AnalysisSession - the abstract base class for all
+interactive analysis sessions in Oscura. It provides a unified pattern
+for domain-specific sessions (CAN, Serial, BlackBox, etc.).
+
+Example:
+    >>> from oscura.sessions import AnalysisSession
+    >>> from oscura.acquisition import FileSource, HardwareSource
+    >>>
+    >>> # Domain-specific sessions extend AnalysisSession
+    >>> class CANSession(AnalysisSession):
+    ...     def analyze(self):
+    ...         # CAN-specific analysis
+    ...         return self.discover_signals()
+    ...
+    ...     def discover_signals(self):
+    ...         # Extract CAN signals from recordings
+    ...         pass
+    >>>
+    >>> # Unified interface across all sessions
+    >>> session = CANSession()
+    >>> session.add_recording("baseline", FileSource("idle.blf"))
+    >>> session.add_recording("active", FileSource("running.blf"))
+    >>> diff = session.compare("baseline", "active")
+
+Pattern:
+    All domain-specific sessions (CAN, Serial, BlackBox, etc.) inherit
+    from AnalysisSession and provide:
+    - Recording management (add_recording, list_recordings)
+    - Comparison and differential analysis
+    - Result export (reports, specs, dissectors)
+    - Domain-specific analysis methods (abstract)
+
+Benefits:
+    - Consistent API across all analysis domains
+    - Polymorphic session handling
+    - Shared infrastructure (comparison, export, history)
+    - Domain-specific specialization via inheritance
+
+References:
+    Architecture Plan Phase 0.3: AnalysisSession Base Class
+    docs/architecture/api-patterns.md: When to use Sessions
+"""
+
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from datetime import datetime
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from oscura.acquisition import Source
+    from oscura.core.types import Trace
+
+
+@dataclass
+class ComparisonResult:
+    """Result of comparing two recordings.
+
+    Attributes:
+        recording1: Name of first recording.
+        recording2: Name of second recording.
+        changed_bytes: Number of bytes that differ.
+        changed_regions: List of (start, end, description) tuples.
+        similarity_score: Similarity metric (0.0 to 1.0).
+        details: Additional comparison details.
+
+    Example:
+        >>> result = session.compare("baseline", "stimulus")
+        >>> print(f"Changed bytes: {result.changed_bytes}")
+        >>> print(f"Similarity: {result.similarity_score:.2%}")
+    """
+
+    recording1: str
+    recording2: str
+    changed_bytes: int
+    changed_regions: list[tuple[int, int, str]] = field(default_factory=list)
+    similarity_score: float = 0.0
+    details: dict[str, Any] = field(default_factory=dict)
+
+
+class AnalysisSession(ABC):
+    """Abstract base class for all analysis sessions.
+
+    Provides unified interface for interactive signal analysis across
+    different domains (CAN, Serial, RF, BlackBox, etc.). All domain-specific
+    sessions extend this class.
+
+    Subclasses must implement:
+    - analyze(): Domain-specific analysis method
+
+    Subclasses may override:
+    - export_results(): Custom export formats
+    - compare(): Domain-specific comparison logic
+
+    Attributes:
+        name: Session name.
+        recordings: Dictionary mapping names to (source, trace) tuples.
+        metadata: Session metadata dictionary.
+        created_at: Session creation timestamp.
+        modified_at: Last modification timestamp.
+
+    Example:
+        >>> # Subclass for domain-specific analysis
+        >>> class SerialSession(AnalysisSession):
+        ...     def analyze(self):
+        ...         # Detect baud rate, decode UART
+        ...         baud = self.detect_baud_rate()
+        ...         frames = self.decode_uart(baud)
+        ...         return {"baud_rate": baud, "frames": frames}
+        ...
+        ...     def detect_baud_rate(self):
+        ...         # Domain-specific logic
+        ...         pass
+        >>>
+        >>> session = SerialSession()
+        >>> session.add_recording("capture", FileSource("uart.wfm"))
+        >>> results = session.analyze()
+    """
+
+    def __init__(self, name: str = "Untitled Session") -> None:
+        """Initialize analysis session.
+
+        Args:
+            name: Session name (default: "Untitled Session").
+
+        Example:
+            >>> session = CANSession(name="Vehicle Debug Session")
+        """
+        self.name = name
+        self.recordings: dict[str, tuple[Source, Trace | None]] = {}
+        self.metadata: dict[str, Any] = {}
+        self.created_at = datetime.now()
+        self.modified_at = datetime.now()
+
+    def add_recording(
+        self,
+        name: str,
+        source: Source,
+        *,
+        load_immediately: bool = True,
+    ) -> None:
+        """Add a recording to the session.
+
+        Args:
+            name: Name for this recording (e.g., "baseline", "stimulus1").
+            source: Source to acquire data from (FileSource, HardwareSource, etc.).
+            load_immediately: If True, load trace now. If False, defer loading.
+
+        Raises:
+            ValueError: If name already exists.
+
+        Example:
+            >>> from oscura.acquisition import FileSource
+            >>> session.add_recording("baseline", FileSource("idle.blf"))
+            >>> session.add_recording("active", FileSource("running.blf"))
+        """
+        if name in self.recordings:
+            raise ValueError(f"Recording '{name}' already exists in session")
+
+        # Load trace if requested
+        trace = source.read() if load_immediately else None
+
+        self.recordings[name] = (source, trace)
+        self.modified_at = datetime.now()
+
+    def get_recording(self, name: str) -> Trace:
+        """Get a recording by name, loading if necessary.
+
+        Args:
+            name: Recording name.
+
+        Returns:
+            Loaded trace.
+
+        Raises:
+            KeyError: If recording not found.
+
+        Example:
+            >>> trace = session.get_recording("baseline")
+            >>> print(f"Loaded {len(trace.data)} samples")
+        """
+        if name not in self.recordings:
+            available = list(self.recordings.keys())
+            raise KeyError(f"Recording '{name}' not found. Available: {available}")
+
+        source, trace = self.recordings[name]
+
+        # Load if not already loaded
+        if trace is None:
+            trace = source.read()
+            self.recordings[name] = (source, trace)
+
+        return trace
+
+    def list_recordings(self) -> list[str]:
+        """List all recording names in the session.
+
+        Returns:
+            List of recording names.
+
+        Example:
+            >>> session.list_recordings()
+            ['baseline', 'stimulus1', 'stimulus2']
+        """
+        return list(self.recordings.keys())
+
+    def compare(self, name1: str, name2: str) -> ComparisonResult:
+        """Compare two recordings (differential analysis).
+
+        Default implementation provides basic byte-level comparison.
+        Subclasses can override for domain-specific comparison logic.
+
+        Args:
+            name1: First recording name.
+            name2: Second recording name.
+
+        Returns:
+            ComparisonResult with differences.
+
+        Raises:
+            KeyError: If recordings not found.
+
+        Example:
+            >>> result = session.compare("baseline", "stimulus")
+            >>> print(f"Changed: {result.changed_bytes} bytes")
+            >>> print(f"Similarity: {result.similarity_score:.2%}")
+        """
+        trace1 = self.get_recording(name1)
+        trace2 = self.get_recording(name2)
+
+        # Basic comparison - count differing samples
+        import numpy as np
+
+        from oscura.core.types import IQTrace
+
+        # Handle IQTrace separately
+        if isinstance(trace1, IQTrace) or isinstance(trace2, IQTrace):
+            raise TypeError("IQTrace comparison not yet supported in base session")
+
+        min_len = min(len(trace1.data), len(trace2.data))
+        data1 = trace1.data[:min_len]
+        data2 = trace2.data[:min_len]
+
+        # For analog traces, use threshold comparison
+        threshold = 0.01  # 1% tolerance
+        changed = np.abs(data1 - data2) > threshold
+        changed_count = int(np.sum(changed))
+
+        # Similarity score
+        similarity = 1.0 - (changed_count / min_len)
+
+        return ComparisonResult(
+            recording1=name1,
+            recording2=name2,
+            changed_bytes=changed_count,
+            similarity_score=similarity,
+            details={
+                "trace1_length": len(trace1.data),
+                "trace2_length": len(trace2.data),
+                "compared_length": min_len,
+            },
+        )
+
+    def export_results(self, format: str, path: str | Path) -> None:
+        """Export analysis results to file.
+
+        Default implementation provides basic export. Subclasses should
+        override to support domain-specific formats (DBC, Wireshark, etc.).
+
+        Args:
+            format: Export format (e.g., "report", "json", "csv").
+            path: Output file path.
+
+        Raises:
+            ValueError: If format not supported.
+
+        Example:
+            >>> session.export_results("report", "analysis.txt")
+            >>> session.export_results("json", "results.json")
+        """
+        path = Path(path)
+        path.parent.mkdir(parents=True, exist_ok=True)
+
+        if format == "report":
+            # Basic text report
+            with open(path, "w") as f:
+                f.write(f"Analysis Session: {self.name}\n")
+                f.write(f"Created: {self.created_at}\n")
+                f.write(f"Modified: {self.modified_at}\n\n")
+                f.write(f"Recordings: {len(self.recordings)}\n")
+                for name in self.list_recordings():
+                    f.write(f"  - {name}\n")
+        else:
+            raise ValueError(f"Unsupported export format: {format}")
+
+    @abstractmethod
+    def analyze(self) -> Any:
+        """Perform domain-specific analysis.
+
+        Subclasses must implement this method to provide domain-specific
+        analysis functionality (CAN signal discovery, UART decoding,
+        protocol reverse engineering, etc.).
+
+        Returns:
+            Analysis results (format depends on domain).
+
+        Example:
+            >>> class CANSession(AnalysisSession):
+            ...     def analyze(self):
+            ...         signals = self.discover_signals()
+            ...         return {"signals": signals}
+        """
+
+    def __repr__(self) -> str:
+        """String representation."""
+        return f"{self.__class__.__name__}(name={self.name!r}, recordings={len(self.recordings)})"
+
+
+__all__ = ["AnalysisSession", "ComparisonResult"]
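The docstring examples above stub their domain logic out with pass, so here is a self-contained sketch that exercises the concrete parts of the new base class (recording registry, lazy loading, and the default compare()). The ListSource and FakeTrace classes are hypothetical stand-ins for the real oscura.acquisition.Source and oscura.core.types.Trace; everything else is the API added in this release.

    import numpy as np

    from oscura.sessions import AnalysisSession

    class FakeTrace:
        """Hypothetical stand-in for oscura.core.types.Trace."""
        def __init__(self, data: np.ndarray) -> None:
            self.data = data

    class ListSource:
        """Hypothetical source that yields a fixed trace from a list of samples."""
        def __init__(self, values: list[float]) -> None:
            self._values = values

        def read(self) -> FakeTrace:
            return FakeTrace(np.asarray(self._values, dtype=float))

    class ThresholdSession(AnalysisSession):
        """Toy session: count samples above 0.5 in every recording."""
        def analyze(self) -> dict[str, int]:
            return {
                name: int(np.sum(self.get_recording(name).data > 0.5))
                for name in self.list_recordings()
            }

    session = ThresholdSession(name="demo")
    session.add_recording("baseline", ListSource([0.1, 0.2, 0.3]))
    session.add_recording("active", ListSource([0.1, 0.9, 0.8]))
    print(session.analyze())                                       # {'baseline': 0, 'active': 2}
    print(session.compare("baseline", "active").similarity_score)  # ~0.33

Any real subclass would replace the toy analyze() with domain logic (CAN signal discovery, UART decoding, etc.), while inheriting the recording management and comparison behavior unchanged.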