oscura-0.7.0-py3-none-any.whl → oscura-0.10.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oscura/__init__.py +19 -19
- oscura/analyzers/__init__.py +2 -0
- oscura/analyzers/digital/extraction.py +2 -3
- oscura/analyzers/digital/quality.py +1 -1
- oscura/analyzers/digital/timing.py +1 -1
- oscura/analyzers/eye/__init__.py +5 -1
- oscura/analyzers/eye/generation.py +501 -0
- oscura/analyzers/jitter/__init__.py +6 -6
- oscura/analyzers/jitter/timing.py +419 -0
- oscura/analyzers/patterns/__init__.py +94 -0
- oscura/analyzers/patterns/reverse_engineering.py +991 -0
- oscura/analyzers/power/__init__.py +35 -12
- oscura/analyzers/power/basic.py +3 -3
- oscura/analyzers/power/soa.py +1 -1
- oscura/analyzers/power/switching.py +3 -3
- oscura/analyzers/signal_classification.py +529 -0
- oscura/analyzers/signal_integrity/sparams.py +3 -3
- oscura/analyzers/statistics/__init__.py +4 -0
- oscura/analyzers/statistics/basic.py +152 -0
- oscura/analyzers/statistics/correlation.py +47 -6
- oscura/analyzers/validation.py +1 -1
- oscura/analyzers/waveform/__init__.py +2 -0
- oscura/analyzers/waveform/measurements.py +329 -163
- oscura/analyzers/waveform/measurements_with_uncertainty.py +91 -35
- oscura/analyzers/waveform/spectral.py +498 -54
- oscura/api/dsl/commands.py +15 -6
- oscura/api/server/templates/base.html +137 -146
- oscura/api/server/templates/export.html +84 -110
- oscura/api/server/templates/home.html +248 -267
- oscura/api/server/templates/protocols.html +44 -48
- oscura/api/server/templates/reports.html +27 -35
- oscura/api/server/templates/session_detail.html +68 -78
- oscura/api/server/templates/sessions.html +62 -72
- oscura/api/server/templates/waveforms.html +54 -64
- oscura/automotive/__init__.py +1 -1
- oscura/automotive/can/session.py +1 -1
- oscura/automotive/dbc/generator.py +638 -23
- oscura/automotive/dtc/data.json +102 -17
- oscura/automotive/uds/decoder.py +99 -6
- oscura/cli/analyze.py +8 -2
- oscura/cli/batch.py +36 -5
- oscura/cli/characterize.py +18 -4
- oscura/cli/export.py +47 -5
- oscura/cli/main.py +2 -0
- oscura/cli/onboarding/wizard.py +10 -6
- oscura/cli/pipeline.py +585 -0
- oscura/cli/visualize.py +6 -4
- oscura/convenience.py +400 -32
- oscura/core/config/loader.py +0 -1
- oscura/core/measurement_result.py +286 -0
- oscura/core/progress.py +1 -1
- oscura/core/schemas/device_mapping.json +8 -2
- oscura/core/schemas/packet_format.json +24 -4
- oscura/core/schemas/protocol_definition.json +12 -2
- oscura/core/types.py +300 -199
- oscura/correlation/multi_protocol.py +1 -1
- oscura/export/legacy/__init__.py +11 -0
- oscura/export/legacy/wav.py +75 -0
- oscura/exporters/__init__.py +19 -0
- oscura/exporters/wireshark.py +809 -0
- oscura/hardware/acquisition/file.py +5 -19
- oscura/hardware/acquisition/saleae.py +10 -10
- oscura/hardware/acquisition/socketcan.py +4 -6
- oscura/hardware/acquisition/synthetic.py +1 -5
- oscura/hardware/acquisition/visa.py +6 -6
- oscura/hardware/security/side_channel_detector.py +5 -508
- oscura/inference/message_format.py +686 -1
- oscura/jupyter/display.py +2 -2
- oscura/jupyter/magic.py +3 -3
- oscura/loaders/__init__.py +17 -12
- oscura/loaders/binary.py +1 -1
- oscura/loaders/chipwhisperer.py +1 -2
- oscura/loaders/configurable.py +1 -1
- oscura/loaders/csv_loader.py +2 -2
- oscura/loaders/hdf5_loader.py +1 -1
- oscura/loaders/lazy.py +6 -1
- oscura/loaders/mmap_loader.py +0 -1
- oscura/loaders/numpy_loader.py +8 -7
- oscura/loaders/preprocessing.py +3 -5
- oscura/loaders/rigol.py +21 -7
- oscura/loaders/sigrok.py +2 -5
- oscura/loaders/tdms.py +3 -2
- oscura/loaders/tektronix.py +38 -32
- oscura/loaders/tss.py +20 -27
- oscura/loaders/vcd.py +13 -8
- oscura/loaders/wav.py +1 -6
- oscura/pipeline/__init__.py +76 -0
- oscura/pipeline/handlers/__init__.py +165 -0
- oscura/pipeline/handlers/analyzers.py +1045 -0
- oscura/pipeline/handlers/decoders.py +899 -0
- oscura/pipeline/handlers/exporters.py +1103 -0
- oscura/pipeline/handlers/filters.py +891 -0
- oscura/pipeline/handlers/loaders.py +640 -0
- oscura/pipeline/handlers/transforms.py +768 -0
- oscura/reporting/__init__.py +88 -1
- oscura/reporting/automation.py +348 -0
- oscura/reporting/citations.py +374 -0
- oscura/reporting/core.py +54 -0
- oscura/reporting/formatting/__init__.py +11 -0
- oscura/reporting/formatting/measurements.py +320 -0
- oscura/reporting/html.py +57 -0
- oscura/reporting/interpretation.py +431 -0
- oscura/reporting/summary.py +329 -0
- oscura/reporting/templates/enhanced/protocol_re.html +504 -503
- oscura/reporting/visualization.py +542 -0
- oscura/side_channel/__init__.py +38 -57
- oscura/utils/builders/signal_builder.py +5 -5
- oscura/utils/comparison/compare.py +7 -9
- oscura/utils/comparison/golden.py +1 -1
- oscura/utils/filtering/convenience.py +2 -2
- oscura/utils/math/arithmetic.py +38 -62
- oscura/utils/math/interpolation.py +20 -20
- oscura/utils/pipeline/__init__.py +4 -17
- oscura/utils/progressive.py +1 -4
- oscura/utils/triggering/edge.py +1 -1
- oscura/utils/triggering/pattern.py +2 -2
- oscura/utils/triggering/pulse.py +2 -2
- oscura/utils/triggering/window.py +3 -3
- oscura/validation/hil_testing.py +11 -11
- oscura/visualization/__init__.py +47 -284
- oscura/visualization/batch.py +160 -0
- oscura/visualization/plot.py +542 -53
- oscura/visualization/styles.py +184 -318
- oscura/workflows/__init__.py +2 -0
- oscura/workflows/batch/advanced.py +1 -1
- oscura/workflows/batch/aggregate.py +7 -8
- oscura/workflows/complete_re.py +251 -23
- oscura/workflows/digital.py +27 -4
- oscura/workflows/multi_trace.py +136 -17
- oscura/workflows/waveform.py +788 -0
- {oscura-0.7.0.dist-info → oscura-0.10.0.dist-info}/METADATA +59 -79
- {oscura-0.7.0.dist-info → oscura-0.10.0.dist-info}/RECORD +135 -149
- oscura/side_channel/dpa.py +0 -1025
- oscura/utils/optimization/__init__.py +0 -19
- oscura/utils/optimization/parallel.py +0 -443
- oscura/utils/optimization/search.py +0 -532
- oscura/utils/pipeline/base.py +0 -338
- oscura/utils/pipeline/composition.py +0 -248
- oscura/utils/pipeline/parallel.py +0 -449
- oscura/utils/pipeline/pipeline.py +0 -375
- oscura/utils/search/__init__.py +0 -16
- oscura/utils/search/anomaly.py +0 -424
- oscura/utils/search/context.py +0 -294
- oscura/utils/search/pattern.py +0 -288
- oscura/utils/storage/__init__.py +0 -61
- oscura/utils/storage/database.py +0 -1166
- oscura/visualization/accessibility.py +0 -526
- oscura/visualization/annotations.py +0 -371
- oscura/visualization/axis_scaling.py +0 -305
- oscura/visualization/colors.py +0 -451
- oscura/visualization/digital.py +0 -436
- oscura/visualization/eye.py +0 -571
- oscura/visualization/histogram.py +0 -281
- oscura/visualization/interactive.py +0 -1035
- oscura/visualization/jitter.py +0 -1042
- oscura/visualization/keyboard.py +0 -394
- oscura/visualization/layout.py +0 -400
- oscura/visualization/optimization.py +0 -1079
- oscura/visualization/palettes.py +0 -446
- oscura/visualization/power.py +0 -508
- oscura/visualization/power_extended.py +0 -955
- oscura/visualization/presets.py +0 -469
- oscura/visualization/protocols.py +0 -1246
- oscura/visualization/render.py +0 -223
- oscura/visualization/rendering.py +0 -444
- oscura/visualization/reverse_engineering.py +0 -838
- oscura/visualization/signal_integrity.py +0 -989
- oscura/visualization/specialized.py +0 -643
- oscura/visualization/spectral.py +0 -1226
- oscura/visualization/thumbnails.py +0 -340
- oscura/visualization/time_axis.py +0 -351
- oscura/visualization/waveform.py +0 -454
- {oscura-0.7.0.dist-info → oscura-0.10.0.dist-info}/WHEEL +0 -0
- {oscura-0.7.0.dist-info → oscura-0.10.0.dist-info}/entry_points.txt +0 -0
- {oscura-0.7.0.dist-info → oscura-0.10.0.dist-info}/licenses/LICENSE +0 -0
oscura/utils/storage/database.py
DELETED
@@ -1,1166 +0,0 @@
"""Database backend for storing and querying analysis results.

This module provides a comprehensive database abstraction for persisting
hardware reverse engineering session data including protocols, messages,
and analysis results.

Example:
    >>> from oscura.utils.storage import DatabaseBackend, DatabaseConfig
    >>>
    >>> # SQLite (default, no dependencies)
    >>> config = DatabaseConfig(url="sqlite:///analysis.db")
    >>> db = DatabaseBackend(config)
    >>>
    >>> # PostgreSQL (optional)
    >>> config = DatabaseConfig(
    ...     url="postgresql://user:pass@localhost/oscura",
    ...     pool_size=10
    ... )
    >>> db = DatabaseBackend(config)
    >>>
    >>> # Store analysis workflow
    >>> project_id = db.create_project("CAN Bus RE", "Automotive reverse engineering")
    >>> session_id = db.create_session(project_id, "can", {"bus": "HS-CAN"})
    >>> protocol_id = db.store_protocol(session_id, "UDS", spec_json, confidence=0.9)
    >>> db.store_message(protocol_id, timestamp=1.5, data=b"\\x02\\x10\\x01", decoded_fields=decoded)
    >>>
    >>> # Query historical data
    >>> protocols = db.find_protocols(name_pattern="UDS%", min_confidence=0.8)
    >>> messages = db.query_messages(protocol_id, time_range=(0.0, 10.0))
    >>> results = db.get_analysis_results(session_id, analysis_type="dpa")

Architecture:
    - SQLite by default (serverless, file-based)
    - PostgreSQL optional (production deployments)
    - Raw SQL fallback (no ORM dependencies)
    - Connection pooling for performance
    - Automatic schema migration
    - Transaction support

Database Schema:
    projects: Project metadata and descriptions
    sessions: Analysis sessions per project
    protocols: Discovered protocols per session
    messages: Decoded messages per protocol
    analysis_results: DPA, timing, entropy, etc.

References:
    V0.6.0_COMPLETE_COMPREHENSIVE_PLAN.md: Phase 5 Feature 45
    SQLite Documentation: https://www.sqlite.org/docs.html
    PostgreSQL Documentation: https://www.postgresql.org/docs/
"""

from __future__ import annotations

import json
import logging
import sqlite3
from dataclasses import asdict, dataclass, field
from datetime import datetime
from pathlib import Path
from typing import Any

# Optional PostgreSQL support
try:
    import psycopg2  # type: ignore[import-untyped]
    from psycopg2.pool import SimpleConnectionPool  # type: ignore[import-untyped]

    HAS_POSTGRES = True
except ImportError:
    psycopg2 = None
    SimpleConnectionPool = None
    HAS_POSTGRES = False


logger = logging.getLogger(__name__)

# SQL Schema constants for SQLite
_SQL_CREATE_PROJECTS_SQLITE = """
CREATE TABLE IF NOT EXISTS projects (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL,
    description TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    metadata TEXT
)
"""

_SQL_CREATE_SESSIONS_SQLITE = """
CREATE TABLE IF NOT EXISTS sessions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    project_id INTEGER NOT NULL,
    session_type TEXT NOT NULL,
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    metadata TEXT,
    FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE
)
"""

_SQL_CREATE_PROTOCOLS_SQLITE = """
CREATE TABLE IF NOT EXISTS protocols (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    session_id INTEGER NOT NULL,
    name TEXT NOT NULL,
    spec_json TEXT NOT NULL,
    confidence REAL NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (session_id) REFERENCES sessions(id) ON DELETE CASCADE
)
"""

_SQL_CREATE_MESSAGES_SQLITE = """
CREATE TABLE IF NOT EXISTS messages (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    protocol_id INTEGER NOT NULL,
    timestamp REAL NOT NULL,
    data TEXT NOT NULL,
    decoded_fields TEXT,
    FOREIGN KEY (protocol_id) REFERENCES protocols(id) ON DELETE CASCADE
)
"""

_SQL_CREATE_ANALYSIS_SQLITE = """
CREATE TABLE IF NOT EXISTS analysis_results (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    session_id INTEGER NOT NULL,
    analysis_type TEXT NOT NULL,
    results_json TEXT NOT NULL,
    metrics TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (session_id) REFERENCES sessions(id) ON DELETE CASCADE
)
"""

# SQL Schema constants for PostgreSQL
_SQL_CREATE_PROJECTS_POSTGRES = """
CREATE TABLE IF NOT EXISTS projects (
    id SERIAL PRIMARY KEY,
    name TEXT NOT NULL,
    description TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    metadata JSONB
)
"""

_SQL_CREATE_SESSIONS_POSTGRES = """
CREATE TABLE IF NOT EXISTS sessions (
    id SERIAL PRIMARY KEY,
    project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
    session_type TEXT NOT NULL,
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    metadata JSONB
)
"""

_SQL_CREATE_PROTOCOLS_POSTGRES = """
CREATE TABLE IF NOT EXISTS protocols (
    id SERIAL PRIMARY KEY,
    session_id INTEGER NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
    name TEXT NOT NULL,
    spec_json JSONB NOT NULL,
    confidence REAL NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
"""

_SQL_CREATE_MESSAGES_POSTGRES = """
CREATE TABLE IF NOT EXISTS messages (
    id SERIAL PRIMARY KEY,
    protocol_id INTEGER NOT NULL REFERENCES protocols(id) ON DELETE CASCADE,
    timestamp REAL NOT NULL,
    data TEXT NOT NULL,
    decoded_fields JSONB
)
"""

_SQL_CREATE_ANALYSIS_POSTGRES = """
CREATE TABLE IF NOT EXISTS analysis_results (
    id SERIAL PRIMARY KEY,
    session_id INTEGER NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
    analysis_type TEXT NOT NULL,
    results_json JSONB NOT NULL,
    metrics JSONB,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
"""

# Index creation statements
_SQL_CREATE_INDEXES = [
    "CREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project_id)",
    "CREATE INDEX IF NOT EXISTS idx_protocols_session ON protocols(session_id)",
    "CREATE INDEX IF NOT EXISTS idx_protocols_name ON protocols(name)",
    "CREATE INDEX IF NOT EXISTS idx_messages_protocol ON messages(protocol_id)",
    "CREATE INDEX IF NOT EXISTS idx_messages_timestamp ON messages(timestamp)",
    "CREATE INDEX IF NOT EXISTS idx_analysis_session ON analysis_results(session_id)",
]


@dataclass
class DatabaseConfig:
    """Database configuration.

    Attributes:
        url: Database URL (sqlite:///path.db or postgresql://...)
        pool_size: Connection pool size (PostgreSQL only)
        timeout: Connection timeout in seconds
        echo_sql: Log SQL statements for debugging

    Example:
        >>> # SQLite (default)
        >>> config = DatabaseConfig(url="sqlite:///analysis.db")
        >>>
        >>> # PostgreSQL
        >>> config = DatabaseConfig(
        ...     url="postgresql://user:pass@localhost/oscura",
        ...     pool_size=10,
        ...     timeout=30.0
        ... )
    """

    url: str = "sqlite:///oscura_analysis.db"
    pool_size: int = 5
    timeout: float = 30.0
    echo_sql: bool = False


@dataclass
class Project:
    """Project metadata.

    Attributes:
        id: Project ID (auto-assigned)
        name: Project name
        description: Project description
        created_at: Creation timestamp
        updated_at: Last update timestamp
        metadata: Additional metadata

    Example:
        >>> project = Project(
        ...     id=1,
        ...     name="Automotive CAN",
        ...     description="CAN bus protocol analysis",
        ...     created_at=datetime.now(UTC),
        ...     updated_at=datetime.now(UTC)
        ... )
    """

    id: int | None = None
    name: str = ""
    description: str = ""
    created_at: datetime | None = None
    updated_at: datetime | None = None
    metadata: dict[str, Any] = field(default_factory=dict)


@dataclass
class Session:
    """Analysis session.

    Attributes:
        id: Session ID (auto-assigned)
        project_id: Parent project ID
        session_type: Session type (blackbox, can, uart, etc.)
        timestamp: Session timestamp
        metadata: Session-specific metadata

    Example:
        >>> session = Session(
        ...     id=1,
        ...     project_id=1,
        ...     session_type="blackbox",
        ...     timestamp=datetime.now(UTC),
        ...     metadata={"capture_file": "device.bin"}
        ... )
    """

    id: int | None = None
    project_id: int | None = None
    session_type: str = ""
    timestamp: datetime | None = None
    metadata: dict[str, Any] = field(default_factory=dict)


@dataclass
class Protocol:
    """Discovered protocol.

    Attributes:
        id: Protocol ID (auto-assigned)
        session_id: Parent session ID
        name: Protocol name
        spec_json: Protocol specification as JSON
        confidence: Confidence score (0.0-1.0)
        created_at: Creation timestamp

    Example:
        >>> protocol = Protocol(
        ...     id=1,
        ...     session_id=1,
        ...     name="IoT Protocol",
        ...     spec_json={"fields": [...]},
        ...     confidence=0.85
        ... )
    """

    id: int | None = None
    session_id: int | None = None
    name: str = ""
    spec_json: dict[str, Any] = field(default_factory=dict)
    confidence: float = 0.0
    created_at: datetime | None = None


@dataclass
class Message:
    """Decoded message.

    Attributes:
        id: Message ID (auto-assigned)
        protocol_id: Parent protocol ID
        timestamp: Message timestamp
        data: Raw message data (hex string)
        decoded_fields: Decoded field values

    Example:
        >>> message = Message(
        ...     id=1,
        ...     protocol_id=1,
        ...     timestamp=1.5,
        ...     data="aa5501",
        ...     decoded_fields={"id": 1, "counter": 0}
        ... )
    """

    id: int | None = None
    protocol_id: int | None = None
    timestamp: float = 0.0
    data: str = ""
    decoded_fields: dict[str, Any] = field(default_factory=dict)


@dataclass
class AnalysisResult:
    """Analysis result.

    Attributes:
        id: Result ID (auto-assigned)
        session_id: Parent session ID
        analysis_type: Analysis type (dpa, timing, entropy, etc.)
        results_json: Analysis results as JSON
        metrics: Computed metrics
        created_at: Creation timestamp

    Example:
        >>> result = AnalysisResult(
        ...     id=1,
        ...     session_id=1,
        ...     analysis_type="dpa",
        ...     results_json={"recovered_key": "0x1234..."},
        ...     metrics={"confidence": 0.95}
        ... )
    """

    id: int | None = None
    session_id: int | None = None
    analysis_type: str = ""
    results_json: dict[str, Any] = field(default_factory=dict)
    metrics: dict[str, Any] = field(default_factory=dict)
    created_at: datetime | None = None


@dataclass
class QueryResult:
    """Paginated query result.

    Attributes:
        items: Result items
        total: Total number of results
        page: Current page number (0-indexed)
        page_size: Items per page

    Example:
        >>> result = QueryResult(
        ...     items=[msg1, msg2, msg3],
        ...     total=100,
        ...     page=0,
        ...     page_size=10
        ... )
        >>> print(f"Page 1/{result.total_pages}: {len(result.items)} items")
    """

    items: list[Any] = field(default_factory=list)
    total: int = 0
    page: int = 0
    page_size: int = 100

    @property
    def total_pages(self) -> int:
        """Calculate total number of pages.

        Returns:
            Number of pages (at least 1)
        """
        return max(1, (self.total + self.page_size - 1) // self.page_size)

    @property
    def has_next(self) -> bool:
        """Check if there is a next page.

        Returns:
            True if more pages available
        """
        return self.page < self.total_pages - 1

    @property
    def has_prev(self) -> bool:
        """Check if there is a previous page.

        Returns:
            True if previous pages exist
        """
        return self.page > 0


class DatabaseBackend:
    """Database backend for storing analysis results.

    Supports SQLite (default) and PostgreSQL (optional).
    Uses raw SQL for simplicity and graceful degradation.

    Example:
        >>> config = DatabaseConfig(url="sqlite:///analysis.db")
        >>> db = DatabaseBackend(config)
        >>>
        >>> # Create project hierarchy
        >>> proj_id = db.create_project("IoT RE", "Device protocol analysis")
        >>> sess_id = db.create_session(proj_id, "blackbox", {"file": "capture.bin"})
        >>> prot_id = db.store_protocol(sess_id, "IoT", {"fields": []}, 0.9)
        >>>
        >>> # Store messages
        >>> db.store_message(prot_id, 0.0, b"\\xaa\\x55", {"id": 1})
        >>>
        >>> # Query
        >>> protocols = db.find_protocols(min_confidence=0.8)
        >>> messages = db.query_messages(prot_id, limit=10)
    """

    def __init__(self, config: DatabaseConfig | None = None) -> None:
        """Initialize database backend.

        Args:
            config: Database configuration (default: SQLite)

        Raises:
            ValueError: If PostgreSQL URL but psycopg2 not installed
            sqlite3.Error: If SQLite database creation fails
        """
        self.config = config or DatabaseConfig()
        self._conn: Any = None
        self._pool: Any = None

        # Determine backend type
        self._is_postgres = self.config.url.startswith("postgresql://")

        if self._is_postgres and not HAS_POSTGRES:
            raise ValueError(
                "PostgreSQL URL specified but psycopg2 not installed. "
                "Install with: pip install psycopg2-binary"
            )

        # Initialize connection/pool
        self._init_connection()

        # Create schema
        self._create_schema()

    def _init_connection(self) -> None:
        """Initialize database connection or pool."""
        if self._is_postgres:
            # PostgreSQL connection pool
            self._pool = SimpleConnectionPool(
                1,
                self.config.pool_size,
                self.config.url,
                connect_timeout=int(self.config.timeout),
            )
        else:
            # SQLite connection
            db_path = self.config.url.replace("sqlite:///", "")
            Path(db_path).parent.mkdir(parents=True, exist_ok=True)
            self._conn = sqlite3.connect(
                db_path,
                timeout=self.config.timeout,
                check_same_thread=False,
            )
            self._conn.row_factory = sqlite3.Row

    def _get_connection(self) -> Any:
        """Get database connection.

        Returns:
            Connection object (sqlite3.Connection or psycopg2.connection)
        """
        if self._is_postgres:
            return self._pool.getconn()
        return self._conn

    def _return_connection(self, conn: Any) -> None:
        """Return connection to pool (PostgreSQL only).

        Args:
            conn: Connection to return
        """
        if self._is_postgres:
            self._pool.putconn(conn)

    def _execute(self, sql: str, params: tuple[Any, ...] = ()) -> Any:
        """Execute SQL statement.

        Args:
            sql: SQL statement
            params: Query parameters

        Returns:
            Cursor after execution
        """
        conn = self._get_connection()
        try:
            cursor = conn.cursor()
            if self.config.echo_sql:
                logger.debug(f"SQL: {sql}")
                logger.debug(f"Params: {params}")
            cursor.execute(sql, params)
            conn.commit()
            return cursor
        finally:
            self._return_connection(conn)

    def _fetchall(self, sql: str, params: tuple[Any, ...] = ()) -> list[Any]:
        """Execute query and fetch all results.

        Args:
            sql: SQL query
            params: Query parameters

        Returns:
            List of row dictionaries
        """
        conn = self._get_connection()
        try:
            cursor = conn.cursor()
            if self.config.echo_sql:
                logger.debug(f"SQL: {sql}")
                logger.debug(f"Params: {params}")
            cursor.execute(sql, params)

            if self._is_postgres:
                columns = [desc[0] for desc in cursor.description]
                return [dict(zip(columns, row, strict=True)) for row in cursor.fetchall()]
            else:
                return [dict(row) for row in cursor.fetchall()]
        finally:
            self._return_connection(conn)

    def _fetchone(self, sql: str, params: tuple[Any, ...] = ()) -> dict[str, Any] | None:
        """Execute query and fetch one result.

        Args:
            sql: SQL query
            params: Query parameters

        Returns:
            Row dictionary or None
        """
        results = self._fetchall(sql, params)
        return results[0] if results else None

    def _create_schema(self) -> None:
        """Create database schema if not exists."""
        tables = [
            ("projects", _SQL_CREATE_PROJECTS_SQLITE, _SQL_CREATE_PROJECTS_POSTGRES),
            ("sessions", _SQL_CREATE_SESSIONS_SQLITE, _SQL_CREATE_SESSIONS_POSTGRES),
            ("protocols", _SQL_CREATE_PROTOCOLS_SQLITE, _SQL_CREATE_PROTOCOLS_POSTGRES),
            ("messages", _SQL_CREATE_MESSAGES_SQLITE, _SQL_CREATE_MESSAGES_POSTGRES),
            ("analysis_results", _SQL_CREATE_ANALYSIS_SQLITE, _SQL_CREATE_ANALYSIS_POSTGRES),
        ]

        for _, sqlite_sql, postgres_sql in tables:
            self._execute(sqlite_sql if not self._is_postgres else postgres_sql)

        # Create indexes
        for idx_sql in _SQL_CREATE_INDEXES:
            self._execute(idx_sql)

    def create_project(
        self, name: str, description: str = "", metadata: dict[str, Any] | None = None
    ) -> int:
        """Create new project.

        Args:
            name: Project name
            description: Project description
            metadata: Additional metadata

        Returns:
            Project ID

        Example:
            >>> db = DatabaseBackend()
            >>> project_id = db.create_project("IoT RE", "Unknown device protocol")
        """
        metadata_json = json.dumps(metadata or {})
        cursor = self._execute(
            "INSERT INTO projects (name, description, metadata) VALUES (?, ?, ?)",
            (name, description, metadata_json),
        )
        result: int = cursor.lastrowid
        return result

    def get_project(self, project_id: int) -> Project | None:
        """Get project by ID.

        Args:
            project_id: Project ID

        Returns:
            Project or None if not found

        Example:
            >>> project = db.get_project(1)
            >>> print(project.name)
        """
        row = self._fetchone("SELECT * FROM projects WHERE id = ?", (project_id,))
        if not row:
            return None

        return Project(
            id=row["id"],
            name=row["name"],
            description=row["description"] or "",
            created_at=datetime.fromisoformat(row["created_at"]),
            updated_at=datetime.fromisoformat(row["updated_at"]),
            metadata=json.loads(row["metadata"]) if row["metadata"] else {},
        )

    def list_projects(self) -> list[Project]:
        """List all projects.

        Returns:
            List of projects

        Example:
            >>> projects = db.list_projects()
            >>> for proj in projects:
            ...     print(f"{proj.id}: {proj.name}")
        """
        rows = self._fetchall("SELECT * FROM projects ORDER BY updated_at DESC")
        return [
            Project(
                id=row["id"],
                name=row["name"],
                description=row["description"] or "",
                created_at=datetime.fromisoformat(row["created_at"]),
                updated_at=datetime.fromisoformat(row["updated_at"]),
                metadata=json.loads(row["metadata"]) if row["metadata"] else {},
            )
            for row in rows
        ]

    def create_session(
        self,
        project_id: int,
        session_type: str,
        metadata: dict[str, Any] | None = None,
    ) -> int:
        """Create new session.

        Args:
            project_id: Parent project ID
            session_type: Session type (blackbox, can, uart, etc.)
            metadata: Session metadata

        Returns:
            Session ID

        Example:
            >>> session_id = db.create_session(
            ...     project_id=1,
            ...     session_type="blackbox",
            ...     metadata={"capture": "device.bin"}
            ... )
        """
        metadata_json = json.dumps(metadata or {})
        cursor = self._execute(
            "INSERT INTO sessions (project_id, session_type, metadata) VALUES (?, ?, ?)",
            (project_id, session_type, metadata_json),
        )
        result: int = cursor.lastrowid
        return result

    def get_sessions(self, project_id: int) -> list[Session]:
        """Get all sessions for project.

        Args:
            project_id: Project ID

        Returns:
            List of sessions

        Example:
            >>> sessions = db.get_sessions(project_id=1)
            >>> for sess in sessions:
            ...     print(f"{sess.id}: {sess.session_type}")
        """
        rows = self._fetchall(
            "SELECT * FROM sessions WHERE project_id = ? ORDER BY timestamp DESC",
            (project_id,),
        )
        return [
            Session(
                id=row["id"],
                project_id=row["project_id"],
                session_type=row["session_type"],
                timestamp=datetime.fromisoformat(row["timestamp"]),
                metadata=json.loads(row["metadata"]) if row["metadata"] else {},
            )
            for row in rows
        ]

    def store_protocol(
        self,
        session_id: int,
        name: str,
        spec_json: dict[str, Any],
        confidence: float,
    ) -> int:
        """Store discovered protocol.

        Args:
            session_id: Parent session ID
            name: Protocol name
            spec_json: Protocol specification
            confidence: Confidence score (0.0-1.0)

        Returns:
            Protocol ID

        Example:
            >>> protocol_id = db.store_protocol(
            ...     session_id=1,
            ...     name="IoT Protocol",
            ...     spec_json={"fields": [...]},
            ...     confidence=0.85
            ... )
        """
        spec_str = json.dumps(spec_json)
        cursor = self._execute(
            "INSERT INTO protocols (session_id, name, spec_json, confidence) VALUES (?, ?, ?, ?)",
            (session_id, name, spec_str, confidence),
        )
        result: int = cursor.lastrowid
        return result

    def find_protocols(
        self,
        name_pattern: str | None = None,
        min_confidence: float | None = None,
    ) -> list[Protocol]:
        """Find protocols by criteria.

        Args:
            name_pattern: SQL LIKE pattern (e.g., "UDS%")
            min_confidence: Minimum confidence threshold

        Returns:
            List of matching protocols

        Example:
            >>> # Find all UDS protocols with confidence > 0.8
            >>> protocols = db.find_protocols(name_pattern="UDS%", min_confidence=0.8)
        """
        conditions = []
        params: list[Any] = []

        if name_pattern:
            conditions.append("name LIKE ?")
            params.append(name_pattern)

        if min_confidence is not None:
            conditions.append("confidence >= ?")
            params.append(min_confidence)

        where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
        sql = f"SELECT * FROM protocols {where_clause} ORDER BY confidence DESC"

        rows = self._fetchall(sql, tuple(params))
        return [
            Protocol(
                id=row["id"],
                session_id=row["session_id"],
                name=row["name"],
                spec_json=json.loads(row["spec_json"]),
                confidence=row["confidence"],
                created_at=datetime.fromisoformat(row["created_at"]),
            )
            for row in rows
        ]

    def store_message(
        self,
        protocol_id: int,
        timestamp: float,
        data: bytes,
        decoded_fields: dict[str, Any] | None = None,
    ) -> int:
        """Store decoded message.

        Args:
            protocol_id: Parent protocol ID
            timestamp: Message timestamp
            data: Raw message bytes
            decoded_fields: Decoded field values

        Returns:
            Message ID

        Example:
            >>> msg_id = db.store_message(
            ...     protocol_id=1,
            ...     timestamp=1.5,
            ...     data=b"\\xaa\\x55\\x01",
            ...     decoded_fields={"id": 1, "counter": 0}
            ... )
        """
        data_hex = data.hex()
        fields_json = json.dumps(decoded_fields or {})
        cursor = self._execute(
            "INSERT INTO messages (protocol_id, timestamp, data, decoded_fields) "
            "VALUES (?, ?, ?, ?)",
            (protocol_id, timestamp, data_hex, fields_json),
        )
        result: int = cursor.lastrowid
        return result

    def query_messages(
        self,
        protocol_id: int,
        time_range: tuple[float, float] | None = None,
        field_filters: dict[str, Any] | None = None,
        limit: int = 100,
        offset: int = 0,
    ) -> QueryResult:
        """Query messages with filtering and pagination.

        Args:
            protocol_id: Protocol ID
            time_range: (start_time, end_time) tuple
            field_filters: Field name -> value filters
            limit: Maximum results per page
            offset: Result offset for pagination

        Returns:
            Paginated query results

        Example:
            >>> # Get first 10 messages between t=0 and t=10
            >>> result = db.query_messages(
            ...     protocol_id=1,
            ...     time_range=(0.0, 10.0),
            ...     limit=10
            ... )
            >>> print(f"Page {result.page + 1}/{result.total_pages}")
            >>> for msg in result.items:
            ...     print(msg.decoded_fields)
        """
        conditions = ["protocol_id = ?"]
        params: list[Any] = [protocol_id]

        if time_range:
            conditions.append("timestamp >= ? AND timestamp <= ?")
            params.extend(time_range)

        where_clause = f"WHERE {' AND '.join(conditions)}"

        # Count total
        count_sql = f"SELECT COUNT(*) as total FROM messages {where_clause}"
        count_row = self._fetchone(count_sql, tuple(params))
        total = count_row["total"] if count_row else 0

        # Fetch page
        sql = f"SELECT * FROM messages {where_clause} ORDER BY timestamp LIMIT ? OFFSET ?"
        params.extend([limit, offset])
        rows = self._fetchall(sql, tuple(params))

        messages = [
            Message(
                id=row["id"],
                protocol_id=row["protocol_id"],
                timestamp=row["timestamp"],
                data=row["data"],
                decoded_fields=json.loads(row["decoded_fields"]) if row["decoded_fields"] else {},
            )
            for row in rows
        ]

        # Apply field filters (client-side for SQLite)
        if field_filters:
            messages = [
                msg
                for msg in messages
                if all(msg.decoded_fields.get(k) == v for k, v in field_filters.items())
            ]

        return QueryResult(
            items=messages,
            total=total,
            page=offset // limit,
            page_size=limit,
        )

    def store_analysis_result(
        self,
        session_id: int,
        analysis_type: str,
        results_json: dict[str, Any],
        metrics: dict[str, Any] | None = None,
    ) -> int:
        """Store analysis result.

        Args:
            session_id: Parent session ID
            analysis_type: Analysis type (dpa, timing, entropy, etc.)
            results_json: Analysis results
            metrics: Computed metrics

        Returns:
            Result ID

        Example:
            >>> result_id = db.store_analysis_result(
            ...     session_id=1,
            ...     analysis_type="dpa",
            ...     results_json={"recovered_key": "0x1234..."},
            ...     metrics={"confidence": 0.95}
            ... )
        """
        results_str = json.dumps(results_json)
        metrics_str = json.dumps(metrics or {})
        cursor = self._execute(
            "INSERT INTO analysis_results (session_id, analysis_type, results_json, metrics) "
            "VALUES (?, ?, ?, ?)",
            (session_id, analysis_type, results_str, metrics_str),
        )
        result: int = cursor.lastrowid
        return result

    def get_analysis_results(
        self, session_id: int, analysis_type: str | None = None
    ) -> list[AnalysisResult]:
        """Get analysis results for session.

        Args:
            session_id: Session ID
            analysis_type: Filter by analysis type (optional)

        Returns:
            List of analysis results

        Example:
            >>> # Get all DPA results
            >>> results = db.get_analysis_results(session_id=1, analysis_type="dpa")
            >>> for result in results:
            ...     print(result.metrics["confidence"])
        """
        if analysis_type:
            sql = (
                "SELECT * FROM analysis_results "
                "WHERE session_id = ? AND analysis_type = ? "
                "ORDER BY created_at DESC"
            )
            params: tuple[Any, ...] = (session_id, analysis_type)
        else:
            sql = "SELECT * FROM analysis_results WHERE session_id = ? ORDER BY created_at DESC"
            params = (session_id,)

        rows = self._fetchall(sql, params)
        return [
            AnalysisResult(
                id=row["id"],
                session_id=row["session_id"],
                analysis_type=row["analysis_type"],
                results_json=json.loads(row["results_json"]),
                metrics=json.loads(row["metrics"]) if row["metrics"] else {},
                created_at=datetime.fromisoformat(row["created_at"]),
            )
            for row in rows
        ]

    def export_to_sql(self, output_path: str | Path) -> None:
        """Export database to SQL dump.

        Args:
            output_path: Output SQL file path

        Example:
            >>> db.export_to_sql("backup.sql")
        """
        output_path = Path(output_path)

        if self._is_postgres:
            raise NotImplementedError("PostgreSQL export via pg_dump recommended")

        # SQLite dump
        with open(output_path, "w") as f:
            for line in self._conn.iterdump():
                f.write(f"{line}\n")

    def export_to_json(self, output_path: str | Path, project_id: int | None = None) -> None:
        """Export database contents to JSON.

        Args:
            output_path: Output JSON file path
            project_id: Export specific project (optional)

        Example:
            >>> db.export_to_json("export.json", project_id=1)
        """
        output_path = Path(output_path)

        projects_list: list[Project | None]
        if project_id:
            projects_list = [self.get_project(project_id)]
        else:
            projects_list = list(self.list_projects())

        export_data = []
        for proj in projects_list:
            if proj is None:
                continue

            proj_data = asdict(proj)
            proj_data["sessions"] = []

            sessions = self.get_sessions(proj.id)  # type: ignore[arg-type]
            for sess in sessions:
                sess_data = asdict(sess)
                sess_data["protocols"] = []
                sess_data["analysis_results"] = []

                # Get protocols
                protocols = self.find_protocols()
                for prot in protocols:
                    if prot.session_id == sess.id:
                        prot_data = asdict(prot)
                        # Get messages
                        msgs = self.query_messages(prot.id, limit=1000)  # type: ignore[arg-type]
                        prot_data["messages"] = [asdict(msg) for msg in msgs.items]
                        sess_data["protocols"].append(prot_data)

                # Get analysis results
                results = self.get_analysis_results(sess.id)  # type: ignore[arg-type]
                sess_data["analysis_results"] = [asdict(r) for r in results]

                proj_data["sessions"].append(sess_data)

            export_data.append(proj_data)

        with open(output_path, "w") as f:
            json.dump(export_data, f, indent=2, default=str)

    def export_to_csv(self, output_dir: str | Path, project_id: int | None = None) -> None:
        """Export database to CSV files.

        Args:
            output_dir: Output directory for CSV files
            project_id: Export specific project (optional)

        Example:
            >>> db.export_to_csv("csv_export/", project_id=1)
        """
        import csv

        output_dir = Path(output_dir)
        output_dir.mkdir(parents=True, exist_ok=True)

        projects_list: list[Project | None]
        if project_id:
            projects_list = [self.get_project(project_id)]
        else:
            projects_list = list(self.list_projects())

        # Export projects
        with open(output_dir / "projects.csv", "w", newline="") as f:
            writer = csv.DictWriter(
                f, fieldnames=["id", "name", "description", "created_at", "updated_at"]
            )
            writer.writeheader()
            for proj in projects_list:
                if proj:
                    writer.writerow(
                        {
                            "id": proj.id,
                            "name": proj.name,
                            "description": proj.description,
                            "created_at": proj.created_at,
                            "updated_at": proj.updated_at,
                        }
                    )

        # Export sessions
        all_sessions = []
        for proj in projects_list:
            if proj:
                all_sessions.extend(self.get_sessions(proj.id))  # type: ignore[arg-type]

        with open(output_dir / "sessions.csv", "w", newline="") as f:
            writer = csv.DictWriter(f, fieldnames=["id", "project_id", "session_type", "timestamp"])
            writer.writeheader()
            for sess in all_sessions:
                writer.writerow(
                    {
                        "id": sess.id,
                        "project_id": sess.project_id,
                        "session_type": sess.session_type,
                        "timestamp": sess.timestamp,
                    }
                )

        # Export protocols
        all_protocols = self.find_protocols()
        with open(output_dir / "protocols.csv", "w", newline="") as f:
            writer = csv.DictWriter(
                f, fieldnames=["id", "session_id", "name", "confidence", "created_at"]
            )
            writer.writeheader()
            for prot in all_protocols:
                writer.writerow(
                    {
                        "id": prot.id,
                        "session_id": prot.session_id,
                        "name": prot.name,
                        "confidence": prot.confidence,
                        "created_at": prot.created_at,
                    }
                )

    def close(self) -> None:
        """Close database connection/pool.

        Example:
            >>> db.close()
        """
        if self._is_postgres and self._pool:
            self._pool.closeall()
        elif self._conn:
            self._conn.close()

    def __enter__(self) -> DatabaseBackend:
        """Context manager entry."""
        return self

    def __exit__(self, *args: Any) -> None:
        """Context manager exit."""
        self.close()