pyconvexity 0.4.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyconvexity might be problematic. Click here for more details.
- pyconvexity/__init__.py +241 -0
- pyconvexity/_version.py +1 -0
- pyconvexity/core/__init__.py +60 -0
- pyconvexity/core/database.py +485 -0
- pyconvexity/core/errors.py +106 -0
- pyconvexity/core/types.py +400 -0
- pyconvexity/dashboard.py +265 -0
- pyconvexity/data/README.md +101 -0
- pyconvexity/data/__init__.py +17 -0
- pyconvexity/data/loaders/__init__.py +3 -0
- pyconvexity/data/loaders/cache.py +213 -0
- pyconvexity/data/schema/01_core_schema.sql +420 -0
- pyconvexity/data/schema/02_data_metadata.sql +120 -0
- pyconvexity/data/schema/03_validation_data.sql +507 -0
- pyconvexity/data/sources/__init__.py +5 -0
- pyconvexity/data/sources/gem.py +442 -0
- pyconvexity/io/__init__.py +26 -0
- pyconvexity/io/excel_exporter.py +1226 -0
- pyconvexity/io/excel_importer.py +1381 -0
- pyconvexity/io/netcdf_exporter.py +191 -0
- pyconvexity/io/netcdf_importer.py +1802 -0
- pyconvexity/models/__init__.py +195 -0
- pyconvexity/models/attributes.py +730 -0
- pyconvexity/models/carriers.py +159 -0
- pyconvexity/models/components.py +611 -0
- pyconvexity/models/network.py +503 -0
- pyconvexity/models/results.py +148 -0
- pyconvexity/models/scenarios.py +234 -0
- pyconvexity/solvers/__init__.py +29 -0
- pyconvexity/solvers/pypsa/__init__.py +30 -0
- pyconvexity/solvers/pypsa/api.py +446 -0
- pyconvexity/solvers/pypsa/batch_loader.py +296 -0
- pyconvexity/solvers/pypsa/builder.py +655 -0
- pyconvexity/solvers/pypsa/clearing_price.py +678 -0
- pyconvexity/solvers/pypsa/constraints.py +405 -0
- pyconvexity/solvers/pypsa/solver.py +1442 -0
- pyconvexity/solvers/pypsa/storage.py +2096 -0
- pyconvexity/timeseries.py +330 -0
- pyconvexity/validation/__init__.py +25 -0
- pyconvexity/validation/rules.py +312 -0
- pyconvexity-0.4.8.dist-info/METADATA +148 -0
- pyconvexity-0.4.8.dist-info/RECORD +44 -0
- pyconvexity-0.4.8.dist-info/WHEEL +5 -0
- pyconvexity-0.4.8.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,485 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Database connection and schema management for PyConvexity.
|
|
3
|
+
|
|
4
|
+
Provides clean abstractions for database operations with proper connection
|
|
5
|
+
management, schema validation, and resource cleanup.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import sqlite3
|
|
9
|
+
import sys
|
|
10
|
+
from contextlib import contextmanager
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Generator, List, Optional
|
|
13
|
+
|
|
14
|
+
from pyconvexity.core.errors import ConnectionError, DatabaseError, ValidationError
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class DatabaseContext:
    """Context manager wrapping a SQLite connection with automatic cleanup.

    On a clean exit of the ``with`` body, pending changes are committed;
    if an exception escapes, they are rolled back. The connection is
    closed in both cases.
    """

    def __init__(self, db_path: str, read_only: bool = False):
        self.db_path = db_path
        self.read_only = read_only
        # Populated by __enter__, cleared again by __exit__.
        self.connection: Optional[sqlite3.Connection] = None

    def __enter__(self) -> sqlite3.Connection:
        self.connection = open_connection(self.db_path, read_only=self.read_only)
        return self.connection

    def __exit__(self, exc_type, exc_val, exc_tb):
        conn = self.connection
        if conn is not None:
            if exc_type is None:
                conn.commit()    # clean exit: persist pending changes
            else:
                conn.rollback()  # body raised: discard pending changes
            conn.close()
            self.connection = None
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
@contextmanager
def database_context(
    db_path: str, read_only: bool = False
) -> Generator[sqlite3.Connection, None, None]:
    """Yield a configured SQLite connection with commit/rollback-on-exit.

    Thin functional wrapper around :class:`DatabaseContext`.

    Args:
        db_path: Path to the SQLite database file
        read_only: If True, open in read-only mode

    Yields:
        sqlite3.Connection: Database connection with proper configuration

    Example:
        with database_context("model.db") as conn:
            cursor = conn.execute("SELECT * FROM networks")
            networks = cursor.fetchall()
    """
    ctx = DatabaseContext(db_path, read_only)
    with ctx as conn:
        yield conn
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def open_connection(db_path: str, read_only: bool = False) -> sqlite3.Connection:
    """Open and configure a SQLite connection.

    Args:
        db_path: Path to the SQLite database file
        read_only: If True, open in read-only mode

    Returns:
        sqlite3.Connection: Configured database connection

    Raises:
        ConnectionError: If database connection fails
    """
    try:
        if read_only:
            # Read-only access requires the URI form of the path.
            conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
        else:
            conn = sqlite3.connect(db_path)

        conn.row_factory = sqlite3.Row  # allow row["column"] access
        conn.execute("PRAGMA foreign_keys = ON")

        if not read_only:
            # WAL mode plus relaxed-but-safe sync improves write concurrency.
            for pragma in (
                "PRAGMA journal_mode = WAL",
                "PRAGMA synchronous = NORMAL",
                "PRAGMA wal_autocheckpoint = 1000",  # less frequent checkpoints
                "PRAGMA temp_store = MEMORY",        # faster temporary operations
            ):
                conn.execute(pragma)

        conn.execute("PRAGMA busy_timeout = 30000")  # wait up to 30 s on locks
        return conn

    except sqlite3.Error as e:
        raise ConnectionError(f"Failed to open database at {db_path}: {e}") from e
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def validate_database(conn: sqlite3.Connection) -> None:
    """Validate that the database schema contains all required tables.

    Args:
        conn: Database connection to validate

    Raises:
        ValidationError: If required tables are missing
    """
    required_tables = (
        "networks",
        "components",
        "component_attributes",
        "attribute_validation_rules",
        "carriers",
        "scenarios",
    )

    # Probe sqlite_master for each required table; collect the absent ones.
    missing_tables = [
        table
        for table in required_tables
        if conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
            (table,),
        ).fetchone() is None
    ]

    if missing_tables:
        raise ValidationError(
            f"Required tables not found in database: {', '.join(missing_tables)}"
        )
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def create_database_with_schema(db_path: str) -> None:
    """Create a new database at ``db_path`` and apply the complete schema.

    Any existing file at ``db_path`` is removed first so the schema is
    applied to a clean database. On failure, the partially created file
    is removed again.

    Args:
        db_path: Path where the new database should be created

    Raises:
        DatabaseError: If schema files cannot be found or applied
    """
    db_path_obj = Path(db_path)

    # Ensure parent directory exists
    if db_path_obj.parent and not db_path_obj.parent.exists():
        db_path_obj.parent.mkdir(parents=True, exist_ok=True)

    # Remove existing file if it exists, to ensure a clean start
    if db_path_obj.exists():
        db_path_obj.unlink()

    # Find schema files
    schema_dir = _find_schema_directory()
    if not schema_dir:
        raise DatabaseError("Could not find schema directory")

    schema_files = [
        "01_core_schema.sql",
        "02_data_metadata.sql",
        "03_validation_data.sql",
    ]

    # Verify all schema files exist before touching the database
    missing_files = [f for f in schema_files if not (schema_dir / f).exists()]
    if missing_files:
        raise DatabaseError(f"Schema files not found: {', '.join(missing_files)}")

    # Create connection and apply schemas
    conn = None
    try:
        conn = sqlite3.connect(db_path)

        # Same connection configuration used by open_connection()
        conn.execute("PRAGMA foreign_keys = ON")
        conn.execute("PRAGMA journal_mode = WAL")
        conn.execute("PRAGMA synchronous = NORMAL")
        conn.execute("PRAGMA wal_autocheckpoint = 1000")
        conn.execute("PRAGMA temp_store = MEMORY")
        conn.execute("PRAGMA busy_timeout = 30000")

        # Execute schemas in order (01 -> 02 -> 03)
        for filename in schema_files:
            with open(schema_dir / filename, "r") as f:
                conn.executescript(f.read())

    except sqlite3.Error as e:
        # BUG FIX: close the handle *before* removing the partial file.
        # Previously the connection was left open on this path, which
        # leaks the handle and makes the unlink fail on Windows.
        if conn is not None:
            conn.close()
            conn = None
        if db_path_obj.exists():
            db_path_obj.unlink()
        raise DatabaseError(f"Failed to create database schema: {e}") from e
    finally:
        if conn is not None:
            conn.close()
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
def _find_schema_directory() -> Optional[Path]:
|
|
220
|
+
"""
|
|
221
|
+
Find the schema directory in various possible locations.
|
|
222
|
+
|
|
223
|
+
Returns:
|
|
224
|
+
Path to schema directory or None if not found
|
|
225
|
+
""" # Try package data location first (PyPI/pip install)
|
|
226
|
+
try:
|
|
227
|
+
import importlib.resources
|
|
228
|
+
|
|
229
|
+
schema_path = importlib.resources.files("pyconvexity") / "data" / "schema"
|
|
230
|
+
if schema_path.is_dir():
|
|
231
|
+
return Path(str(schema_path))
|
|
232
|
+
except (ImportError, AttributeError):
|
|
233
|
+
pass
|
|
234
|
+
|
|
235
|
+
# Try relative to this file (development mode)
|
|
236
|
+
current_file = Path(__file__)
|
|
237
|
+
|
|
238
|
+
# Look for schema in the package data directory
|
|
239
|
+
# pyconvexity/src/pyconvexity/core/database.py -> pyconvexity/src/pyconvexity/data/schema
|
|
240
|
+
package_schema_dir = current_file.parent.parent / "data" / "schema"
|
|
241
|
+
if package_schema_dir.exists():
|
|
242
|
+
return package_schema_dir
|
|
243
|
+
|
|
244
|
+
# Look for schema in the main project (development mode)
|
|
245
|
+
# Assuming pyconvexity/src/pyconvexity/core/database.py
|
|
246
|
+
# and schema is at project_root/schema
|
|
247
|
+
project_root = current_file.parent.parent.parent.parent.parent
|
|
248
|
+
dev_schema_dir = project_root / "schema"
|
|
249
|
+
if dev_schema_dir.exists():
|
|
250
|
+
return dev_schema_dir
|
|
251
|
+
|
|
252
|
+
# Try bundled location (PyInstaller)
|
|
253
|
+
for p in sys.path:
|
|
254
|
+
candidate = Path(p) / "schema"
|
|
255
|
+
if candidate.exists() and candidate.is_dir():
|
|
256
|
+
return candidate
|
|
257
|
+
|
|
258
|
+
return None
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def get_database_info(conn: sqlite3.Connection) -> dict:
    """Summarize the database: table names and counts of the main entities.

    Args:
        conn: Database connection

    Returns:
        Dictionary with keys "tables" (list of table names) plus row
        counts for "networks", "components", "attributes", "scenarios"
        and "carriers" (0 when the corresponding table is absent).
    """
    cursor = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
    )
    info = {
        "tables": [row[0] for row in cursor.fetchall()],
        "networks": 0,
        "components": 0,
        "attributes": 0,
        "scenarios": 0,
        "carriers": 0,
    }

    # Map result keys to the tables that back them.
    entity_tables = {
        "networks": "networks",
        "components": "components",
        "attributes": "component_attributes",
        "scenarios": "scenarios",
        "carriers": "carriers",
    }

    for key, table in entity_tables.items():
        try:
            info[key] = conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()[0]
        except sqlite3.Error:
            info[key] = 0  # table might not exist

    return info
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
def check_database_compatibility(conn: sqlite3.Connection) -> dict:
    """Check whether the database matches the current PyConvexity schema.

    Args:
        conn: Database connection

    Returns:
        Dictionary with keys "compatible" (bool), "version" (str or
        None), "issues" (list of blocking problems) and "warnings"
        (list of non-blocking notes).
    """
    result = {"compatible": True, "version": None, "issues": [], "warnings": []}

    # Schema validation failures make the database incompatible.
    try:
        validate_database(conn)
    except ValidationError as exc:
        result["compatible"] = False
        result["issues"].append(str(exc))

    # A dedicated version table is optional today; its absence is only
    # worth a warning.
    try:
        row = conn.execute("SELECT version FROM database_version LIMIT 1").fetchone()
        if row:
            result["version"] = row[0]
    except sqlite3.Error:
        result["warnings"].append("No version information found in database")

    return result
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
# ============================================================================
|
|
337
|
+
# DATABASE MAINTENANCE FUNCTIONS
|
|
338
|
+
# ============================================================================
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
def vacuum_database(conn: sqlite3.Connection) -> None:
    """Rebuild the database file with VACUUM to reclaim space and defragment.

    Useful after deleting large amounts of data or after many
    INSERT/UPDATE/DELETE operations.

    Args:
        conn: Database connection

    Note:
        VACUUM can take significant time on large databases and needs
        temporary disk space up to twice the original database size.
    """
    import logging

    log = logging.getLogger(__name__)

    log.info("Running VACUUM to reclaim database space and defragment")
    conn.execute("VACUUM")
    log.info("VACUUM completed successfully")
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
def analyze_database(conn: sqlite3.Connection) -> None:
    """Run ANALYZE to refresh the query planner's table/index statistics.

    The gathered statistics help SQLite's planner choose better query
    strategies.

    Args:
        conn: Database connection
    """
    import logging

    log = logging.getLogger(__name__)

    log.info("Running ANALYZE to update query planner statistics")
    conn.execute("ANALYZE")
    log.info("ANALYZE completed successfully")
|
|
381
|
+
|
|
382
|
+
|
|
383
|
+
def optimize_database(conn: sqlite3.Connection) -> dict:
    """Run complete database optimization (VACUUM followed by ANALYZE).

    VACUUM runs first to reclaim space and defragment; ANALYZE then
    refreshes planner statistics against the new layout.

    Args:
        conn: Database connection

    Returns:
        Dictionary with optimization results, including before/after
        size information and space reclaimed.
    """
    import logging
    import time

    log = logging.getLogger(__name__)

    log.info("Running database optimization (VACUUM + ANALYZE)")
    started = time.time()

    size_before = get_database_size_info(conn)

    vacuum_database(conn)   # reclaim space and defragment first
    analyze_database(conn)  # then refresh stats for the new layout

    size_after = get_database_size_info(conn)
    elapsed = time.time() - started

    result = {
        "success": True,
        "optimization_time": elapsed,
        "size_before": size_before,
        "size_after": size_after,
        "space_reclaimed": size_before["total_size"] - size_after["total_size"],
        "free_pages_reclaimed": size_before["free_pages"] - size_after["free_pages"],
    }

    log.info(f"Database optimization completed in {elapsed:.2f} seconds")
    log.info(
        f"Space reclaimed: {result['space_reclaimed']:,} bytes ({result['space_reclaimed']/1024/1024:.1f} MB)"
    )

    return result
|
|
434
|
+
|
|
435
|
+
|
|
436
|
+
def get_database_size_info(conn: sqlite3.Connection) -> dict:
    """Report database size and space usage derived from SQLite pragmas.

    Args:
        conn: Database connection

    Returns:
        Dictionary with total/used/free byte counts, page statistics,
        and a utilization percentage.
    """

    def read_pragma(name: str) -> int:
        return conn.execute(f"PRAGMA {name}").fetchone()[0]

    page_count = read_pragma("page_count")
    page_size = read_pragma("page_size")
    free_pages = read_pragma("freelist_count")

    total_size = page_count * page_size
    free_size = free_pages * page_size
    used_size = total_size - free_size

    return {
        "total_size": total_size,
        "used_size": used_size,
        "free_size": free_size,
        "page_count": page_count,
        "page_size": page_size,
        "free_pages": free_pages,
        "utilization_percent": (used_size / total_size * 100) if total_size > 0 else 0,
    }
|
|
464
|
+
|
|
465
|
+
|
|
466
|
+
def should_optimize_database(
    conn: sqlite3.Connection, free_space_threshold_percent: float = 10.0
) -> bool:
    """Return True when free space suggests the database is worth optimizing.

    Args:
        conn: Database connection
        free_space_threshold_percent: Free-space percentage at or above
            which optimization is recommended

    Returns:
        True if optimization is recommended, False otherwise
    """
    size_info = get_database_size_info(conn)
    total = size_info["total_size"]

    if total == 0:
        return False  # empty database: nothing to reclaim

    free_percent = size_info["free_size"] / total * 100
    return free_percent >= free_space_threshold_percent
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Error classes for PyConvexity.
|
|
3
|
+
|
|
4
|
+
These mirror the error handling from the original Rust implementation
|
|
5
|
+
while providing Python-specific enhancements.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Optional
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class PyConvexityError(Exception):
    """Root of the PyConvexity exception hierarchy.

    Catch this to handle any error raised by the package.
    """
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class DatabaseError(PyConvexityError):
    """Raised for database-related failures (schema, I/O, SQL errors)."""
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class ConnectionError(DatabaseError):
    """Raised when opening a database connection fails.

    NOTE(review): this name shadows the builtin ``ConnectionError``
    inside this module; code importing it from here gets this class.
    """
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class ValidationError(PyConvexityError):
    """Raised when data fails a validation check."""
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class ComponentNotFound(PyConvexityError):
    """Raised when a component id has no matching row in the database."""

    def __init__(self, component_id: int, message: Optional[str] = None):
        # Keep the id available for programmatic handling by callers.
        self.component_id = component_id
        default = f"Component not found: {component_id}"
        super().__init__(default if message is None else message)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class AttributeNotFound(PyConvexityError):
    """Raised when a component exists but lacks the requested attribute."""

    def __init__(
        self, component_id: int, attribute_name: str, message: Optional[str] = None
    ):
        # Keep identifiers available for programmatic handling by callers.
        self.component_id = component_id
        self.attribute_name = attribute_name
        default = f"Attribute not found: component {component_id}, attribute '{attribute_name}'"
        super().__init__(default if message is None else message)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class InvalidDataType(ValidationError):
    """Raised when an attribute value has the wrong data type."""

    def __init__(self, expected: str, actual: str, message: Optional[str] = None):
        # Record both sides of the mismatch for programmatic handling.
        self.expected = expected
        self.actual = actual
        default = f"Invalid data type: expected {expected}, got {actual}"
        super().__init__(default if message is None else message)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class TimeseriesError(PyConvexityError):
    """Raised when timeseries serialization or deserialization fails."""
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class NetworkNotFound(PyConvexityError):
    """Raised when the requested network does not exist in the database.

    Args:
        message: Optional custom message; a generic default is used
            when omitted.
    """

    def __init__(self, message: Optional[str] = None):
        if message is None:
            # Fixed: was an f-string with no placeholders (F541).
            message = "Network not found"
        super().__init__(message)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class ScenarioNotFound(PyConvexityError):
    """Raised when a scenario id has no matching row in the database."""

    def __init__(self, scenario_id: int, message: Optional[str] = None):
        # Keep the id available for programmatic handling by callers.
        self.scenario_id = scenario_id
        default = f"Scenario not found: {scenario_id}"
        super().__init__(default if message is None else message)
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
class CarrierNotFound(PyConvexityError):
    """Raised when a carrier id has no matching row in the database."""

    def __init__(self, carrier_id: int, message: Optional[str] = None):
        # Keep the id available for programmatic handling by callers.
        self.carrier_id = carrier_id
        default = f"Carrier not found: {carrier_id}"
        super().__init__(default if message is None else message)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
# Legacy alias kept for backward compatibility with older callers.
# Scheduled for deprecation in a future release.
DbError = PyConvexityError
|