pyconvexity 0.3.8.post7-py3-none-any.whl → 0.4.1-py3-none-any.whl
This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- pyconvexity/__init__.py +87 -46
- pyconvexity/_version.py +1 -1
- pyconvexity/core/__init__.py +3 -5
- pyconvexity/core/database.py +111 -103
- pyconvexity/core/errors.py +16 -10
- pyconvexity/core/types.py +61 -54
- pyconvexity/data/__init__.py +0 -1
- pyconvexity/data/loaders/cache.py +65 -64
- pyconvexity/data/schema/01_core_schema.sql +134 -234
- pyconvexity/data/schema/02_data_metadata.sql +38 -168
- pyconvexity/data/schema/03_validation_data.sql +327 -264
- pyconvexity/data/sources/gem.py +169 -139
- pyconvexity/io/__init__.py +4 -10
- pyconvexity/io/excel_exporter.py +694 -480
- pyconvexity/io/excel_importer.py +817 -545
- pyconvexity/io/netcdf_exporter.py +66 -61
- pyconvexity/io/netcdf_importer.py +850 -619
- pyconvexity/models/__init__.py +109 -59
- pyconvexity/models/attributes.py +197 -178
- pyconvexity/models/carriers.py +70 -67
- pyconvexity/models/components.py +260 -236
- pyconvexity/models/network.py +202 -284
- pyconvexity/models/results.py +65 -55
- pyconvexity/models/scenarios.py +58 -88
- pyconvexity/solvers/__init__.py +5 -5
- pyconvexity/solvers/pypsa/__init__.py +3 -3
- pyconvexity/solvers/pypsa/api.py +150 -134
- pyconvexity/solvers/pypsa/batch_loader.py +165 -162
- pyconvexity/solvers/pypsa/builder.py +390 -291
- pyconvexity/solvers/pypsa/constraints.py +184 -162
- pyconvexity/solvers/pypsa/solver.py +968 -666
- pyconvexity/solvers/pypsa/storage.py +1377 -671
- pyconvexity/timeseries.py +63 -60
- pyconvexity/validation/__init__.py +14 -6
- pyconvexity/validation/rules.py +95 -84
- pyconvexity-0.4.1.dist-info/METADATA +46 -0
- pyconvexity-0.4.1.dist-info/RECORD +42 -0
- pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
- pyconvexity/data/schema/04_scenario_schema.sql +0 -122
- pyconvexity/data/schema/migrate_add_geometries.sql +0 -73
- pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
- pyconvexity-0.3.8.post7.dist-info/METADATA +0 -138
- pyconvexity-0.3.8.post7.dist-info/RECORD +0 -49
- {pyconvexity-0.3.8.post7.dist-info → pyconvexity-0.4.1.dist-info}/WHEEL +0 -0
- {pyconvexity-0.3.8.post7.dist-info → pyconvexity-0.4.1.dist-info}/top_level.txt +0 -0
pyconvexity/core/database.py
CHANGED
@@ -17,20 +17,20 @@ from pyconvexity.core.errors import ConnectionError, DatabaseError, ValidationError
 class DatabaseContext:
     """
     Context manager for database connections with automatic cleanup.
-
+
     Provides a clean way to manage database connections with proper
     resource cleanup and error handling.
     """
-
+
     def __init__(self, db_path: str, read_only: bool = False):
         self.db_path = db_path
         self.read_only = read_only
         self.connection: Optional[sqlite3.Connection] = None
-
+
     def __enter__(self) -> sqlite3.Connection:
         self.connection = open_connection(self.db_path, read_only=self.read_only)
         return self.connection
-
+
     def __exit__(self, exc_type, exc_val, exc_tb):
         if self.connection:
             if exc_type is None:
@@ -44,17 +44,19 @@ class DatabaseContext:
 
 
 @contextmanager
-def database_context(db_path: str, read_only: bool = False) -> Generator[sqlite3.Connection, None, None]:
+def database_context(
+    db_path: str, read_only: bool = False
+) -> Generator[sqlite3.Connection, None, None]:
     """
     Context manager function for database connections.
-
+
     Args:
         db_path: Path to the SQLite database file
         read_only: If True, open in read-only mode
-
+
     Yields:
         sqlite3.Connection: Database connection with proper configuration
-
+
     Example:
         with database_context("model.db") as conn:
             cursor = conn.execute("SELECT * FROM networks")
@@ -67,14 +69,14 @@ def database_context(db_path: str, read_only: bool = False) -> Generator[sqlite3.Connection, None, None]:
 def open_connection(db_path: str, read_only: bool = False) -> sqlite3.Connection:
     """
     Open database connection with proper settings.
-
+
     Args:
         db_path: Path to the SQLite database file
         read_only: If True, open in read-only mode
-
+
     Returns:
         sqlite3.Connection: Configured database connection
-
+
     Raises:
         ConnectionError: If database connection fails
     """
@@ -85,23 +87,27 @@ def open_connection(db_path: str, read_only: bool = False) -> sqlite3.Connection:
             conn = sqlite3.connect(uri, uri=True)
         else:
             conn = sqlite3.connect(db_path)
-
+
         # Configure connection
         conn.row_factory = sqlite3.Row  # Enable column access by name
         conn.execute("PRAGMA foreign_keys = ON")  # Enable foreign key constraints
-
+
         # Configure for concurrent access (WAL mode for better concurrency)
         if not read_only:
-            conn.execute("PRAGMA journal_mode = WAL")  # Write-Ahead Logging for concurrency
+            conn.execute(
+                "PRAGMA journal_mode = WAL"
+            )  # Write-Ahead Logging for concurrency
             conn.execute("PRAGMA synchronous = NORMAL")  # Faster than FULL, still safe
-            conn.execute("PRAGMA wal_autocheckpoint = 1000")  # Less frequent checkpoints
+            conn.execute(
+                "PRAGMA wal_autocheckpoint = 1000"
+            )  # Less frequent checkpoints
             conn.execute("PRAGMA temp_store = MEMORY")  # Faster temporary operations
-
+
         # Set reasonable timeouts
         conn.execute("PRAGMA busy_timeout = 30000")  # 30 second timeout
-
+
         return conn
-
+
     except sqlite3.Error as e:
         raise ConnectionError(f"Failed to open database at {db_path}: {e}") from e
 
@@ -109,32 +115,31 @@ def open_connection(db_path: str, read_only: bool = False) -> sqlite3.Connection:
 def validate_database(conn: sqlite3.Connection) -> None:
     """
     Validate database schema has required tables.
-
+
     Args:
         conn: Database connection to validate
-
+
     Raises:
         ValidationError: If required tables are missing
     """
     required_tables = [
         "networks",
-        "components",
+        "components",
         "component_attributes",
         "attribute_validation_rules",
         "carriers",
-        "scenarios"
+        "scenarios",
     ]
-
+
     missing_tables = []
-
+
     for table in required_tables:
         cursor = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
-            (table,)
+            "SELECT name FROM sqlite_master WHERE type='table' AND name=?", (table,)
         )
         if not cursor.fetchone():
             missing_tables.append(table)
-
+
     if missing_tables:
         raise ValidationError(
             f"Required tables not found in database: {', '.join(missing_tables)}"
@@ -144,67 +149,66 @@ def validate_database(conn: sqlite3.Connection) -> None:
 def create_database_with_schema(db_path: str) -> None:
     """
     Create a new database and apply the complete schema.
-
+
     Args:
         db_path: Path where the new database should be created
-
+
     Raises:
         DatabaseError: If schema files cannot be found or applied
     """
     db_path_obj = Path(db_path)
-
+
     # Ensure parent directory exists
     if db_path_obj.parent and not db_path_obj.parent.exists():
         db_path_obj.parent.mkdir(parents=True, exist_ok=True)
-
+
     # Remove existing file if it exists, to ensure a clean start
     if db_path_obj.exists():
         db_path_obj.unlink()
-
+
     # Find schema files
     schema_dir = _find_schema_directory()
     if not schema_dir:
         raise DatabaseError("Could not find schema directory")
-
+
     schema_files = [
         "01_core_schema.sql",
-        "02_data_metadata.sql",
+        "02_data_metadata.sql",
         "03_validation_data.sql",
-        "04_scenario_schema.sql"
     ]
-
+
     # Verify all schema files exist
     missing_files = []
     for filename in schema_files:
         schema_file = schema_dir / filename
         if not schema_file.exists():
             missing_files.append(filename)
-
+
     if missing_files:
         raise DatabaseError(f"Schema files not found: {', '.join(missing_files)}")
-
+
     # Create connection and apply schemas
     try:
         conn = sqlite3.connect(db_path)
-
+
         # Enable foreign key constraints
         conn.execute("PRAGMA foreign_keys = ON")
-
+
         # Configure for concurrent access
         conn.execute("PRAGMA journal_mode = WAL")
         conn.execute("PRAGMA synchronous = NORMAL")
         conn.execute("PRAGMA wal_autocheckpoint = 1000")
         conn.execute("PRAGMA temp_store = MEMORY")
         conn.execute("PRAGMA busy_timeout = 30000")
-
+
         # Execute schemas in order
         for filename in schema_files:
             schema_file = schema_dir / filename
-            with open(schema_file, 'r') as f:
+            with open(schema_file, "r") as f:
                 conn.executescript(f.read())
-
+
         conn.close()
-
+
     except sqlite3.Error as e:
         # Clean up partial database on error
         if db_path_obj.exists():
@@ -215,27 +219,28 @@ def create_database_with_schema(db_path: str) -> None:
 def _find_schema_directory() -> Optional[Path]:
     """
     Find the schema directory in various possible locations.
-
+
     Returns:
         Path to schema directory or None if not found
-    """
+    """  # Try package data location first (PyPI/pip install)
     try:
         import importlib.resources
-        schema_path = importlib.resources.files("pyconvexity") / "data" / "schema"
+
+        schema_path = importlib.resources.files("pyconvexity") / "data" / "schema"
         if schema_path.is_dir():
             return Path(str(schema_path))
     except (ImportError, AttributeError):
         pass
-
+
     # Try relative to this file (development mode)
     current_file = Path(__file__)
-
+
     # Look for schema in the package data directory
     # pyconvexity/src/pyconvexity/core/database.py -> pyconvexity/src/pyconvexity/data/schema
     package_schema_dir = current_file.parent.parent / "data" / "schema"
     if package_schema_dir.exists():
         return package_schema_dir
-
+
     # Look for schema in the main project (development mode)
     # Assuming pyconvexity/src/pyconvexity/core/database.py
     # and schema is at project_root/schema
@@ -243,23 +248,23 @@ def _find_schema_directory() -> Optional[Path]:
     dev_schema_dir = project_root / "schema"
     if dev_schema_dir.exists():
         return dev_schema_dir
-
+
     # Try bundled location (PyInstaller)
     for p in sys.path:
         candidate = Path(p) / "schema"
         if candidate.exists() and candidate.is_dir():
             return candidate
-
+
     return None
 
 
 def get_database_info(conn: sqlite3.Connection) -> dict:
     """
     Get information about the database structure and contents.
-
+
     Args:
         conn: Database connection
-
+
     Returns:
         Dictionary with database information
     """
@@ -269,24 +274,24 @@ def get_database_info(conn: sqlite3.Connection) -> dict:
         "components": 0,
         "attributes": 0,
         "scenarios": 0,
-        "carriers": 0
+        "carriers": 0,
     }
-
+
     # Get table list
     cursor = conn.execute(
         "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
     )
     info["tables"] = [row[0] for row in cursor.fetchall()]
-
+
     # Get counts for main entities
     count_queries = {
         "networks": "SELECT COUNT(*) FROM networks",
-        "components": "SELECT COUNT(*) FROM components",
+        "components": "SELECT COUNT(*) FROM components",
         "attributes": "SELECT COUNT(*) FROM component_attributes",
         "scenarios": "SELECT COUNT(*) FROM scenarios",
-        "carriers": "SELECT COUNT(*) FROM carriers"
+        "carriers": "SELECT COUNT(*) FROM carriers",
     }
-
+
     for key, query in count_queries.items():
         try:
             cursor = conn.execute(query)
@@ -294,33 +299,28 @@ def get_database_info(conn: sqlite3.Connection) -> dict:
         except sqlite3.Error:
             # Table might not exist
             info[key] = 0
-
+
     return info
 
 
 def check_database_compatibility(conn: sqlite3.Connection) -> dict:
     """
     Check if database is compatible with current PyConvexity version.
-
+
     Args:
         conn: Database connection
-
+
     Returns:
         Dictionary with compatibility information
     """
-    result = {
-        "compatible": True,
-        "version": None,
-        "issues": [],
-        "warnings": []
-    }
-
+    result = {"compatible": True, "version": None, "issues": [], "warnings": []}
+
     try:
         validate_database(conn)
     except ValidationError as e:
         result["compatible"] = False
         result["issues"].append(str(e))
-
+
     # Check for version information (if we add a version table later)
     try:
         cursor = conn.execute("SELECT version FROM database_version LIMIT 1")
@@ -329,7 +329,7 @@ def check_database_compatibility(conn: sqlite3.Connection) -> dict:
             result["version"] = row[0]
     except sqlite3.Error:
         result["warnings"].append("No version information found in database")
-
+
     return result
 
 
@@ -337,23 +337,25 @@ def check_database_compatibility(conn: sqlite3.Connection) -> dict:
 # DATABASE MAINTENANCE FUNCTIONS
 # ============================================================================
 
+
 def vacuum_database(conn: sqlite3.Connection) -> None:
     """
     Run VACUUM to reclaim database space and defragment.
-
+
     VACUUM rebuilds the database file, repacking it into a minimal amount of disk space.
     This is useful after deleting large amounts of data or after many INSERT/UPDATE/DELETE operations.
-
+
     Args:
         conn: Database connection
-
+
     Note:
         VACUUM can take a significant amount of time on large databases and requires
         temporary disk space up to twice the size of the original database.
     """
     import logging
+
     logger = logging.getLogger(__name__)
-
+
     logger.info("Running VACUUM to reclaim database space and defragment")
     conn.execute("VACUUM")
     logger.info("VACUUM completed successfully")
@@ -362,16 +364,17 @@ def vacuum_database(conn: sqlite3.Connection) -> None:
 def analyze_database(conn: sqlite3.Connection) -> None:
     """
     Run ANALYZE to update query planner statistics.
-
+
     ANALYZE gathers statistics about the contents of tables and indices.
     These statistics are used by the query planner to help make better choices about how to perform queries.
-
+
     Args:
         conn: Database connection
     """
     import logging
+
     logger = logging.getLogger(__name__)
-
+
     logger.info("Running ANALYZE to update query planner statistics")
     conn.execute("ANALYZE")
     logger.info("ANALYZE completed successfully")
@@ -380,60 +383,63 @@ def analyze_database(conn: sqlite3.Connection) -> None:
 def optimize_database(conn: sqlite3.Connection) -> dict:
     """
     Run complete database optimization (VACUUM + ANALYZE).
-
+
     This performs both VACUUM and ANALYZE operations in the correct order:
     1. VACUUM first to reclaim space and defragment
     2. ANALYZE to update statistics with the new layout
-
+
     Args:
         conn: Database connection
-
+
     Returns:
         Dictionary with optimization results including before/after size information
     """
     import logging
     import time
+
     logger = logging.getLogger(__name__)
-
+
     logger.info("Running database optimization (VACUUM + ANALYZE)")
     start_time = time.time()
-
+
     # Get size before optimization
     size_before = get_database_size_info(conn)
-
+
     # VACUUM first to reclaim space and defragment
     vacuum_database(conn)
-
+
     # Then ANALYZE to update statistics with the new layout
     analyze_database(conn)
-
+
     # Get size after optimization
     size_after = get_database_size_info(conn)
-
+
     optimization_time = time.time() - start_time
-
+
     result = {
         "success": True,
         "optimization_time": optimization_time,
         "size_before": size_before,
         "size_after": size_after,
         "space_reclaimed": size_before["total_size"] - size_after["total_size"],
-        "free_pages_reclaimed": size_before["free_pages"] - size_after["free_pages"]
+        "free_pages_reclaimed": size_before["free_pages"] - size_after["free_pages"],
     }
-
+
     logger.info(f"Database optimization completed in {optimization_time:.2f} seconds")
-    logger.info(
+    logger.info(
+        f"Space reclaimed: {result['space_reclaimed']:,} bytes ({result['space_reclaimed']/1024/1024:.1f} MB)"
+    )
+
     return result
 
 
 def get_database_size_info(conn: sqlite3.Connection) -> dict:
     """
     Get detailed information about database size and space usage.
-
+
     Args:
         conn: Database connection
-
+
     Returns:
         Dictionary with size information including total, used, and free space
     """
@@ -441,11 +447,11 @@ def get_database_size_info(conn: sqlite3.Connection) -> dict:
     page_count = conn.execute("PRAGMA page_count").fetchone()[0]
     page_size = conn.execute("PRAGMA page_size").fetchone()[0]
     freelist_count = conn.execute("PRAGMA freelist_count").fetchone()[0]
-
+
     total_size = page_count * page_size
     free_size = freelist_count * page_size
     used_size = total_size - free_size
-
+
     return {
         "total_size": total_size,
         "used_size": used_size,
@@ -453,25 +459,27 @@ def get_database_size_info(conn: sqlite3.Connection) -> dict:
         "page_count": page_count,
         "page_size": page_size,
         "free_pages": freelist_count,
-        "utilization_percent": (used_size / total_size * 100) if total_size > 0 else 0
+        "utilization_percent": (used_size / total_size * 100) if total_size > 0 else 0,
     }
 
 
-def should_optimize_database(conn: sqlite3.Connection, free_space_threshold_percent: float = 10.0) -> bool:
+def should_optimize_database(
+    conn: sqlite3.Connection, free_space_threshold_percent: float = 10.0
+) -> bool:
     """
     Check if database would benefit from optimization based on free space.
-
+
     Args:
         conn: Database connection
        free_space_threshold_percent: Threshold percentage of free space to trigger optimization
-
+
     Returns:
         True if optimization is recommended, False otherwise
     """
     size_info = get_database_size_info(conn)
-
+
     if size_info["total_size"] == 0:
         return False
-
+
     free_space_percent = (size_info["free_size"] / size_info["total_size"]) * 100
     return free_space_percent >= free_space_threshold_percent
pyconvexity/core/errors.py
CHANGED
@@ -10,27 +10,31 @@ from typing import Optional
 
 class PyConvexityError(Exception):
     """Base exception for all PyConvexity errors"""
+
     pass
 
 
 class DatabaseError(PyConvexityError):
     """Database-related errors"""
+
     pass
 
 
 class ConnectionError(DatabaseError):
     """Database connection failed"""
+
     pass
 
 
 class ValidationError(PyConvexityError):
     """Data validation error"""
+
     pass
 
 
 class ComponentNotFound(PyConvexityError):
     """Component not found in database"""
-
+
     def __init__(self, component_id: int, message: Optional[str] = None):
         self.component_id = component_id
         if message is None:
@@ -40,8 +44,10 @@ class ComponentNotFound(PyConvexityError):
 
 class AttributeNotFound(PyConvexityError):
     """Attribute not found for component"""
-
-    def __init__(self, component_id: int, attribute_name: str, message: Optional[str] = None):
+
+    def __init__(
+        self, component_id: int, attribute_name: str, message: Optional[str] = None
+    ):
         self.component_id = component_id
         self.attribute_name = attribute_name
         if message is None:
@@ -51,7 +57,7 @@ class AttributeNotFound(PyConvexityError):
 
 class InvalidDataType(ValidationError):
     """Invalid data type for attribute"""
-
+
     def __init__(self, expected: str, actual: str, message: Optional[str] = None):
         self.expected = expected
         self.actual = actual
@@ -62,22 +68,22 @@ class InvalidDataType(ValidationError):
 
 class TimeseriesError(PyConvexityError):
     """Timeseries serialization/deserialization error"""
+
     pass
 
 
 class NetworkNotFound(PyConvexityError):
     """Network not found in database"""
-
-    def __init__(self, network_id: int, message: Optional[str] = None):
-        self.network_id = network_id
+
+    def __init__(self, message: Optional[str] = None):
         if message is None:
-            message = f"Network not found: {network_id}"
+            message = f"Network not found"
         super().__init__(message)
 
 
 class ScenarioNotFound(PyConvexityError):
     """Scenario not found in database"""
-
+
     def __init__(self, scenario_id: int, message: Optional[str] = None):
         self.scenario_id = scenario_id
         if message is None:
@@ -87,7 +93,7 @@ class ScenarioNotFound(PyConvexityError):
 
 class CarrierNotFound(PyConvexityError):
     """Carrier not found in database"""
-
+
     def __init__(self, carrier_id: int, message: Optional[str] = None):
         self.carrier_id = carrier_id
         if message is None: