pyconvexity-0.4.3-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
Potentially problematic release: this version of pyconvexity might be problematic.
- pyconvexity/__init__.py +226 -0
- pyconvexity/_version.py +1 -0
- pyconvexity/core/__init__.py +60 -0
- pyconvexity/core/database.py +485 -0
- pyconvexity/core/errors.py +106 -0
- pyconvexity/core/types.py +400 -0
- pyconvexity/data/README.md +101 -0
- pyconvexity/data/__init__.py +17 -0
- pyconvexity/data/loaders/__init__.py +3 -0
- pyconvexity/data/loaders/cache.py +213 -0
- pyconvexity/data/schema/01_core_schema.sql +420 -0
- pyconvexity/data/schema/02_data_metadata.sql +120 -0
- pyconvexity/data/schema/03_validation_data.sql +506 -0
- pyconvexity/data/sources/__init__.py +5 -0
- pyconvexity/data/sources/gem.py +442 -0
- pyconvexity/io/__init__.py +26 -0
- pyconvexity/io/excel_exporter.py +1226 -0
- pyconvexity/io/excel_importer.py +1381 -0
- pyconvexity/io/netcdf_exporter.py +197 -0
- pyconvexity/io/netcdf_importer.py +1833 -0
- pyconvexity/models/__init__.py +195 -0
- pyconvexity/models/attributes.py +730 -0
- pyconvexity/models/carriers.py +159 -0
- pyconvexity/models/components.py +611 -0
- pyconvexity/models/network.py +503 -0
- pyconvexity/models/results.py +148 -0
- pyconvexity/models/scenarios.py +234 -0
- pyconvexity/solvers/__init__.py +29 -0
- pyconvexity/solvers/pypsa/__init__.py +24 -0
- pyconvexity/solvers/pypsa/api.py +460 -0
- pyconvexity/solvers/pypsa/batch_loader.py +307 -0
- pyconvexity/solvers/pypsa/builder.py +675 -0
- pyconvexity/solvers/pypsa/constraints.py +405 -0
- pyconvexity/solvers/pypsa/solver.py +1509 -0
- pyconvexity/solvers/pypsa/storage.py +2048 -0
- pyconvexity/timeseries.py +330 -0
- pyconvexity/validation/__init__.py +25 -0
- pyconvexity/validation/rules.py +312 -0
- pyconvexity-0.4.3.dist-info/METADATA +47 -0
- pyconvexity-0.4.3.dist-info/RECORD +42 -0
- pyconvexity-0.4.3.dist-info/WHEEL +5 -0
- pyconvexity-0.4.3.dist-info/top_level.txt +1 -0
pyconvexity/models/network.py
@@ -0,0 +1,503 @@
+"""
+Network management operations for PyConvexity.
+
+Provides operations for creating, managing, and querying energy system networks
+including time periods, carriers, and network configuration.
+"""
+
+import sqlite3
+import json
+import logging
+from typing import Dict, Any, Optional, List
+from datetime import datetime, timezone
+
+from pyconvexity.core.types import CreateNetworkRequest, TimePeriod, Network
+from pyconvexity.core.errors import ValidationError, DatabaseError
+
+logger = logging.getLogger(__name__)
+
+
+def create_network(conn: sqlite3.Connection, request: CreateNetworkRequest) -> None:
+    """
+    Create network metadata and time periods (single network per database).
+
+    Args:
+        conn: Database connection
+        request: Network creation request
+
+    Raises:
+        ValidationError: If required fields are missing
+        DatabaseError: If creation fails
+    """
+
+    # Validate required fields
+    if not request.start_time:
+        raise ValidationError("start_time is required")
+    if not request.end_time:
+        raise ValidationError("end_time is required")
+
+    time_resolution = request.time_resolution or "PT1H"
+
+    # Insert into network_metadata table (single row per database)
+    conn.execute(
+        """
+        INSERT INTO network_metadata (name, description, time_start, time_end, time_interval, created_at, updated_at)
+        VALUES (?, ?, ?, ?, ?, datetime('now'), datetime('now'))
+        """,
+        (
+            request.name,
+            request.description
+            or f"Created on {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
+            request.start_time,
+            request.end_time,
+            time_resolution,
+        ),
+    )
+
+    # Automatically create time periods from the request parameters
+    _create_time_periods_from_request(conn, request.start_time, request.end_time, time_resolution)
+
+
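A minimal usage sketch, assuming a SQLite database that already has the package schema applied (the SQL files under pyconvexity/data/schema) and assuming CreateNetworkRequest accepts the field names referenced above as keyword arguments; its actual definition lives in pyconvexity/core/types.py, which is not shown in this hunk:

    import sqlite3
    from pyconvexity.core.types import CreateNetworkRequest
    from pyconvexity.models.network import create_network, get_network_info

    conn = sqlite3.connect("energy_model.db")  # schema assumed already applied
    request = CreateNetworkRequest(            # keyword names are an assumption
        name="toy-network",
        description=None,                      # None falls back to "Created on <now>"
        start_time="2030-01-01 00:00:00",
        end_time="2030-12-31 23:00:00",
        time_resolution="PT1H",                # None would also default to PT1H
    )
    create_network(conn, request)
    conn.commit()
    print(get_network_info(conn)["time_interval"])  # "PT1H"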
+def _create_time_periods_from_request(
+    conn: sqlite3.Connection,
+    start_time: str,
+    end_time: str,
+    time_resolution: str,
+) -> None:
+    """
+    Create time periods from network request parameters.
+
+    Args:
+        conn: Database connection
+        start_time: Start time string (YYYY-MM-DD HH:MM:SS)
+        end_time: End time string (YYYY-MM-DD HH:MM:SS)
+        time_resolution: ISO 8601 duration (PT1H, PT30M, PT2H, etc.)
+    """
+    # Parse start and end times
+    start_dt = datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
+    end_dt = datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S")
+
+    # Parse time resolution to seconds
+    interval_seconds = _parse_iso8601_duration_to_seconds(time_resolution)
+
+    # Calculate period count (inclusive of both start and end)
+    total_seconds = int((end_dt - start_dt).total_seconds())
+    period_count = (total_seconds // interval_seconds) + 1
+
+    # Get Unix timestamp for start
+    start_timestamp = int(start_dt.timestamp())
+
+    # Insert time periods
+    conn.execute(
+        """
+        INSERT INTO network_time_periods (period_count, start_timestamp, interval_seconds)
+        VALUES (?, ?, ?)
+        """,
+        (period_count, start_timestamp, interval_seconds),
+    )
+
+
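Worked through for a one-year hourly network: with start_time 2030-01-01 00:00:00 and end_time 2030-12-31 23:00:00 at PT1H, total_seconds is 8759 * 3600, so period_count = 8759 + 1 = 8760; the +1 makes the count inclusive of both endpoints.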
+def _parse_iso8601_duration_to_seconds(duration: str) -> int:
+    """
+    Parse ISO 8601 duration string to seconds.
+
+    Supports: PT1H (1 hour), PT30M (30 minutes), PT2H (2 hours), PT15M (15 minutes), etc.
+
+    Args:
+        duration: ISO 8601 duration string
+
+    Returns:
+        Duration in seconds
+
+    Raises:
+        ValidationError: If duration format is invalid
+    """
+    if not duration.startswith("PT"):
+        raise ValidationError(f"Invalid ISO 8601 duration format: {duration}. Must start with 'PT'")
+
+    remaining = duration[2:]  # Remove 'PT' prefix
+    total_seconds = 0
+
+    # Parse hours
+    if "H" in remaining:
+        parts = remaining.split("H")
+        hours = int(parts[0]) if parts[0] else 0
+        total_seconds += hours * 3600
+        remaining = parts[1] if len(parts) > 1 else ""
+
+    # Parse minutes
+    if "M" in remaining:
+        parts = remaining.split("M")
+        minutes = int(parts[0]) if parts[0] else 0
+        total_seconds += minutes * 60
+        remaining = parts[1] if len(parts) > 1 else ""
+
+    # Parse seconds
+    if "S" in remaining:
+        parts = remaining.split("S")
+        seconds = int(parts[0]) if parts[0] else 0
+        total_seconds += seconds
+
+    if total_seconds == 0:
+        raise ValidationError(f"Invalid ISO 8601 duration: {duration}. Could not parse any time components.")
+
+    return total_seconds
+
+
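A few concrete values as the parser above computes them (calling the private helper directly, purely for illustration):

    _parse_iso8601_duration_to_seconds("PT1H")     # 3600
    _parse_iso8601_duration_to_seconds("PT30M")    # 1800
    _parse_iso8601_duration_to_seconds("PT1H30M")  # 1 * 3600 + 30 * 60 = 5400
    _parse_iso8601_duration_to_seconds("PT45S")    # 45
    _parse_iso8601_duration_to_seconds("P1D")      # raises ValidationError: no 'PT' prefix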
+def get_network_info(conn: sqlite3.Connection) -> Dict[str, Any]:
+    """
+    Get network information (single network per database).
+
+    Args:
+        conn: Database connection
+
+    Returns:
+        Dictionary with network information
+
+    Raises:
+        ValidationError: If network metadata doesn't exist
+    """
+    cursor = conn.execute(
+        """
+        SELECT name, description, time_start, time_end, time_interval, created_at, updated_at
+        FROM network_metadata
+        LIMIT 1
+        """
+    )
+
+    row = cursor.fetchone()
+    if not row:
+        raise ValidationError("No network metadata found in database")
+
+    return {
+        "name": row[0],
+        "description": row[1],
+        "time_start": row[2],
+        "time_end": row[3],
+        "time_interval": row[4],
+        "created_at": row[5],
+        "updated_at": row[6],
+    }
+
+
+def get_network_time_periods(conn: sqlite3.Connection) -> List[TimePeriod]:
+    """
+    Get network time periods using optimized storage (single network per database).
+
+    Args:
+        conn: Database connection
+
+    Returns:
+        List of TimePeriod objects ordered by period_index
+    """
+    cursor = conn.execute(
+        """
+        SELECT period_count, start_timestamp, interval_seconds
+        FROM network_time_periods
+        LIMIT 1
+        """
+    )
+
+    row = cursor.fetchone()
+    if not row:
+        return []  # No time periods defined
+
+    period_count, start_timestamp, interval_seconds = row
+
+    # Generate all time periods computationally
+    periods = []
+    for period_index in range(period_count):
+        timestamp = start_timestamp + (period_index * interval_seconds)
+
+        # Format timestamp as string for compatibility - ALWAYS use UTC to avoid DST duplicates
+        dt = datetime.fromtimestamp(timestamp, tz=timezone.utc)
+        formatted_time = dt.strftime("%Y-%m-%d %H:%M:%S")
+
+        periods.append(
+            TimePeriod(
+                timestamp=timestamp,
+                period_index=period_index,
+                formatted_time=formatted_time,
+            )
+        )
+
+    return periods
+
+
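The single (period_count, start_timestamp, interval_seconds) row expands deterministically on read; a sketch, reusing the conn from the earlier example:

    from pyconvexity.models.network import get_network_time_periods

    periods = get_network_time_periods(conn)
    len(periods)                                 # 8760 for the one-year hourly example
    periods[1].timestamp - periods[0].timestamp  # interval_seconds, here 3600
    periods[0].formatted_time                    # UTC rendering; matches the input start_time
                                                 # only when the host timezone is UTC, since
                                                 # start_timestamp came from a naive strptime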
+def list_networks(conn: sqlite3.Connection) -> List[Dict[str, Any]]:
+    """
+    Get network information (returns single network in list for backward compatibility).
+
+    Args:
+        conn: Database connection
+
+    Returns:
+        List with single network dictionary (for backward compatibility)
+    """
+    try:
+        network_info = get_network_info(conn)
+        return [network_info]
+    except ValidationError:
+        return []
+
+
+def get_first_network(conn: sqlite3.Connection) -> Optional[Dict[str, Any]]:
+    """
+    Get network (for backward compatibility with single-network-per-database).
+
+    Args:
+        conn: Database connection
+
+    Returns:
+        Network dictionary or None if no network exists
+    """
+    try:
+        return get_network_info(conn)
+    except ValidationError:
+        return None
+
+
+def get_network_by_name(
+    conn: sqlite3.Connection, name: str
+) -> Optional[Dict[str, Any]]:
+    """
+    Get network by name (for backward compatibility - checks if name matches).
+
+    Args:
+        conn: Database connection
+        name: Network name to match
+
+    Returns:
+        Network dictionary if name matches, None otherwise
+    """
+    try:
+        network_info = get_network_info(conn)
+        if network_info.get("name") == name:
+            return network_info
+        return None
+    except ValidationError:
+        return None
+
+
+def create_carrier(
+    conn: sqlite3.Connection,
+    name: str,
+    co2_emissions: float = 0.0,
+    color: Optional[str] = None,
+    nice_name: Optional[str] = None,
+) -> int:
+    """
+    Create a carrier record and return carrier ID (single network per database).
+
+    Args:
+        conn: Database connection
+        name: Carrier name
+        co2_emissions: CO2 emissions factor
+        color: Display color
+        nice_name: Human-readable name
+
+    Returns:
+        ID of the newly created carrier
+    """
+    cursor = conn.execute(
+        """
+        INSERT INTO carriers (name, co2_emissions, color, nice_name)
+        VALUES (?, ?, ?, ?)
+        """,
+        (name, co2_emissions, color, nice_name),
+    )
+
+    carrier_id = cursor.lastrowid
+    if not carrier_id:
+        raise DatabaseError("Failed to create carrier")
+
+    return carrier_id
+
+
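A usage sketch; the emission factors and colors are illustrative values only:

    from pyconvexity.models.network import create_carrier, list_carriers

    gas_id = create_carrier(conn, "gas", co2_emissions=0.2, color="#888888", nice_name="Natural Gas")
    wind_id = create_carrier(conn, "wind")   # defaults: 0.0 emissions, no color/nice_name
    for carrier in list_carriers(conn):      # ordered by name
        print(carrier["id"], carrier["name"], carrier["co2_emissions"])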
+def list_carriers(conn: sqlite3.Connection) -> List[Dict[str, Any]]:
+    """
+    List all carriers (single network per database).
+
+    Args:
+        conn: Database connection
+
+    Returns:
+        List of carrier dictionaries
+    """
+    cursor = conn.execute(
+        """
+        SELECT id, name, co2_emissions, color, nice_name
+        FROM carriers
+        ORDER BY name
+        """
+    )
+
+    carriers = []
+    for row in cursor.fetchall():
+        carriers.append(
+            {
+                "id": row[0],
+                "name": row[1],
+                "co2_emissions": row[2],
+                "color": row[3],
+                "nice_name": row[4],
+            }
+        )
+
+    return carriers
+
+
+def get_network_config(
+    conn: sqlite3.Connection, scenario_id: Optional[int] = None
+) -> Dict[str, Any]:
+    """
+    Get network configuration with scenario-aware fallback (single network per database).
+
+    Priority order:
+    1. Scenario-specific config (network_config WHERE scenario_id = X)
+    2. Network default config (network_config WHERE scenario_id IS NULL)
+    3. System default value
+
+    Args:
+        conn: Database connection
+        scenario_id: Optional scenario ID
+
+    Returns:
+        Dictionary with network configuration
+    """
+    config = {}
+
+    # Load from network_config table with scenario fallback
+    cursor = conn.execute(
+        """
+        SELECT param_name, param_type, param_value
+        FROM network_config
+        WHERE (scenario_id = ? OR scenario_id IS NULL)
+        ORDER BY scenario_id DESC NULLS LAST -- Scenario-specific values first
+        """,
+        (scenario_id,),
+    )
+
+    seen_params = set()
+    for row in cursor.fetchall():
+        param_name, param_type, param_value = row
+
+        # Skip if we already have this parameter (scenario-specific takes precedence)
+        if param_name in seen_params:
+            continue
+        seen_params.add(param_name)
+
+        # Parse value based on type
+        try:
+            if param_type == "boolean":
+                config[param_name] = param_value.lower() == "true"
+            elif param_type == "real":
+                config[param_name] = float(param_value)
+            elif param_type == "integer":
+                config[param_name] = int(param_value)
+            elif param_type == "json":
+                config[param_name] = json.loads(param_value)
+            else:  # string
+                config[param_name] = param_value
+        except (ValueError, json.JSONDecodeError) as e:
+            logger.warning(f"Failed to parse config parameter {param_name}: {e}")
+            continue
+
+    # Apply system defaults for missing parameters
+    defaults = {
+        "unmet_load_active": True,
+        "discount_rate": 0.0,  # No discounting by default
+        "solver_name": "default",
+    }
+
+    for param, default_value in defaults.items():
+        if param not in config:
+            config[param] = default_value
+
+    return config
+
+
+def set_network_config(
+    conn: sqlite3.Connection,
+    param_name: str,
+    param_value: Any,
+    param_type: str,
+    scenario_id: Optional[int] = None,
+    description: Optional[str] = None,
+) -> None:
+    """
+    Set network configuration parameter (single network per database).
+
+    Args:
+        conn: Database connection
+        param_name: Parameter name
+        param_value: Parameter value
+        param_type: Parameter type ('boolean', 'real', 'integer', 'string', 'json')
+        scenario_id: Optional scenario ID (NULL for base network)
+        description: Optional parameter description
+
+    Raises:
+        ValidationError: If parameter type is invalid or serialization fails
+    """
+
+    # Validate parameter type
+    valid_types = {"boolean", "real", "integer", "string", "json"}
+    if param_type not in valid_types:
+        raise ValidationError(
+            f"Invalid parameter type: {param_type}. Must be one of {valid_types}"
+        )
+
+    # Serialize value based on type
+    try:
+        if param_type == "boolean":
+            serialized = str(param_value).lower()
+            if serialized not in {"true", "false"}:
+                raise ValidationError(
+                    f"Boolean parameter must be True/False, got: {param_value}"
+                )
+        elif param_type == "real":
+            serialized = str(float(param_value))
+        elif param_type == "integer":
+            serialized = str(int(param_value))
+        elif param_type == "json":
+            serialized = json.dumps(param_value)
+        else:  # string
+            serialized = str(param_value)
+    except (ValueError, TypeError) as e:
+        raise ValidationError(
+            f"Failed to serialize parameter {param_name} as {param_type}: {e}"
+        )
+
+    # Insert or update parameter
+    conn.execute(
+        """
+        INSERT OR REPLACE INTO network_config
+        (scenario_id, param_name, param_type, param_value, param_description, updated_at)
+        VALUES (?, ?, ?, ?, ?, datetime('now'))
+        """,
+        (scenario_id, param_name, param_type, serialized, description),
+    )
+
+
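A round-trip sketch of the fallback order (parameter values illustrative; scenario_id=3 is assumed to exist in the scenarios table if foreign keys are enforced):

    from pyconvexity.models.network import set_network_config, get_network_config

    set_network_config(conn, "discount_rate", 0.05, "real")                 # network default row
    set_network_config(conn, "discount_rate", 0.07, "real", scenario_id=3)  # scenario override

    get_network_config(conn)["discount_rate"]                 # 0.05 -- only the NULL row matches
    get_network_config(conn, scenario_id=3)["discount_rate"]  # 0.07 -- scenario row sorts first
    get_network_config(conn)["solver_name"]                   # "default" -- system fallback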
+def get_component_counts(conn: sqlite3.Connection) -> Dict[str, int]:
+    """
+    Get component counts by type (single network per database).
+
+    Args:
+        conn: Database connection
+
+    Returns:
+        Dictionary mapping component types to counts
+    """
+    cursor = conn.execute(
+        """
+        SELECT component_type, COUNT(*) FROM components
+        GROUP BY component_type
+        """
+    )
+
+    counts = {}
+    for row in cursor.fetchall():
+        counts[row[0].lower()] = row[1]
+
+    return counts
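And a quick read of the component tally; the type names depend on whatever the components table holds, and keys come back lowercased:

    from pyconvexity.models.network import get_component_counts

    get_component_counts(conn)   # e.g. {"bus": 12, "generator": 34, "line": 17}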
pyconvexity/models/results.py
@@ -0,0 +1,148 @@
+"""
+Results and statistics operations for PyConvexity.
+
+Provides operations for querying solve results and statistics.
+"""
+
+import sqlite3
+import json
+import logging
+from typing import Dict, Any, Optional
+from dataclasses import dataclass
+
+from pyconvexity.core.errors import ValidationError
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class SolveResults:
+    """Represents solve results for a scenario."""
+
+    network_statistics: Dict[str, Any]
+    metadata: Dict[str, Any]
+    status: str
+    objective_value: Optional[float]
+    solve_time: float
+
+
+@dataclass
+class YearlyResults:
+    """Represents yearly solve results."""
+
+    year: int
+    network_statistics: Dict[str, Any]
+    metadata: Dict[str, Any]
+
+
+def get_solve_results(
+    conn: sqlite3.Connection, scenario_id: Optional[int] = None
+) -> Optional[SolveResults]:
+    """
+    Get overall solve results for a scenario (single network per database).
+
+    Args:
+        conn: Database connection
+        scenario_id: Scenario ID (NULL for base network)
+
+    Returns:
+        SolveResults object or None if no results found
+    """
+    # Query based on scenario_id (NULL for base network)
+    if scenario_id is None:
+        cursor = conn.execute(
+            """
+            SELECT results_json, metadata_json, solve_status, objective_value, solve_time_seconds
+            FROM network_solve_results
+            WHERE scenario_id IS NULL
+            ORDER BY solved_at DESC
+            LIMIT 1
+            """
+        )
+    else:
+        cursor = conn.execute(
+            """
+            SELECT results_json, metadata_json, solve_status, objective_value, solve_time_seconds
+            FROM network_solve_results
+            WHERE scenario_id = ?
+            ORDER BY solved_at DESC
+            LIMIT 1
+            """,
+            (scenario_id,),
+        )
+
+    row = cursor.fetchone()
+    if not row:
+        return None
+
+    try:
+        results_json = json.loads(row[0]) if row[0] else {}
+        metadata_json = json.loads(row[1]) if row[1] else {}
+
+        # Extract network_statistics from results_json
+        network_statistics = results_json.get("network_statistics", {})
+
+        return SolveResults(
+            network_statistics=network_statistics,
+            metadata=metadata_json,
+            status=row[2] or "unknown",
+            objective_value=row[3],
+            solve_time=row[4] or 0.0,
+        )
+    except json.JSONDecodeError as e:
+        logger.error(f"Error parsing JSON for scenario {scenario_id}: {e}")
+        return None
+
+
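A usage sketch; the keys inside network_statistics depend on what the solver stored in results_json, so only the fixed fields are shown:

    from pyconvexity.models.results import get_solve_results

    results = get_solve_results(conn)              # latest base-network solve, or None
    if results is not None:
        print(results.status, results.objective_value, results.solve_time)
        print(sorted(results.network_statistics))  # solver-dependent statistic names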
+def get_yearly_results(
+    conn: sqlite3.Connection, scenario_id: Optional[int] = None
+) -> Dict[int, YearlyResults]:
+    """
+    Get year-by-year solve results for a scenario (single network per database).
+
+    Args:
+        conn: Database connection
+        scenario_id: Scenario ID (NULL for base network)
+
+    Returns:
+        Dictionary mapping years to YearlyResults objects
+    """
+    # Query based on scenario_id (NULL for base network)
+    if scenario_id is None:
+        cursor = conn.execute(
+            """
+            SELECT year, results_json, metadata_json
+            FROM network_solve_results_by_year
+            WHERE scenario_id IS NULL
+            ORDER BY year
+            """
+        )
+    else:
+        cursor = conn.execute(
+            """
+            SELECT year, results_json, metadata_json
+            FROM network_solve_results_by_year
+            WHERE scenario_id = ?
+            ORDER BY year
+            """,
+            (scenario_id,),
+        )
+
+    yearly_results = {}
+    for row in cursor.fetchall():
+        year = row[0]
+        try:
+            results_json = json.loads(row[1]) if row[1] else {}
+            metadata_json = json.loads(row[2]) if row[2] else {}
+
+            # Extract network_statistics from results_json
+            network_statistics = results_json.get("network_statistics", {})
+
+            yearly_results[year] = YearlyResults(
+                year=year, network_statistics=network_statistics, metadata=metadata_json
+            )
+        except json.JSONDecodeError as e:
+            logger.error(f"Error parsing JSON for year {year}: {e}")
+            continue
+
+    return yearly_results
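And the year-by-year variant, under the same caveat about solver-dependent contents:

    from pyconvexity.models.results import get_yearly_results

    for year, yearly in sorted(get_yearly_results(conn, scenario_id=3).items()):
        print(year, len(yearly.network_statistics), yearly.metadata)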