pyconvexity 0.4.6__py3-none-any.whl → 0.4.6.post1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyconvexity/_version.py +1 -1
- pyconvexity/io/netcdf_exporter.py +1 -7
- pyconvexity/io/netcdf_importer.py +82 -113
- pyconvexity/solvers/pypsa/api.py +4 -18
- pyconvexity/solvers/pypsa/batch_loader.py +1 -12
- pyconvexity/solvers/pypsa/builder.py +3 -23
- pyconvexity/solvers/pypsa/solver.py +4 -71
- pyconvexity/solvers/pypsa/storage.py +1 -47
- {pyconvexity-0.4.6.dist-info → pyconvexity-0.4.6.post1.dist-info}/METADATA +1 -1
- {pyconvexity-0.4.6.dist-info → pyconvexity-0.4.6.post1.dist-info}/RECORD +12 -12
- {pyconvexity-0.4.6.dist-info → pyconvexity-0.4.6.post1.dist-info}/WHEEL +0 -0
- {pyconvexity-0.4.6.dist-info → pyconvexity-0.4.6.post1.dist-info}/top_level.txt +0 -0
pyconvexity/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.4.6"
+__version__ = "0.4.6.post1"
pyconvexity/io/netcdf_exporter.py
CHANGED
@@ -3,22 +3,18 @@ NetCDF exporter for PyConvexity energy system models.
 Exports networks to PyPSA NetCDF format using existing PyPSA infrastructure.
 """

-import logging
 from typing import Dict, Any, Optional, Callable
 from pathlib import Path

 # Import existing PyPSA functionality from pyconvexity
-from pyconvexity.core.database import open_connection
 from pyconvexity.solvers.pypsa import build_pypsa_network

-logger = logging.getLogger(__name__)
-

 class NetCDFModelExporter:
 """Export network model to PyPSA NetCDF format"""

 def __init__(self):
-
+pass

 def export_to_netcdf(
 self,
@@ -78,7 +74,6 @@ class NetCDFModelExporter:
 }

 except Exception as e:
-self.logger.error(f"NetCDF export failed: {e}", exc_info=True)
 if progress_callback:
 progress_callback(None, f"Export failed: {str(e)}")
 raise
@@ -141,7 +136,6 @@ class NetCDFModelExporter:
 }

 except Exception as e:
-self.logger.error(f"CSV export failed: {e}", exc_info=True)
 if progress_callback:
 progress_callback(None, f"Export failed: {str(e)}")
 raise
pyconvexity/io/netcdf_importer.py
CHANGED
@@ -3,7 +3,6 @@ NetCDF importer for PyConvexity energy system models.
 Imports PyPSA NetCDF files into PyConvexity database format.
 """

-import logging
 import pandas as pd
 import numpy as np
 from typing import Dict, Any, Optional, Callable, Tuple, List
@@ -33,14 +32,81 @@ from pyconvexity.models import (
 from pyconvexity.validation import get_validation_rule
 from pyconvexity.timeseries import set_timeseries

-
+
+def _pandas_freq_to_iso8601(freq: str) -> str:
+"""
+Convert pandas frequency code to ISO 8601 duration format.
+
+Args:
+freq: Pandas frequency code (e.g., "H", "30T", "2H", "15min", "D")
+
+Returns:
+ISO 8601 duration string (e.g., "PT1H", "PT30M", "PT2H", "PT15M", "P1D")
+"""
+if not freq:
+return "PT1H"  # Default to hourly
+
+freq = freq.strip().upper()
+
+# Handle common pandas frequency codes
+# Hourly patterns: "H", "1H", "2H", etc.
+if freq == "H" or freq == "1H":
+return "PT1H"
+if freq.endswith("H"):
+try:
+hours = int(freq[:-1])
+return f"PT{hours}H"
+except ValueError:
+pass
+
+# Minute patterns: "T", "MIN", "30T", "30MIN", "15T", etc.
+if freq == "T" or freq == "MIN" or freq == "1T" or freq == "1MIN":
+return "PT1M"
+if freq.endswith("T"):
+try:
+minutes = int(freq[:-1])
+return f"PT{minutes}M"
+except ValueError:
+pass
+if freq.endswith("MIN"):
+try:
+minutes = int(freq[:-3])
+return f"PT{minutes}M"
+except ValueError:
+pass
+
+# Second patterns: "S", "1S", "30S", etc.
+if freq == "S" or freq == "1S":
+return "PT1S"
+if freq.endswith("S") and not freq.endswith("MS"):
+try:
+seconds = int(freq[:-1])
+return f"PT{seconds}S"
+except ValueError:
+pass
+
+# Daily patterns: "D", "1D", etc.
+if freq == "D" or freq == "1D":
+return "P1D"
+if freq.endswith("D"):
+try:
+days = int(freq[:-1])
+return f"P{days}D"
+except ValueError:
+pass
+
+# Weekly patterns: "W", "1W", etc.
+if freq == "W" or freq == "1W" or freq.startswith("W-"):
+return "P1W"
+
+# If we can't parse it, default to hourly
+return "PT1H"


 class NetCDFModelImporter:
 """Import PyPSA NetCDF files into PyConvexity database format"""

 def __init__(self):
-self.logger = logging.getLogger(__name__)
 # Set random seed for reproducible coordinate generation
 random.seed(42)
 np.random.seed(42)
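Note: the new `_pandas_freq_to_iso8601` helper above is a pure string mapping, so its behaviour can be read directly off the diff. A minimal sketch of the expected conversions follows (illustration only — the helper is private to `pyconvexity.io.netcdf_importer`, and importing it directly is an assumption, not a documented API):

```python
# Illustration of the helper added in 0.4.6.post1 (private function, imported only for this example).
from pyconvexity.io.netcdf_importer import _pandas_freq_to_iso8601

expected = {
    "H": "PT1H",       # hourly
    "2H": "PT2H",      # every two hours
    "30T": "PT30M",    # "T" is pandas' minute alias
    "15min": "PT15M",  # input is upper-cased, so "min"/"MIN" both match
    "45S": "PT45S",    # seconds
    "D": "P1D",        # daily
    "W-SUN": "P1W",    # any weekly anchor collapses to one week
    None: "PT1H",      # pd.infer_freq() returning None falls back to hourly
}
for freq, iso in expected.items():
    assert _pandas_freq_to_iso8601(freq) == iso
```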
@@ -103,7 +169,6 @@ class NetCDFModelImporter:
 )

 except Exception as e:
-self.logger.error(f"Error importing NetCDF: {e}", exc_info=True)
 if progress_callback:
 progress_callback(None, f"Error: {str(e)}")
 raise
@@ -164,7 +229,6 @@ class NetCDFModelImporter:
 error_msg += f"\n\nThis indicates a required column is missing from one of your CSV files. "
 error_msg += "Please ensure your CSV files follow the PyPSA format specification."

-self.logger.error(error_msg)
 raise ValueError(error_msg)

 if progress_callback:
@@ -185,7 +249,6 @@ class NetCDFModelImporter:
 )

 except Exception as e:
-self.logger.error(f"Error importing PyPSA CSV: {e}", exc_info=True)
 if progress_callback:
 progress_callback(None, f"Error: {str(e)}")
 raise
@@ -197,16 +260,10 @@ class NetCDFModelImporter:

 return pypsa
 except ImportError as e:
-self.logger.error(f"Failed to import PyPSA: {e}", exc_info=True)
 raise ImportError(
 "PyPSA is not installed or could not be imported. "
 "Please ensure it is installed correctly in the environment."
 ) from e
-except Exception as e:
-self.logger.error(
-f"An unexpected error occurred during PyPSA import: {e}", exc_info=True
-)
-raise

 def _validate_csv_directory(self, csv_directory: str) -> None:
 """Validate that the CSV directory contains valid PyPSA CSV files"""
@@ -415,7 +472,6 @@ class NetCDFModelImporter:
 conn.close()

 except Exception as e:
-self.logger.error(f"Error importing network: {e}", exc_info=True)
 if progress_callback:
 progress_callback(None, f"Error: {str(e)}")
 raise
@@ -427,7 +483,6 @@ class NetCDFModelImporter:
 def _extract_datetime_snapshots(self, network) -> pd.DatetimeIndex:
 """Extract datetime snapshots from a PyPSA network"""
 if not hasattr(network, "snapshots") or len(network.snapshots) == 0:
-self.logger.warning("No snapshots found in PyPSA network")
 return pd.DatetimeIndex([])

 snapshots = network.snapshots
@@ -435,7 +490,7 @@
 try:
 # Try direct conversion first (works for simple DatetimeIndex)
 return pd.to_datetime(snapshots)
-except (TypeError, ValueError)
+except (TypeError, ValueError):
 # Handle MultiIndex case
 if hasattr(snapshots, "nlevels") and snapshots.nlevels > 1:
 # Try to use the timesteps attribute if available (common in multi-period networks)
@@ -450,15 +505,10 @@
 last_level = snapshots.get_level_values(snapshots.nlevels - 1)
 datetime_snapshots = pd.to_datetime(last_level)
 return datetime_snapshots
-except Exception
-
-f"Failed to extract datetime from MultiIndex: {multi_e}"
-)
+except Exception:
+pass

 # Final fallback: create a default hourly range
-self.logger.warning(
-"Could not extract datetime snapshots, creating default hourly range"
-)
 default_start = pd.Timestamp("2024-01-01 00:00:00")
 default_end = pd.Timestamp("2024-01-01 23:59:59")
 return pd.date_range(start=default_start, end=default_end, freq="H")
@@ -479,17 +529,17 @@
 time_start = snapshots.min().strftime("%Y-%m-%d %H:%M:%S")
 time_end = snapshots.max().strftime("%Y-%m-%d %H:%M:%S")

-# Try to infer time interval
+# Try to infer time interval and convert to ISO 8601 format
 if len(snapshots) > 1:
 freq = pd.infer_freq(snapshots)
-time_interval = freq
+time_interval = _pandas_freq_to_iso8601(freq) if freq else "PT1H"
 else:
-time_interval = "
+time_interval = "PT1H"
 else:
 # Default time range if no snapshots
 time_start = "2024-01-01 00:00:00"
 time_end = "2024-01-01 23:59:59"
-time_interval = "
+time_interval = "PT1H"

 description = (
 network_description
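With this change the network's `time_interval` is stored as an ISO 8601 duration (e.g. "PT1H", "PT30M") rather than a raw pandas frequency code. A rough sketch of why that form is convenient for downstream consumers — this snippet is illustrative only, assumes pandas' ISO 8601 duration parsing, and is not code from the package:

```python
import pandas as pd

# Assumed behaviour: pd.Timedelta accepts ISO 8601 duration strings,
# so the stored interval converts back to hours without custom parsing.
for iso, hours in {"PT1H": 1.0, "PT30M": 0.5}.items():
    assert pd.Timedelta(iso) / pd.Timedelta(hours=1) == hours
```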
@@ -511,9 +561,6 @@ class NetCDFModelImporter:
 snapshots = self._extract_datetime_snapshots(network)

 if len(snapshots) == 0:
-self.logger.warning(
-"No valid snapshots found in PyPSA network, skipping time periods creation"
-)
 return

 # Insert optimized time periods metadata
@@ -686,12 +733,9 @@ class NetCDFModelImporter:
 # Generate a unique name for this bus
 unique_name = self._generate_unique_name(str(bus_name), "BUS")

-# Extract
+# Extract coordinate data
 x_value = bus_data.get("x", None)
 y_value = bus_data.get("y", None)
-self.logger.debug(
-f"Bus '{bus_name}' -> '{unique_name}': x={x_value} (type: {type(x_value)}), y={y_value} (type: {type(y_value)})"
-)

 # Handle NaN/None values properly
 longitude = (
@@ -743,7 +787,6 @@ class NetCDFModelImporter:
 except Exception as e:
 if strict_validation:
 raise
-self.logger.warning(f"Failed to import bus {bus_name}: {e}")
 continue

 return count
@@ -776,9 +819,6 @@ class NetCDFModelImporter:
 bus_id = bus_name_to_id.get(bus_name) if bus_name else None

 if not bus_id:
-self.logger.warning(
-f"Generator {gen_name}: bus '{bus_name}' not found, skipping"
-)
 continue

 # Get or create carrier
@@ -821,7 +861,6 @@ class NetCDFModelImporter:
 except Exception as e:
 if strict_validation:
 raise
-self.logger.warning(f"Failed to import generator {gen_name}: {e}")
 continue

 return count
@@ -855,9 +894,6 @@ class NetCDFModelImporter:
 try:
 bus_id = bus_map.get(load_data["bus"])
 if bus_id is None:
-self.logger.warning(
-f"Bus '{load_data['bus']}' not found for load '{load_name}'"
-)
 continue

 # Generate a unique name for this load
@@ -918,7 +954,6 @@ class NetCDFModelImporter:
 except Exception as e:
 if strict_validation:
 raise
-self.logger.warning(f"Failed to import load {load_name}: {e}")
 continue

 return count
@@ -941,9 +976,6 @@ class NetCDFModelImporter:
 bus1_id = bus_map.get(line_data["bus1"])

 if bus0_id is None or bus1_id is None:
-self.logger.warning(
-f"Bus not found for line '{line_name}': bus0='{line_data['bus0']}', bus1='{line_data['bus1']}'"
-)
 continue

 # Handle duplicate names by appending counter
@@ -951,9 +983,6 @@ class NetCDFModelImporter:
 if line_name in name_counter:
 name_counter[line_name] += 1
 unique_name = f"{line_name}_{name_counter[line_name]}"
-self.logger.warning(
-f"Duplicate line name '{line_name}' renamed to '{unique_name}'"
-)
 else:
 name_counter[line_name] = 0

@@ -993,7 +1022,6 @@ class NetCDFModelImporter:
 except Exception as e:
 if strict_validation:
 raise
-self.logger.warning(f"Failed to import line {line_name}: {e}")
 continue

 return count
@@ -1015,9 +1043,6 @@ class NetCDFModelImporter:
 bus1_id = bus_map.get(link_data["bus1"])

 if bus0_id is None or bus1_id is None:
-self.logger.warning(
-f"Bus not found for link '{link_name}': bus0='{link_data['bus0']}', bus1='{link_data['bus1']}'"
-)
 continue

 # Generate a unique name for this link
@@ -1064,7 +1089,6 @@ class NetCDFModelImporter:
 except Exception as e:
 if strict_validation:
 raise
-self.logger.warning(f"Failed to import link {link_name}: {e}")
 continue

 return count
@@ -1098,9 +1122,6 @@ class NetCDFModelImporter:
 try:
 bus_id = bus_map.get(su_data["bus"])
 if bus_id is None:
-self.logger.warning(
-f"Bus '{su_data['bus']}' not found for storage unit '{su_name}'"
-)
 continue

 # Generate a unique name for this storage unit
@@ -1166,7 +1187,6 @@ class NetCDFModelImporter:
 except Exception as e:
 if strict_validation:
 raise
-self.logger.warning(f"Failed to import storage unit {su_name}: {e}")
 continue

 return count
@@ -1203,9 +1223,6 @@ class NetCDFModelImporter:
 try:
 bus_id = bus_map.get(store_data["bus"])
 if bus_id is None:
-self.logger.warning(
-f"Bus '{store_data['bus']}' not found for store '{store_name}'"
-)
 continue

 # Handle duplicate names by appending counter
@@ -1213,9 +1230,6 @@ class NetCDFModelImporter:
 if store_name in name_counter:
 name_counter[store_name] += 1
 unique_name = f"{store_name}_{name_counter[store_name]}"
-self.logger.warning(
-f"Duplicate store name '{store_name}' renamed to '{unique_name}'"
-)
 else:
 name_counter[store_name] = 0

@@ -1276,7 +1290,6 @@ class NetCDFModelImporter:
 except Exception as e:
 if strict_validation:
 raise
-self.logger.warning(f"Failed to import store {store_name}: {e}")
 continue

 return count
@@ -1355,15 +1368,10 @@ class NetCDFModelImporter:
 # Validate required columns
 required_columns = {"name", "longitude", "latitude"}
 if not required_columns.issubset(location_df.columns):
-missing_cols = required_columns - set(location_df.columns)
-self.logger.warning(
-f"Location CSV missing required columns: {missing_cols}. Found columns: {list(location_df.columns)}"
-)
 return None

 # Create lookup dictionary
 location_map = {}
-skipped_count = 0

 for _, row in location_df.iterrows():
 name = row["name"]
@@ -1372,15 +1380,10 @@ class NetCDFModelImporter:

 # Skip rows with missing data
 if pd.isna(name) or pd.isna(longitude) or pd.isna(latitude):
-skipped_count += 1
 continue

 # Validate coordinate ranges
 if not (-180 <= longitude <= 180) or not (-90 <= latitude <= 90):
-self.logger.warning(
-f"Invalid coordinates for '{name}': longitude={longitude}, latitude={latitude}"
-)
-skipped_count += 1
 continue

 location_map[str(name).strip()] = (
@@ -1388,17 +1391,12 @@ class NetCDFModelImporter:
 float(longitude),
 )

-self.logger.info(
-f"Loaded {len(location_map)} component locations from CSV (skipped {skipped_count} invalid entries)"
-)
 return location_map

-except Exception
-self.logger.error(f"Failed to parse location CSV {csv_path}: {e}")
+except Exception:
 return None

-except Exception
-self.logger.warning(f"Error detecting location CSV: {e}")
+except Exception:
 return None

 def _get_or_create_carrier(self, conn, carrier_name: str) -> int:
@@ -1564,16 +1562,9 @@ class NetCDFModelImporter:
 if strict_validation:
 raise
 else:
-self.logger.warning(
-f"Skipping undefined/invalid attribute '{attr_name}' for {component_type} component {component_id}: {e}"
-)
 skipped_count += 1
 continue
 else:
-# Log but don't fail on other attribute import errors (like type conversion issues)
-self.logger.warning(
-f"Skipping attribute {attr_name} for component {component_id}: {e}"
-)
 skipped_count += 1

 def _import_component_timeseries(
@@ -1632,9 +1623,6 @@ class NetCDFModelImporter:
 values.append(float(value))

 if not values:
-self.logger.warning(
-f"No valid timeseries points for '{attr_name}' on {component_type} '{component_name}'"
-)
 continue

 # Use optimized timeseries attribute setting
@@ -1647,16 +1635,8 @@ class NetCDFModelImporter:
 if strict_validation:
 raise
 else:
-self.logger.warning(
-f"Skipping timeseries attribute '{attr_name}' for {component_type} component '{component_name}': {e}"
-)
 continue

-if timeseries_count > 0:
-self.logger.debug(
-f"Imported {timeseries_count} timeseries attributes for {component_type} '{component_name}'"
-)
-
 def _generate_unique_name(self, base_name: str, component_type: str) -> str:
 """
 Generate a unique name for a component, ensuring no duplicates across all component types.
@@ -1720,7 +1700,7 @@ class NetCDFModelImporter:
 base_angle = (2 * math.pi * component_index) / component_count_at_bus
 angle_jitter = random.uniform(
 -math.pi / 8, math.pi / 8
-) #
+) # +/- 22.5 degrees jitter
 angle = base_angle + angle_jitter

 # Vary distance randomly within the radius (use more of the available radius)
@@ -1811,23 +1791,12 @@ class NetCDFModelImporter:

 # Try exact match first
 if component_name in location_map:
-
-self.logger.debug(
-f"CSV location exact match for '{component_name}': {coordinates}"
-)
-return coordinates
+return location_map[component_name]

 # Try resolving back to original name
 original_name = self._resolve_original_component_name(component_name)
 if original_name != component_name and original_name in location_map:
-
-self.logger.debug(
-f"CSV location resolved match for '{component_name}' -> '{original_name}': {coordinates}"
-)
-return coordinates
+return location_map[original_name]

 # No match found
-self.logger.debug(
-f"No CSV location found for component '{component_name}' (original: '{original_name}')"
-)
 return None
pyconvexity/solvers/pypsa/api.py
CHANGED
@@ -4,7 +4,6 @@ High-level API for PyPSA solver integration.
 Provides user-friendly functions for the most common workflows.
 """

-import logging
 from typing import Dict, Any, Optional, Callable

 from pyconvexity.core.database import database_context
@@ -13,8 +12,6 @@ from pyconvexity.solvers.pypsa.solver import NetworkSolver
 from pyconvexity.solvers.pypsa.storage import ResultStorage
 from pyconvexity.solvers.pypsa.constraints import ConstraintApplicator

-logger = logging.getLogger(__name__)
-

 def solve_network(
 db_path: str,
@@ -143,19 +140,11 @@ def solve_network(

 # Only optimize if there's significant free space (>5% threshold for post-solve)
 if should_optimize_database(conn, free_space_threshold_percent=5.0):
-
-
-
-f"Database optimization completed: {optimization_result['space_reclaimed']:,} bytes reclaimed"
-)
-else:
-logger.debug(
-"Skipping database optimization - insufficient free space"
-)
-
-except Exception as e:
+optimize_database(conn)
+
+except Exception:
 # Don't fail the solve if optimization fails
-
+pass

 if progress_callback:
 progress_callback(100, "Complete")
@@ -448,9 +437,6 @@ def _transform_to_comprehensive_format(
 return comprehensive_result

 except Exception as e:
-logger.error(
-f"Failed to transform result to comprehensive format: {e}", exc_info=True
-)
 # Return original result with error info if transformation fails
 return {
 **pyconvexity_result,
pyconvexity/solvers/pypsa/batch_loader.py
CHANGED
@@ -3,7 +3,6 @@ PyPSA Batch Data Loader
 Simplified to always create MultiIndex timeseries for consistent multi-period optimization.
 """

-import logging
 import pandas as pd
 import json
 from typing import Dict, Any, List, Optional
@@ -11,8 +10,6 @@ from typing import Dict, Any, List, Optional
 from pyconvexity.models.attributes import get_timeseries
 from pyconvexity.models import get_network_time_periods

-logger = logging.getLogger(__name__)
-

 class PyPSABatchLoader:
 """
@@ -156,7 +153,6 @@
 # Get network time periods for proper timestamp alignment
 network_time_periods = get_network_time_periods(conn)
 if not network_time_periods:
-logger.warning("No time periods found for network")
 return {comp_id: {} for comp_id in component_ids}

 # Convert to timestamps and extract years
@@ -252,15 +248,8 @@
 component_timeseries[comp_id][attr_name] = pd.Series(
 values, index=multi_index
 )
-else:
-logger.warning(
-f"No valid timestamps for timeseries {attr_name}"
-)

-except Exception
-logger.warning(
-f"Failed to load timeseries {attr_name} for component {comp_id}: {e}"
-)
+except Exception:
 continue

 return component_timeseries
pyconvexity/solvers/pypsa/builder.py
CHANGED
@@ -4,7 +4,6 @@ Network building functionality for PyPSA solver integration.
 Simplified to always use MultiIndex format for consistent multi-period optimization.
 """

-import logging
 import json
 import pandas as pd
 from typing import Dict, Any, Optional, Callable
@@ -15,8 +14,6 @@ from pyconvexity.models import (
 get_network_info,
 )

-logger = logging.getLogger(__name__)
-

 class NetworkBuilder:
 """
@@ -140,7 +137,7 @@
 for tp in time_periods
 ]
 except Exception as e:
-
+pass # Failed to load time periods

 # Load all component types
 component_types = [
@@ -173,7 +170,7 @@
 for comp in components
 ]
 except Exception as e:
-
+pass # Failed to load components

 return data

@@ -188,7 +185,6 @@
 try:
 time_periods = get_network_time_periods(conn)
 if not time_periods:
-logger.error("No time periods found for network")
 return

 # Convert to pandas DatetimeIndex
@@ -221,12 +217,7 @@
 # Store years for statistics
 network._available_years = years

-logger.info(
-f"Time index: {len(multi_index)} snapshots across {len(years)} investment periods: {years}"
-)
-
 except Exception as e:
-logger.error(f"Failed to set time index: {e}")
 network._available_years = []

 def _load_carriers(self, conn, network: "pypsa.Network"):
@@ -381,14 +372,8 @@
 if include_unmet_loads:
 unmet_loads = list_components_by_type(conn, "UNMET_LOAD")
 all_generators = generators + unmet_loads
-if self.verbose:
-logger.info(
-f"Loading {len(generators)} generators and {len(unmet_loads)} unmet loads"
-)
 else:
 all_generators = generators
-if self.verbose:
-logger.info(f"Loading {len(generators)} generators (unmet loads disabled)")

 generator_ids = [gen.id for gen in all_generators]

@@ -635,12 +620,8 @@
 network.snapshot_weightings.loc[:, "objective"] = weightings
 network.snapshot_weightings.loc[:, "generators"] = weightings
 network.snapshot_weightings.loc[:, "stores"] = weightings
-else:
-logger.warning(
-f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})"
-)
 except Exception as e:
-
+pass # Failed to set snapshot weightings

 def _parse_time_interval(self, time_interval: str) -> Optional[float]:
 """Parse time interval string to hours."""
@@ -661,7 +642,6 @@
 # Try to parse as float (assume hours)
 return float(time_interval)
 except (ValueError, TypeError):
-logger.warning(f"Could not parse time interval: {time_interval}")
 return None

 def _build_bus_id_to_name_map(self, conn) -> Dict[int, str]:
pyconvexity/solvers/pypsa/solver.py
CHANGED
@@ -4,15 +4,12 @@ Solving functionality for PyPSA networks.
 Simplified to always use multi-period optimization for consistency.
 """

-import logging
 import time
 import uuid
 import pandas as pd
 import numpy as np
 from typing import Dict, Any, Optional

-logger = logging.getLogger(__name__)
-

 class NetworkSolver:
 """
@@ -66,7 +63,6 @@
 return settings_file if settings_file.exists() else None

 except Exception as e:
-logger.warning(f"Failed to determine user settings path: {e}")
 return None

 def _resolve_default_solver(self) -> str:
@@ -76,9 +72,6 @@

 settings_path = self._get_user_settings_path()
 if not settings_path:
-logger.debug(
-"User settings file not found, using 'highs' as default solver"
-)
 return "highs"

 with open(settings_path, "r") as f:
@@ -115,13 +108,9 @@
 if default_solver in known_solvers:
 return default_solver
 else:
-logger.warning(
-f"Unknown default solver '{default_solver}' in user settings, falling back to 'highs'"
-)
 return "highs"

 except Exception as e:
-logger.warning(f"Failed to read default solver from user settings: {e}")
 return "highs"

 def solve_network(
@@ -173,8 +162,6 @@

 years = list(network.investment_periods)

-logger.info(f"Solving with {actual_solver_name}: {len(years)} periods {years}, discount rate {effective_discount_rate}")
-
 # Calculate investment period weightings with discount rate
 self._calculate_investment_weightings(network, effective_discount_rate)

@@ -206,17 +193,10 @@
 extra_functionality = self._create_extra_functionality(
 model_constraints, constraint_applicator
 )
-if self.verbose:
-logger.info(
-f"Prepared {len(model_constraints)} model constraints for optimization"
-)

 # NOTE: Model constraints are applied DURING solve via extra_functionality
 # Network constraints were already applied to the network structure before solve

-if self.verbose:
-logger.info(f"Snapshots: {len(network.snapshots)}, Solver options: {solver_config}")
-
 if solver_config:
 result = network.optimize(
 solver_name=actual_solver_name,
@@ -252,17 +232,10 @@
 solve_result["year_statistics"] = year_statistics
 solve_result["year_statistics_available"] = len(year_statistics) > 0

-objective_value = getattr(network, "objective", None)
-logger.info(
-f"Solve completed in {solve_time:.2f}s - status: {solve_result['status']}, objective: {objective_value}"
-)
-
 return solve_result

 except Exception as e:
 solve_time = time.time() - start_time
-logger.error(f"Solve failed after {solve_time:.2f} seconds: {e}")
-logger.exception("Full solve error traceback:")

 return {
 "success": False,
@@ -609,10 +582,7 @@
 return solver_name, solver_options

 else:
-# Unknown solver name -
-logger.warning(
-f"Unknown solver name '{solver_name}' - falling back to 'highs'"
-)
+# Unknown solver name - fall back to highs
 return "highs", solver_options

 def _detect_constraint_type(self, constraint_code: str) -> str:
@@ -690,14 +660,10 @@
 network, snapshots, constraint
 )
 except Exception as e:
-logger.error(
-f"Failed to apply optimization constraint {constraint.get('name', 'unknown')}: {e}"
-)
 continue

 except Exception as e:
-
-# Don't re-raise - let optimization continue
+pass # Don't re-raise - let optimization continue

 return extra_functionality

@@ -717,9 +683,6 @@

 if weight is None:
 weight = 1.0
-logger.warning(
-f"Could not parse time interval '{time_interval}', using default weight of 1.0"
-)

 # Create weightings array - all snapshots get the same weight for this time resolution
 weightings = [weight] * len(time_periods)
@@ -729,14 +692,8 @@
 network.snapshot_weightings.loc[:, "objective"] = weightings
 network.snapshot_weightings.loc[:, "generators"] = weightings
 network.snapshot_weightings.loc[:, "stores"] = weightings
-else:
-logger.warning(
-f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})"
-)
 except Exception as e:
-
-f"Failed to set snapshot weightings after multi-period setup: {e}"
-)
+pass # Failed to set snapshot weightings

 def _parse_time_interval(self, time_interval: str) -> Optional[float]:
 """Parse time interval string to hours - handles multiple formats."""
@@ -780,7 +737,6 @@
 return float(interval)

 except (ValueError, TypeError) as e:
-logger.warning(f"Could not parse time interval '{time_interval}': {e}")
 return None

 def _calculate_investment_weightings(
@@ -872,8 +828,7 @@
 network.investment_period_weightings = weightings_df

 except Exception as e:
-
-logger.exception("Full traceback:")
+pass # Failed to calculate investment weightings

 def _extract_solve_results(
 self,
@@ -934,7 +889,6 @@
 return solve_result

 except Exception as e:
-logger.error(f"Failed to extract solve results: {e}")
 return {
 "success": False,
 "status": "extraction_failed",
@@ -983,13 +937,9 @@
 if "optimal" in term_condition:
 return True

-logger.warning(
-f"Could not determine solve success: status={status}, objective={objective_value}"
-)
 return False

 except Exception as e:
-logger.error(f"Error determining solve success: {e}")
 return False

 def _convert_pypsa_result_to_dict(self, result) -> Dict[str, Any]:
@@ -1029,7 +979,6 @@
 return result_dict

 except Exception as e:
-logger.warning(f"Failed to convert PyPSA result to dict: {e}")
 return {"status": "conversion_failed", "error": str(e)}

 def _calculate_comprehensive_network_statistics(
@@ -1125,7 +1074,6 @@
 else:
 statistics["pypsa_statistics"] = {}
 except Exception as e:
-logger.error(f"Failed to calculate PyPSA statistics: {e}")
 statistics["pypsa_statistics"] = {}

 # Custom statistics - calculate detailed breakdowns
@@ -1205,10 +1153,6 @@
 return statistics

 except Exception as e:
-logger.error(
-f"Failed to calculate comprehensive network statistics: {e}",
-exc_info=True,
-)
 return {
 "error": str(e),
 "core_summary": {},
@@ -1245,15 +1189,11 @@
 )
 year_statistics[year] = year_stats
 except Exception as e:
-logger.error(f"Failed to calculate statistics for year {year}: {e}")
 continue

 return year_statistics

 except Exception as e:
-logger.error(
-f"Failed to calculate year-based statistics: {e}", exc_info=True
-)
 return {}

 def _calculate_network_statistics_for_year(
@@ -1367,10 +1307,6 @@
 return statistics

 except Exception as e:
-logger.error(
-f"Failed to calculate network statistics for year {year}: {e}",
-exc_info=True,
-)
 return {
 "error": str(e),
 "core_summary": {},
@@ -1402,7 +1338,6 @@
 return None

 except Exception as e:
-logger.error(f"Failed to filter timeseries by year {year}: {e}")
 return None

 def _get_year_weightings(self, network: "pypsa.Network", year: int) -> "np.ndarray":
@@ -1438,7 +1373,6 @@
 return None

 except Exception as e:
-logger.error(f"Failed to get year weightings for year {year}: {e}")
 return None

 def _count_year_snapshots(self, snapshots: "pd.Index", year: int) -> int:
@@ -1459,7 +1393,6 @@
 return 0

 except Exception as e:
-logger.error(f"Failed to count snapshots for year {year}: {e}")
 return 0

 def _calculate_year_carrier_statistics(
pyconvexity/solvers/pypsa/storage.py
CHANGED
@@ -4,7 +4,6 @@ Result storage functionality for PyPSA solver integration.
 Handles storing solve results back to the database with proper validation and error handling.
 """

-import logging
 import uuid
 import pandas as pd
 import numpy as np
@@ -18,8 +17,6 @@ from pyconvexity.models import (
 )
 from pyconvexity.validation import get_validation_rule

-logger = logging.getLogger(__name__)
-

 class ResultStorage:
 """
@@ -80,12 +77,6 @@
 )
 conn.commit()

-total_gen = network_stats.get("core_summary", {}).get("total_generation_mwh", 0)
-total_cost = network_stats.get("core_summary", {}).get("total_cost", 0)
-logger.info(
-f"Results stored: {total_gen:.0f} MWh generation, {total_cost:.0f} cost, {year_stats_stored} years"
-)
-
 return {
 "component_stats": component_stats,
 "network_stats": network_stats,
@@ -95,7 +86,6 @@
 }

 except Exception as e:
-logger.error(f"Result storage failed: {e}")
 return {
 "component_stats": {},
 "network_stats": {},
@@ -200,7 +190,6 @@
 return results_stats

 except Exception as e:
-logger.error(f"Error storing solve results: {e}", exc_info=True)
 results_stats["errors"] += 1
 return results_stats

@@ -272,9 +261,6 @@
 ):
 continue
 else:
-logger.warning(
-f"Error storing timeseries {attr_name} for {component_type} '{component_name}': {e}"
-)
 continue

 # Store static optimization results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
@@ -325,17 +311,11 @@
 ):
 continue
 else:
-logger.warning(
-f"Error storing static {attr_name} for {component_type} '{component_name}': {e}"
-)
 continue

 return stored_count

 except Exception as e:
-logger.error(
-f"Error storing results for {component_type}: {e}", exc_info=True
-)
 return stored_count

 def _store_solve_summary(
@@ -394,7 +374,6 @@
 )

 except Exception as e:
-logger.error(f"Failed to store solve summary: {e}")
 raise # Re-raise to trigger rollback

 def _calculate_network_statistics(
@@ -487,13 +466,9 @@
 },
 }

-logger.info(
-f"Calculated network statistics: core_summary={network_statistics['core_summary']}"
-)
 return network_statistics

 except Exception as e:
-logger.error(f"Failed to calculate network statistics: {e}", exc_info=True)
 # Return empty structure matching expected format
 return {
 "core_summary": {
@@ -549,7 +524,6 @@
 years = network._available_years
 else:
 years = [2020] # Fallback
-logger.warning(f"No year information found, using fallback: {years}")

 # Calculate per-year statistics first
 all_year_stats = {
@@ -618,7 +592,6 @@
 return all_year_stats

 except Exception as e:
-logger.error(f"Failed to calculate carrier statistics: {e}")
 return {
 "dispatch_by_carrier": {},
 "power_capacity_by_carrier": {},
@@ -718,13 +691,11 @@
 stored_count += 1

 except Exception as e:
-logger.error(f"Failed to store statistics for year {year}: {e}")
 continue

 return stored_count

 except Exception as e:
-logger.error(f"Failed to store year-based statistics: {e}")
 return 0

 def _calculate_year_carrier_statistics(
@@ -805,9 +776,6 @@
 else:
 # Fallback: simple sum (will be incorrect for non-1H models)
 generation_mwh = float(year_generation[gen_name].sum())
-logger.warning(
-f"Could not apply snapshot weightings for {gen_name} in year {year} - energy may be incorrect"
-)

 if carrier_name in carrier_stats["dispatch_by_carrier"]:
 carrier_stats["dispatch_by_carrier"][
@@ -852,9 +820,6 @@
 discharge_mwh = float(
 year_storage[su_name].clip(lower=0).sum()
 )
-logger.warning(
-f"Could not apply snapshot weightings for storage unit {su_name} in year {year} - energy may be incorrect"
-)

 if carrier_name in carrier_stats["dispatch_by_carrier"]:
 carrier_stats["dispatch_by_carrier"][
@@ -897,9 +862,6 @@
 discharge_mwh = float(
 year_stores[store_name].clip(lower=0).sum()
 )
-logger.warning(
-f"Could not apply snapshot weightings for store {store_name} in year {year} - energy may be incorrect"
-)

 if carrier_name in carrier_stats["dispatch_by_carrier"]:
 carrier_stats["dispatch_by_carrier"][
@@ -1741,7 +1703,6 @@
 return carrier_stats

 except Exception as e:
-logger.error(f"Failed to calculate year {year} carrier statistics: {e}")
 return {
 "dispatch_by_carrier": {},
 "power_capacity_by_carrier": {},
@@ -1829,7 +1790,7 @@
 )

 except Exception as e:
-
+pass # Failed to process last year results

 # For other stats (dispatch, emissions, costs): sum across all years
 for year, results_json in year_results:
@@ -1881,13 +1842,11 @@
 )

 except Exception as e:
-logger.error(f"Failed to process year {year} results: {e}")
 continue

 return totals

 except Exception as e:
-logger.error(f"Failed to sum year-based carrier statistics: {e}")
 # Return empty structure on error
 return {
 "dispatch_by_carrier": {},
@@ -1917,7 +1876,6 @@
 }
 return json.dumps(results, default=self._json_serializer)
 except Exception as e:
-logger.warning(f"Failed to serialize results JSON: {e}")
 return json.dumps({"error": "serialization_failed"})

 def _serialize_metadata_json(self, solve_result: Dict[str, Any]) -> str:
@@ -1935,7 +1893,6 @@
 }
 return json.dumps(metadata, default=self._json_serializer)
 except Exception as e:
-logger.warning(f"Failed to serialize metadata JSON: {e}")
 return json.dumps({"error": "serialization_failed"})

 def _filter_timeseries_by_year(
@@ -1961,7 +1918,6 @@
 return None

 except Exception as e:
-logger.error(f"Failed to filter timeseries by year {year}: {e}")
 return None

 def _get_year_weightings(self, network: "pypsa.Network", year: int) -> "np.ndarray":
@@ -1997,7 +1953,6 @@
 return None

 except Exception as e:
-logger.error(f"Failed to get year weightings for year {year}: {e}")
 return None

 def _calculate_total_demand(self, network: "pypsa.Network") -> float:
@@ -2024,7 +1979,6 @@
 return total_demand

 except Exception as e:
-logger.error(f"Failed to calculate total demand: {e}")
 return 0.0

 def _json_serializer(self, obj):
{pyconvexity-0.4.6.dist-info → pyconvexity-0.4.6.post1.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 pyconvexity/__init__.py,sha256=TJvgkEaMHFFWhIGaOdjMgc4rHVIxHKni3Ggg1YAQziE,5347
-pyconvexity/_version.py,sha256=
+pyconvexity/_version.py,sha256=mb7cZWFtBTYPgotnX_1oAZadFITLHrAXwTSs2Eb1dvU,28
 pyconvexity/timeseries.py,sha256=QdKbiqjAlxkJATyKm2Kelx1Ea2PsAnnCYfVLU5VER1Y,11085
 pyconvexity/core/__init__.py,sha256=gdyyHNqOc4h9Nfe9u6NA936GNzH6coGNCMgBvvvOnGE,1196
 pyconvexity/core/database.py,sha256=vwCmuN0B0xwImh6L0bFR4vNWHw_wVfYSG1KwsUjK4iY,14831
@@ -17,8 +17,8 @@ pyconvexity/data/sources/gem.py,sha256=v8OYCMsb2t-8u-YmK8vzMsgI9ArUAOAXMZZQOFpJ-
 pyconvexity/io/__init__.py,sha256=FCyvRDfBUrrNei-y5JVod6MMN1bkPMSSfE0fpKi1aKQ,751
 pyconvexity/io/excel_exporter.py,sha256=9MkZAVnHvsJSmfZ12w29GhDTsYI89fCGphjdo7s_ABs,50506
 pyconvexity/io/excel_importer.py,sha256=Q5petB0WXjbw0TIR4ofG3EkjdT8lBh21yJMbEgdtXfU,59347
-pyconvexity/io/netcdf_exporter.py,sha256=
-pyconvexity/io/netcdf_importer.py,sha256=
+pyconvexity/io/netcdf_exporter.py,sha256=ndbYa_b34LZQ-70Y7KXh3oLpATnARLciQHXKNpcjNoY,6828
+pyconvexity/io/netcdf_importer.py,sha256=JtdfIp48F_fQIlRgWJ3XaeeqtvQRa_FeE4hjVaA623I,67189
 pyconvexity/models/__init__.py,sha256=WqDSq1Mst7iJsFytausruoM562FKlOKV0Egmnpm2900,4695
 pyconvexity/models/attributes.py,sha256=RpH3rBoHD33xBSXUEfaD-CvRj3JruolCwexo6HPMGC8,23388
 pyconvexity/models/carriers.py,sha256=L_WuDMW13k8aaA-obsDPxjmpZgZELiIAZuNtxq7YLpg,3447
@@ -28,15 +28,15 @@ pyconvexity/models/results.py,sha256=6j1H4AwVmp94L97gl_sGnE8izMxkU5o89guKIU8JdtE
 pyconvexity/models/scenarios.py,sha256=-0UPUDXf6r9mFriA-z2fD5KKMARm2PUBjLba49S9mCI,5867
 pyconvexity/solvers/__init__.py,sha256=t1gOUTqbYDCtIvKPqGVY1fjKwqJi2Od9bGeIO7bPvJE,667
 pyconvexity/solvers/pypsa/__init__.py,sha256=nudu0AOYEfPhpGHZ1Q9pUgjGeeIJd_zeULc975iyluE,555
-pyconvexity/solvers/pypsa/api.py,sha256=
-pyconvexity/solvers/pypsa/batch_loader.py,sha256=
-pyconvexity/solvers/pypsa/builder.py,sha256=
+pyconvexity/solvers/pypsa/api.py,sha256=CWKslptTlZrSbuHy916_PHhCG8nO9SCfjTXkJZylLM8,17512
+pyconvexity/solvers/pypsa/batch_loader.py,sha256=ZgOcZqMnMS3TOYTq2Ly2O4cuwhNNAicu3EDq1Fj38OI,11929
+pyconvexity/solvers/pypsa/builder.py,sha256=1ZU68Wtl_jQSXHzspKQDkR6bxAVU1nKvPfnPUl0aO3k,23256
 pyconvexity/solvers/pypsa/constraints.py,sha256=20WliFDhPQGMAsS4VOTU8LZJpsFpLVRHpNsZW49GTcc,16397
-pyconvexity/solvers/pypsa/solver.py,sha256=
-pyconvexity/solvers/pypsa/storage.py,sha256=
+pyconvexity/solvers/pypsa/solver.py,sha256=M-s-VUCnRD8Jdh22PCUA-gWgYp1eH6_sgpoSzcv6kNQ,59762
+pyconvexity/solvers/pypsa/storage.py,sha256=C8zLPXSd95LT0Aq6c00vZu1xlJHEF_l8RDUhB-s5mJ0,91704
 pyconvexity/validation/__init__.py,sha256=VJNZlFoWABsWwUKktNk2jbtXIepH5omvC0WtsTS7o3o,583
 pyconvexity/validation/rules.py,sha256=GiNadc8hvbWBr09vUkGiLLTmSdvtNSeGLFwvCjlikYY,9241
-pyconvexity-0.4.6.dist-info/METADATA,sha256=
-pyconvexity-0.4.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-pyconvexity-0.4.6.dist-info/top_level.txt,sha256=wFPEDXVaebR3JO5Tt3HNse-ws5aROCcxEco15d6j64s,12
-pyconvexity-0.4.6.dist-info/RECORD,,
+pyconvexity-0.4.6.post1.dist-info/METADATA,sha256=42uMFhx4muAr2ZyGYa00osp9K_ifn5l14814yYyHkis,4973
+pyconvexity-0.4.6.post1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pyconvexity-0.4.6.post1.dist-info/top_level.txt,sha256=wFPEDXVaebR3JO5Tt3HNse-ws5aROCcxEco15d6j64s,12
+pyconvexity-0.4.6.post1.dist-info/RECORD,,
{pyconvexity-0.4.6.dist-info → pyconvexity-0.4.6.post1.dist-info}/WHEEL
File without changes
{pyconvexity-0.4.6.dist-info → pyconvexity-0.4.6.post1.dist-info}/top_level.txt
File without changes