pyconvexity 0.4.3__py3-none-any.whl → 0.4.6.post1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyconvexity might be problematic.
- pyconvexity/_version.py +1 -1
- pyconvexity/data/schema/03_validation_data.sql +2 -1
- pyconvexity/io/netcdf_exporter.py +1 -7
- pyconvexity/io/netcdf_importer.py +82 -113
- pyconvexity/solvers/pypsa/api.py +4 -18
- pyconvexity/solvers/pypsa/batch_loader.py +1 -12
- pyconvexity/solvers/pypsa/builder.py +3 -23
- pyconvexity/solvers/pypsa/solver.py +4 -71
- pyconvexity/solvers/pypsa/storage.py +1 -47
- pyconvexity-0.4.6.post1.dist-info/METADATA +148 -0
- {pyconvexity-0.4.3.dist-info → pyconvexity-0.4.6.post1.dist-info}/RECORD +13 -13
- pyconvexity-0.4.3.dist-info/METADATA +0 -47
- {pyconvexity-0.4.3.dist-info → pyconvexity-0.4.6.post1.dist-info}/WHEEL +0 -0
- {pyconvexity-0.4.3.dist-info → pyconvexity-0.4.6.post1.dist-info}/top_level.txt +0 -0
pyconvexity/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.4.3"
+__version__ = "0.4.6.post1"
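
Note: the `.post1` suffix is a PEP 440 post-release: it sorts after 0.4.6 and is typically used for packaging-level fixes rather than new code. The ordering can be checked with the `packaging` library:

from packaging.version import Version

assert Version("0.4.6.post1") > Version("0.4.6")
assert Version("0.4.6.post1") < Version("0.4.7")
assert Version("0.4.6.post1") > Version("0.4.3")  # the version being replaced here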
pyconvexity/data/schema/03_validation_data.sql
CHANGED
@@ -33,7 +33,8 @@ INSERT INTO attribute_validation_rules (component_type, attribute_name, display_
 ('BUS', 'q', 'Reactive Power', 'float', 'MVar', '0', 'timeseries', FALSE, FALSE, 'reactive power (positive if net generation at bus)', NULL, NULL, 'electrical', TRUE),
 ('BUS', 'v_mag_pu', 'Voltage Magnitude', 'float', 'per unit', '1', 'timeseries', FALSE, FALSE, 'Voltage magnitude, per unit of v_nom', NULL, NULL, 'electrical', TRUE),
 ('BUS', 'v_ang', 'Voltage Angle', 'float', 'radians', '0', 'timeseries', FALSE, FALSE, 'Voltage angle', NULL, NULL, 'electrical', TRUE),
-('BUS', 'marginal_price', 'Marginal Price', 'float', 'currency/MWh', '0', 'timeseries', FALSE, FALSE, 'Locational marginal price from LOPF from power balance constraint', NULL, NULL, 'costs', TRUE)
+('BUS', 'marginal_price', 'Marginal Price', 'float', 'currency/MWh', '0', 'timeseries', FALSE, FALSE, 'Locational marginal price from LOPF from power balance constraint', NULL, NULL, 'costs', TRUE),
+('BUS', 'market_price', 'Market Price', 'float', 'currency/MWh', '0', 'timeseries', FALSE, FALSE, 'Custom calculated market price for the bus (independent of PyPSA marginal cost)', NULL, NULL, 'costs', TRUE);
 
 -- ============================================================================
 -- GENERATOR ATTRIBUTES
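
Note: this registers a new `market_price` timeseries attribute for buses alongside the existing `marginal_price`. A minimal sketch of inspecting the registered rules from Python, assuming a SQLite database with this schema applied; the `model.db` path is illustrative, and only the `component_type` and `attribute_name` columns are confirmed by the diff:

import sqlite3

conn = sqlite3.connect("model.db")  # hypothetical model database
rows = conn.execute(
    "SELECT * FROM attribute_validation_rules "
    "WHERE component_type = 'BUS' "
    "AND attribute_name IN ('marginal_price', 'market_price')"
).fetchall()
for row in rows:
    print(row)
conn.close()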
pyconvexity/io/netcdf_exporter.py
CHANGED
@@ -3,22 +3,18 @@ NetCDF exporter for PyConvexity energy system models.
 Exports networks to PyPSA NetCDF format using existing PyPSA infrastructure.
 """
 
-import logging
 from typing import Dict, Any, Optional, Callable
 from pathlib import Path
 
 # Import existing PyPSA functionality from pyconvexity
-from pyconvexity.core.database import open_connection
 from pyconvexity.solvers.pypsa import build_pypsa_network
 
-logger = logging.getLogger(__name__)
-
 
 class NetCDFModelExporter:
     """Export network model to PyPSA NetCDF format"""
 
     def __init__(self):
-        self.logger = logging.getLogger(__name__)
+        pass
 
     def export_to_netcdf(
         self,
@@ -78,7 +74,6 @@ class NetCDFModelExporter:
             }
 
         except Exception as e:
-            self.logger.error(f"NetCDF export failed: {e}", exc_info=True)
             if progress_callback:
                 progress_callback(None, f"Export failed: {str(e)}")
             raise
@@ -141,7 +136,6 @@ class NetCDFModelExporter:
             }
 
         except Exception as e:
-            self.logger.error(f"CSV export failed: {e}", exc_info=True)
             if progress_callback:
                 progress_callback(None, f"Export failed: {str(e)}")
             raise
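
Note: export errors now propagate to the caller with only the progress callback notified, so callers should handle failures themselves. A usage sketch; the full `export_to_netcdf` signature is truncated in this diff, so the keyword arguments below are illustrative assumptions:

from pyconvexity.io.netcdf_exporter import NetCDFModelExporter

def on_progress(pct, msg):
    # pct can be None when only a status message is reported
    print(pct, msg)

exporter = NetCDFModelExporter()
try:
    # Argument names are assumptions; the real signature is elided above.
    exporter.export_to_netcdf(db_path="model.db", output_path="network.nc",
                              progress_callback=on_progress)
except Exception as exc:
    print(f"Export failed: {exc}")  # no logger.error anymore; handle it here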
pyconvexity/io/netcdf_importer.py
CHANGED
@@ -3,7 +3,6 @@ NetCDF importer for PyConvexity energy system models.
 Imports PyPSA NetCDF files into PyConvexity database format.
 """
 
-import logging
 import pandas as pd
 import numpy as np
 from typing import Dict, Any, Optional, Callable, Tuple, List
@@ -33,14 +32,81 @@ from pyconvexity.models import (
 from pyconvexity.validation import get_validation_rule
 from pyconvexity.timeseries import set_timeseries
 
-
+
+def _pandas_freq_to_iso8601(freq: str) -> str:
+    """
+    Convert pandas frequency code to ISO 8601 duration format.
+
+    Args:
+        freq: Pandas frequency code (e.g., "H", "30T", "2H", "15min", "D")
+
+    Returns:
+        ISO 8601 duration string (e.g., "PT1H", "PT30M", "PT2H", "PT15M", "P1D")
+    """
+    if not freq:
+        return "PT1H"  # Default to hourly
+
+    freq = freq.strip().upper()
+
+    # Handle common pandas frequency codes
+    # Hourly patterns: "H", "1H", "2H", etc.
+    if freq == "H" or freq == "1H":
+        return "PT1H"
+    if freq.endswith("H"):
+        try:
+            hours = int(freq[:-1])
+            return f"PT{hours}H"
+        except ValueError:
+            pass
+
+    # Minute patterns: "T", "MIN", "30T", "30MIN", "15T", etc.
+    if freq == "T" or freq == "MIN" or freq == "1T" or freq == "1MIN":
+        return "PT1M"
+    if freq.endswith("T"):
+        try:
+            minutes = int(freq[:-1])
+            return f"PT{minutes}M"
+        except ValueError:
+            pass
+    if freq.endswith("MIN"):
+        try:
+            minutes = int(freq[:-3])
+            return f"PT{minutes}M"
+        except ValueError:
+            pass
+
+    # Second patterns: "S", "1S", "30S", etc.
+    if freq == "S" or freq == "1S":
+        return "PT1S"
+    if freq.endswith("S") and not freq.endswith("MS"):
+        try:
+            seconds = int(freq[:-1])
+            return f"PT{seconds}S"
+        except ValueError:
+            pass
+
+    # Daily patterns: "D", "1D", etc.
+    if freq == "D" or freq == "1D":
+        return "P1D"
+    if freq.endswith("D"):
+        try:
+            days = int(freq[:-1])
+            return f"P{days}D"
+        except ValueError:
+            pass
+
+    # Weekly patterns: "W", "1W", etc.
+    if freq == "W" or freq == "1W" or freq.startswith("W-"):
+        return "P1W"
+
+    # If we can't parse it, default to hourly
+    return "PT1H"
 
 
 class NetCDFModelImporter:
     """Import PyPSA NetCDF files into PyConvexity database format"""
 
     def __init__(self):
-        self.logger = logging.getLogger(__name__)
         # Set random seed for reproducible coordinate generation
        random.seed(42)
        np.random.seed(42)
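
Note: `_pandas_freq_to_iso8601` uppercases its input first, so lowercase aliases like "15min" hit the "MIN" branch, anchored weekly codes such as "W-SUN" collapse to P1W, and anything unrecognized silently falls back to hourly. Expected mappings, following the code as shown:

assert _pandas_freq_to_iso8601("H") == "PT1H"
assert _pandas_freq_to_iso8601("2H") == "PT2H"
assert _pandas_freq_to_iso8601("30T") == "PT30M"    # legacy minute alias
assert _pandas_freq_to_iso8601("15min") == "PT15M"  # uppercased to "15MIN"
assert _pandas_freq_to_iso8601("D") == "P1D"
assert _pandas_freq_to_iso8601("W-SUN") == "P1W"    # anchored weekly
assert _pandas_freq_to_iso8601("QS") == "PT1H"      # unparseable -> hourly fallback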
@@ -103,7 +169,6 @@ class NetCDFModelImporter:
             )
 
         except Exception as e:
-            self.logger.error(f"Error importing NetCDF: {e}", exc_info=True)
             if progress_callback:
                 progress_callback(None, f"Error: {str(e)}")
             raise
@@ -164,7 +229,6 @@ class NetCDFModelImporter:
             error_msg += f"\n\nThis indicates a required column is missing from one of your CSV files. "
             error_msg += "Please ensure your CSV files follow the PyPSA format specification."
 
-            self.logger.error(error_msg)
             raise ValueError(error_msg)
 
         if progress_callback:
@@ -185,7 +249,6 @@ class NetCDFModelImporter:
             )
 
         except Exception as e:
-            self.logger.error(f"Error importing PyPSA CSV: {e}", exc_info=True)
             if progress_callback:
                 progress_callback(None, f"Error: {str(e)}")
             raise
@@ -197,16 +260,10 @@ class NetCDFModelImporter:
 
             return pypsa
         except ImportError as e:
-            self.logger.error(f"Failed to import PyPSA: {e}", exc_info=True)
             raise ImportError(
                 "PyPSA is not installed or could not be imported. "
                 "Please ensure it is installed correctly in the environment."
             ) from e
-        except Exception as e:
-            self.logger.error(
-                f"An unexpected error occurred during PyPSA import: {e}", exc_info=True
-            )
-            raise
 
     def _validate_csv_directory(self, csv_directory: str) -> None:
         """Validate that the CSV directory contains valid PyPSA CSV files"""
@@ -415,7 +472,6 @@ class NetCDFModelImporter:
                 conn.close()
 
         except Exception as e:
-            self.logger.error(f"Error importing network: {e}", exc_info=True)
             if progress_callback:
                 progress_callback(None, f"Error: {str(e)}")
             raise
@@ -427,7 +483,6 @@ class NetCDFModelImporter:
     def _extract_datetime_snapshots(self, network) -> pd.DatetimeIndex:
         """Extract datetime snapshots from a PyPSA network"""
         if not hasattr(network, "snapshots") or len(network.snapshots) == 0:
-            self.logger.warning("No snapshots found in PyPSA network")
             return pd.DatetimeIndex([])
 
         snapshots = network.snapshots
@@ -435,7 +490,7 @@ class NetCDFModelImporter:
         try:
             # Try direct conversion first (works for simple DatetimeIndex)
             return pd.to_datetime(snapshots)
-        except (TypeError, ValueError)
+        except (TypeError, ValueError):
             # Handle MultiIndex case
             if hasattr(snapshots, "nlevels") and snapshots.nlevels > 1:
                 # Try to use the timesteps attribute if available (common in multi-period networks)
@@ -450,15 +505,10 @@ class NetCDFModelImporter:
                     last_level = snapshots.get_level_values(snapshots.nlevels - 1)
                     datetime_snapshots = pd.to_datetime(last_level)
                     return datetime_snapshots
-                except Exception as multi_e:
-                    self.logger.warning(
-                        f"Failed to extract datetime from MultiIndex: {multi_e}"
-                    )
+                except Exception:
+                    pass
 
         # Final fallback: create a default hourly range
-        self.logger.warning(
-            "Could not extract datetime snapshots, creating default hourly range"
-        )
         default_start = pd.Timestamp("2024-01-01 00:00:00")
         default_end = pd.Timestamp("2024-01-01 23:59:59")
         return pd.date_range(start=default_start, end=default_end, freq="H")
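
Note: PyPSA multi-period networks use a two-level (period, timestep) MultiIndex for snapshots, which is why the importer falls back to the last index level. A small illustration of that fallback path:

import pandas as pd

timesteps = pd.date_range("2024-01-01", periods=4, freq="h")
snapshots = pd.MultiIndex.from_product(
    [[2024], timesteps], names=["period", "timestep"]
)

# pd.to_datetime() rejects a MultiIndex, so take the innermost level instead,
# mirroring the importer's fallback:
last_level = snapshots.get_level_values(snapshots.nlevels - 1)
print(pd.to_datetime(last_level))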
@@ -479,17 +529,17 @@ class NetCDFModelImporter:
             time_start = snapshots.min().strftime("%Y-%m-%d %H:%M:%S")
             time_end = snapshots.max().strftime("%Y-%m-%d %H:%M:%S")
 
-            # Try to infer time interval
+            # Try to infer time interval and convert to ISO 8601 format
             if len(snapshots) > 1:
                 freq = pd.infer_freq(snapshots)
-                time_interval = freq
+                time_interval = _pandas_freq_to_iso8601(freq) if freq else "PT1H"
             else:
-                time_interval = "
+                time_interval = "PT1H"
         else:
             # Default time range if no snapshots
             time_start = "2024-01-01 00:00:00"
             time_end = "2024-01-01 23:59:59"
-            time_interval = "
+            time_interval = "PT1H"
 
         description = (
             network_description
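
Note: with the helper in place, whatever `pd.infer_freq` returns is normalized to an ISO 8601 duration, and both the single-snapshot and no-snapshot branches now agree on the "PT1H" default:

import pandas as pd

snapshots = pd.date_range("2024-01-01", periods=48, freq="30min")
freq = pd.infer_freq(snapshots)  # "30min" on recent pandas, "30T" on older releases
time_interval = _pandas_freq_to_iso8601(freq) if freq else "PT1H"
print(time_interval)  # PT30M in either case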
@@ -511,9 +561,6 @@ class NetCDFModelImporter:
         snapshots = self._extract_datetime_snapshots(network)
 
         if len(snapshots) == 0:
-            self.logger.warning(
-                "No valid snapshots found in PyPSA network, skipping time periods creation"
-            )
             return
 
         # Insert optimized time periods metadata
@@ -686,12 +733,9 @@ class NetCDFModelImporter:
                 # Generate a unique name for this bus
                 unique_name = self._generate_unique_name(str(bus_name), "BUS")
 
-                # Extract
+                # Extract coordinate data
                 x_value = bus_data.get("x", None)
                 y_value = bus_data.get("y", None)
-                self.logger.debug(
-                    f"Bus '{bus_name}' -> '{unique_name}': x={x_value} (type: {type(x_value)}), y={y_value} (type: {type(y_value)})"
-                )
 
                 # Handle NaN/None values properly
                 longitude = (
@@ -743,7 +787,6 @@ class NetCDFModelImporter:
             except Exception as e:
                 if strict_validation:
                     raise
-                self.logger.warning(f"Failed to import bus {bus_name}: {e}")
                 continue
 
         return count
@@ -776,9 +819,6 @@ class NetCDFModelImporter:
                 bus_id = bus_name_to_id.get(bus_name) if bus_name else None
 
                 if not bus_id:
-                    self.logger.warning(
-                        f"Generator {gen_name}: bus '{bus_name}' not found, skipping"
-                    )
                     continue
 
                 # Get or create carrier
@@ -821,7 +861,6 @@ class NetCDFModelImporter:
             except Exception as e:
                 if strict_validation:
                     raise
-                self.logger.warning(f"Failed to import generator {gen_name}: {e}")
                 continue
 
         return count
@@ -855,9 +894,6 @@ class NetCDFModelImporter:
             try:
                 bus_id = bus_map.get(load_data["bus"])
                 if bus_id is None:
-                    self.logger.warning(
-                        f"Bus '{load_data['bus']}' not found for load '{load_name}'"
-                    )
                     continue
 
                 # Generate a unique name for this load
@@ -918,7 +954,6 @@ class NetCDFModelImporter:
             except Exception as e:
                 if strict_validation:
                     raise
-                self.logger.warning(f"Failed to import load {load_name}: {e}")
                 continue
 
         return count
@@ -941,9 +976,6 @@ class NetCDFModelImporter:
                 bus1_id = bus_map.get(line_data["bus1"])
 
                 if bus0_id is None or bus1_id is None:
-                    self.logger.warning(
-                        f"Bus not found for line '{line_name}': bus0='{line_data['bus0']}', bus1='{line_data['bus1']}'"
-                    )
                     continue
 
                 # Handle duplicate names by appending counter
@@ -951,9 +983,6 @@ class NetCDFModelImporter:
                 if line_name in name_counter:
                     name_counter[line_name] += 1
                     unique_name = f"{line_name}_{name_counter[line_name]}"
-                    self.logger.warning(
-                        f"Duplicate line name '{line_name}' renamed to '{unique_name}'"
-                    )
                 else:
                     name_counter[line_name] = 0
 
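
Note: duplicate line and store names are disambiguated with a per-name counter: the first occurrence presumably keeps its name (that assignment is outside the shown hunk) and later ones get _1, _2, ... suffixes. The logic, reduced to a standalone sketch with an illustrative helper:

name_counter = {}

def unique_line_name(line_name: str) -> str:  # illustrative helper, not in the source
    if line_name in name_counter:
        name_counter[line_name] += 1
        return f"{line_name}_{name_counter[line_name]}"
    name_counter[line_name] = 0
    return line_name

print([unique_line_name("L1"), unique_line_name("L1"), unique_line_name("L1")])
# ['L1', 'L1_1', 'L1_2']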
@@ -993,7 +1022,6 @@ class NetCDFModelImporter:
             except Exception as e:
                 if strict_validation:
                     raise
-                self.logger.warning(f"Failed to import line {line_name}: {e}")
                 continue
 
         return count
@@ -1015,9 +1043,6 @@ class NetCDFModelImporter:
                 bus1_id = bus_map.get(link_data["bus1"])
 
                 if bus0_id is None or bus1_id is None:
-                    self.logger.warning(
-                        f"Bus not found for link '{link_name}': bus0='{link_data['bus0']}', bus1='{link_data['bus1']}'"
-                    )
                     continue
 
                 # Generate a unique name for this link
@@ -1064,7 +1089,6 @@ class NetCDFModelImporter:
             except Exception as e:
                 if strict_validation:
                     raise
-                self.logger.warning(f"Failed to import link {link_name}: {e}")
                 continue
 
         return count
@@ -1098,9 +1122,6 @@ class NetCDFModelImporter:
             try:
                 bus_id = bus_map.get(su_data["bus"])
                 if bus_id is None:
-                    self.logger.warning(
-                        f"Bus '{su_data['bus']}' not found for storage unit '{su_name}'"
-                    )
                     continue
 
                 # Generate a unique name for this storage unit
@@ -1166,7 +1187,6 @@ class NetCDFModelImporter:
             except Exception as e:
                 if strict_validation:
                     raise
-                self.logger.warning(f"Failed to import storage unit {su_name}: {e}")
                 continue
 
         return count
@@ -1203,9 +1223,6 @@ class NetCDFModelImporter:
             try:
                 bus_id = bus_map.get(store_data["bus"])
                 if bus_id is None:
-                    self.logger.warning(
-                        f"Bus '{store_data['bus']}' not found for store '{store_name}'"
-                    )
                     continue
 
                 # Handle duplicate names by appending counter
@@ -1213,9 +1230,6 @@ class NetCDFModelImporter:
                 if store_name in name_counter:
                     name_counter[store_name] += 1
                     unique_name = f"{store_name}_{name_counter[store_name]}"
-                    self.logger.warning(
-                        f"Duplicate store name '{store_name}' renamed to '{unique_name}'"
-                    )
                 else:
                     name_counter[store_name] = 0
 
@@ -1276,7 +1290,6 @@ class NetCDFModelImporter:
             except Exception as e:
                 if strict_validation:
                     raise
-                self.logger.warning(f"Failed to import store {store_name}: {e}")
                 continue
 
         return count
@@ -1355,15 +1368,10 @@ class NetCDFModelImporter:
                 # Validate required columns
                 required_columns = {"name", "longitude", "latitude"}
                 if not required_columns.issubset(location_df.columns):
-                    missing_cols = required_columns - set(location_df.columns)
-                    self.logger.warning(
-                        f"Location CSV missing required columns: {missing_cols}. Found columns: {list(location_df.columns)}"
-                    )
                     return None
 
                 # Create lookup dictionary
                 location_map = {}
-                skipped_count = 0
 
                 for _, row in location_df.iterrows():
                     name = row["name"]
@@ -1372,15 +1380,10 @@ class NetCDFModelImporter:
 
                     # Skip rows with missing data
                     if pd.isna(name) or pd.isna(longitude) or pd.isna(latitude):
-                        skipped_count += 1
                         continue
 
                     # Validate coordinate ranges
                     if not (-180 <= longitude <= 180) or not (-90 <= latitude <= 90):
-                        self.logger.warning(
-                            f"Invalid coordinates for '{name}': longitude={longitude}, latitude={latitude}"
-                        )
-                        skipped_count += 1
                        continue
 
                    location_map[str(name).strip()] = (
@@ -1388,17 +1391,12 @@ class NetCDFModelImporter:
                        float(longitude),
                    )
 
-                self.logger.info(
-                    f"Loaded {len(location_map)} component locations from CSV (skipped {skipped_count} invalid entries)"
-                )
                 return location_map
 
-            except Exception as e:
-                self.logger.error(f"Failed to parse location CSV {csv_path}: {e}")
+            except Exception:
                 return None
 
-        except Exception as e:
-            self.logger.warning(f"Error detecting location CSV: {e}")
+        except Exception:
            return None
 
     def _get_or_create_carrier(self, conn, carrier_name: str) -> int:
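
Note: the location CSV must have name/longitude/latitude columns, and rows with missing or out-of-range coordinates are now skipped silently, with the skipped_count bookkeeping gone. A standalone sketch of that validation; the tuple order stored in location_map is not fully visible in this diff, so it is assumed here:

import pandas as pd

location_df = pd.DataFrame({
    "name": ["bus_north", "bad_row"],
    "longitude": [13.4, 720.0],  # 720.0 is out of range
    "latitude": [52.5, 10.0],
})

required_columns = {"name", "longitude", "latitude"}
assert required_columns.issubset(location_df.columns)

location_map = {}
for _, row in location_df.iterrows():
    name, lon, lat = row["name"], row["longitude"], row["latitude"]
    if pd.isna(name) or pd.isna(lon) or pd.isna(lat):
        continue
    if not (-180 <= lon <= 180) or not (-90 <= lat <= 90):
        continue  # silently skipped, matching the new behavior
    location_map[str(name).strip()] = (float(lat), float(lon))  # tuple order assumed

print(location_map)  # {'bus_north': (52.5, 13.4)}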
@@ -1564,16 +1562,9 @@ class NetCDFModelImporter:
                 if strict_validation:
                     raise
                 else:
-                    self.logger.warning(
-                        f"Skipping undefined/invalid attribute '{attr_name}' for {component_type} component {component_id}: {e}"
-                    )
                     skipped_count += 1
                     continue
             else:
-                # Log but don't fail on other attribute import errors (like type conversion issues)
-                self.logger.warning(
-                    f"Skipping attribute {attr_name} for component {component_id}: {e}"
-                )
                 skipped_count += 1
 
     def _import_component_timeseries(
@@ -1632,9 +1623,6 @@ class NetCDFModelImporter:
                 values.append(float(value))
 
             if not values:
-                self.logger.warning(
-                    f"No valid timeseries points for '{attr_name}' on {component_type} '{component_name}'"
-                )
                 continue
 
             # Use optimized timeseries attribute setting
@@ -1647,16 +1635,8 @@ class NetCDFModelImporter:
                 if strict_validation:
                     raise
                 else:
-                    self.logger.warning(
-                        f"Skipping timeseries attribute '{attr_name}' for {component_type} component '{component_name}': {e}"
-                    )
                     continue
 
-        if timeseries_count > 0:
-            self.logger.debug(
-                f"Imported {timeseries_count} timeseries attributes for {component_type} '{component_name}'"
-            )
-
     def _generate_unique_name(self, base_name: str, component_type: str) -> str:
         """
         Generate a unique name for a component, ensuring no duplicates across all component types.
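
Note: the static-attribute and timeseries paths share one error policy: strict_validation turns any per-attribute failure into a hard error, otherwise the attribute is counted and skipped, now without a warning. Reduced to a runnable sketch with illustrative names:

def import_attribute(name, value):  # stand-in for the real import call
    if value is None:
        raise ValueError(f"invalid value for {name}")

strict_validation = False
attributes = {"p_nom": 100.0, "bad_attr": None}
skipped_count = 0

for attr_name, value in attributes.items():
    try:
        import_attribute(attr_name, value)
    except Exception:
        if strict_validation:
            raise              # strict mode: fail the whole import
        skipped_count += 1     # lenient mode: skip silently (warnings removed)
        continue

print(skipped_count)  # 1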
@@ -1720,7 +1700,7 @@ class NetCDFModelImporter:
             base_angle = (2 * math.pi * component_index) / component_count_at_bus
             angle_jitter = random.uniform(
                 -math.pi / 8, math.pi / 8
-            )  #
+            )  # +/- 22.5 degrees jitter
             angle = base_angle + angle_jitter
 
             # Vary distance randomly within the radius (use more of the available radius)
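
Note: components sharing a bus are placed at evenly spaced base angles around it, each nudged by up to ±π/8 (the restored "22.5 degrees" comment). The placement math from the hunk, runnable on its own:

import math
import random

random.seed(42)  # the importer seeds RNGs for reproducible layouts

component_count_at_bus = 4
for component_index in range(component_count_at_bus):
    base_angle = (2 * math.pi * component_index) / component_count_at_bus
    angle_jitter = random.uniform(-math.pi / 8, math.pi / 8)  # +/- 22.5 degrees
    angle = base_angle + angle_jitter
    print(f"component {component_index}: {math.degrees(angle):6.1f} deg")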
@@ -1811,23 +1791,12 @@ class NetCDFModelImporter:
 
         # Try exact match first
         if component_name in location_map:
-            coordinates = location_map[component_name]
-            self.logger.debug(
-                f"CSV location exact match for '{component_name}': {coordinates}"
-            )
-            return coordinates
+            return location_map[component_name]
 
         # Try resolving back to original name
         original_name = self._resolve_original_component_name(component_name)
         if original_name != component_name and original_name in location_map:
-            coordinates = location_map[original_name]
-            self.logger.debug(
-                f"CSV location resolved match for '{component_name}' -> '{original_name}': {coordinates}"
-            )
-            return coordinates
+            return location_map[original_name]
 
         # No match found
-        self.logger.debug(
-            f"No CSV location found for component '{component_name}' (original: '{original_name}')"
-        )
         return None
pyconvexity/solvers/pypsa/api.py
CHANGED
@@ -4,7 +4,6 @@ High-level API for PyPSA solver integration.
 Provides user-friendly functions for the most common workflows.
 """
 
-import logging
 from typing import Dict, Any, Optional, Callable
 
 from pyconvexity.core.database import database_context
@@ -13,8 +12,6 @@ from pyconvexity.solvers.pypsa.solver import NetworkSolver
 from pyconvexity.solvers.pypsa.storage import ResultStorage
 from pyconvexity.solvers.pypsa.constraints import ConstraintApplicator
 
-logger = logging.getLogger(__name__)
-
 
 def solve_network(
     db_path: str,
@@ -143,19 +140,11 @@ def solve_network(
 
                 # Only optimize if there's significant free space (>5% threshold for post-solve)
                 if should_optimize_database(conn, free_space_threshold_percent=5.0):
-
-                    optimization_result = optimize_database(conn)
-                    logger.info(
-                        f"Database optimization completed: {optimization_result['space_reclaimed']:,} bytes reclaimed"
-                    )
-                else:
-                    logger.debug(
-                        "Skipping database optimization - insufficient free space"
-                    )
-
-            except Exception as e:
+                    optimize_database(conn)
+
+            except Exception:
                 # Don't fail the solve if optimization fails
-
+                pass
 
             if progress_callback:
                 progress_callback(100, "Complete")
@@ -448,9 +437,6 @@ def _transform_to_comprehensive_format(
         return comprehensive_result
 
     except Exception as e:
-        logger.error(
-            f"Failed to transform result to comprehensive format: {e}", exc_info=True
-        )
         # Return original result with error info if transformation fails
         return {
             **pyconvexity_result,
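
Note: `solve_network` no longer logs anything itself; progress and errors surface only through the callback and raised exceptions. A usage sketch; only `db_path: str` as the first parameter is visible in this diff, so the remaining arguments are assumptions:

from pyconvexity.solvers.pypsa.api import solve_network

def on_progress(pct, msg):
    print(f"{pct}%: {msg}")

# db_path is confirmed by the diff; progress_callback is inferred from the
# progress_callback(100, "Complete") call shown above and may differ.
result = solve_network(db_path="model.db", progress_callback=on_progress)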
pyconvexity/solvers/pypsa/batch_loader.py
CHANGED
@@ -3,7 +3,6 @@ PyPSA Batch Data Loader
 Simplified to always create MultiIndex timeseries for consistent multi-period optimization.
 """
 
-import logging
 import pandas as pd
 import json
 from typing import Dict, Any, List, Optional
@@ -11,8 +10,6 @@ from typing import Dict, Any, List, Optional
 from pyconvexity.models.attributes import get_timeseries
 from pyconvexity.models import get_network_time_periods
 
-logger = logging.getLogger(__name__)
-
 
 class PyPSABatchLoader:
     """
@@ -156,7 +153,6 @@ class PyPSABatchLoader:
         # Get network time periods for proper timestamp alignment
         network_time_periods = get_network_time_periods(conn)
         if not network_time_periods:
-            logger.warning("No time periods found for network")
             return {comp_id: {} for comp_id in component_ids}
 
         # Convert to timestamps and extract years
@@ -252,15 +248,8 @@ class PyPSABatchLoader:
                             component_timeseries[comp_id][attr_name] = pd.Series(
                                 values, index=multi_index
                             )
-                        else:
-                            logger.warning(
-                                f"No valid timestamps for timeseries {attr_name}"
-                            )
 
-                    except Exception as e:
-                        logger.warning(
-                            f"Failed to load timeseries {attr_name} for component {comp_id}: {e}"
-                        )
+                    except Exception:
                         continue
 
         return component_timeseries
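
Note: the loader always builds two-level MultiIndex series (the docstring's "consistent multi-period optimization"), pairing each timestamp with its period. A sketch of the resulting shape, with illustrative level names:

import pandas as pd

timestamps = pd.to_datetime(["2030-01-01 00:00", "2030-01-01 01:00"])
multi_index = pd.MultiIndex.from_arrays(
    [timestamps.year, timestamps], names=["period", "timestep"]  # level names assumed
)
values = [100.0, 95.0]
series = pd.Series(values, index=multi_index)  # mirrors the pd.Series(values, index=multi_index) call above
print(series)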