pyconvexity 0.4.3__py3-none-any.whl → 0.4.6.post1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pyconvexity might be problematic.
- pyconvexity/_version.py +1 -1
- pyconvexity/data/schema/03_validation_data.sql +2 -1
- pyconvexity/io/netcdf_exporter.py +1 -7
- pyconvexity/io/netcdf_importer.py +82 -113
- pyconvexity/solvers/pypsa/api.py +4 -18
- pyconvexity/solvers/pypsa/batch_loader.py +1 -12
- pyconvexity/solvers/pypsa/builder.py +3 -23
- pyconvexity/solvers/pypsa/solver.py +4 -71
- pyconvexity/solvers/pypsa/storage.py +1 -47
- pyconvexity-0.4.6.post1.dist-info/METADATA +148 -0
- {pyconvexity-0.4.3.dist-info → pyconvexity-0.4.6.post1.dist-info}/RECORD +13 -13
- pyconvexity-0.4.3.dist-info/METADATA +0 -47
- {pyconvexity-0.4.3.dist-info → pyconvexity-0.4.6.post1.dist-info}/WHEEL +0 -0
- {pyconvexity-0.4.3.dist-info → pyconvexity-0.4.6.post1.dist-info}/top_level.txt +0 -0
pyconvexity/solvers/pypsa/builder.py

@@ -4,7 +4,6 @@ Network building functionality for PyPSA solver integration.
 Simplified to always use MultiIndex format for consistent multi-period optimization.
 """
 
-import logging
 import json
 import pandas as pd
 from typing import Dict, Any, Optional, Callable
@@ -15,8 +14,6 @@ from pyconvexity.models import (
     get_network_info,
 )
 
-logger = logging.getLogger(__name__)
-
 
 class NetworkBuilder:
     """
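The lines deleted here (and again in solver.py and storage.py below) are the standard module-level logger pattern. A minimal runnable sketch of what the removed code provided, for readers comparing the two versions; the function body is a hypothetical stand-in:

import logging

# Module-level logger, as deleted from builder.py, solver.py, and storage.py.
logger = logging.getLogger(__name__)

def build_network():
    try:
        raise RuntimeError("simulated failure")  # stand-in for real work
    except Exception as e:
        # 0.4.3 surfaced failures like this; 0.4.6.post1 replaces such
        # calls with bare `pass`, so the same failure is now silent.
        logger.warning("Network build step failed: %s", e)

logging.basicConfig(level=logging.WARNING)
build_network()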
@@ -140,7 +137,7 @@ class NetworkBuilder:
                 for tp in time_periods
             ]
         except Exception as e:
-
+            pass  # Failed to load time periods
 
         # Load all component types
         component_types = [
@@ -173,7 +170,7 @@ class NetworkBuilder:
                 for comp in components
             ]
         except Exception as e:
-
+            pass  # Failed to load components
 
         return data
 
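A side note on the replacement pattern: `try/except Exception: pass` swallows all errors invisibly. When silent skipping is intended, `contextlib.suppress` states that intent explicitly; a sketch of the stdlib alternative (not what the package uses), with a hypothetical stand-in for the database query:

import contextlib

def load_time_periods():
    # Hypothetical stand-in for the database query in builder.py.
    raise RuntimeError("no database connection")

data = {}
with contextlib.suppress(Exception):
    data["time_periods"] = load_time_periods()

print(data)  # {} -- the failure leaves no trace, matching the new behavior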
@@ -188,7 +185,6 @@ class NetworkBuilder:
         try:
             time_periods = get_network_time_periods(conn)
             if not time_periods:
-                logger.error("No time periods found for network")
                 return
 
             # Convert to pandas DatetimeIndex
@@ -221,12 +217,7 @@ class NetworkBuilder:
             # Store years for statistics
             network._available_years = years
 
-            logger.info(
-                f"Time index: {len(multi_index)} snapshots across {len(years)} investment periods: {years}"
-            )
-
         except Exception as e:
-            logger.error(f"Failed to set time index: {e}")
             network._available_years = []
 
     def _load_carriers(self, conn, network: "pypsa.Network"):
@@ -381,14 +372,8 @@ class NetworkBuilder:
         if include_unmet_loads:
             unmet_loads = list_components_by_type(conn, "UNMET_LOAD")
             all_generators = generators + unmet_loads
-            if self.verbose:
-                logger.info(
-                    f"Loading {len(generators)} generators and {len(unmet_loads)} unmet loads"
-                )
         else:
            all_generators = generators
-            if self.verbose:
-                logger.info(f"Loading {len(generators)} generators (unmet loads disabled)")
 
         generator_ids = [gen.id for gen in all_generators]
 
@@ -635,12 +620,8 @@ class NetworkBuilder:
                 network.snapshot_weightings.loc[:, "objective"] = weightings
                 network.snapshot_weightings.loc[:, "generators"] = weightings
                 network.snapshot_weightings.loc[:, "stores"] = weightings
-            else:
-                logger.warning(
-                    f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})"
-                )
         except Exception as e:
-
+            pass  # Failed to set snapshot weightings
 
     def _parse_time_interval(self, time_interval: str) -> Optional[float]:
         """Parse time interval string to hours."""
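For readers unfamiliar with the weightings being set here: PyPSA scales each snapshot's contribution to costs and energy totals by `snapshot_weightings`. A minimal sketch of the effect for 3-hourly snapshots, assuming a bare `pypsa.Network` (the column names match PyPSA's defaults):

import pandas as pd
import pypsa

network = pypsa.Network()
network.set_snapshots(pd.date_range("2030-01-01", periods=8, freq="3h"))

# One weight per snapshot: each 3-hourly snapshot represents 3 hours.
weightings = [3.0] * len(network.snapshots)
if len(weightings) == len(network.snapshots):
    network.snapshot_weightings.loc[:, "objective"] = weightings
    network.snapshot_weightings.loc[:, "generators"] = weightings
    network.snapshot_weightings.loc[:, "stores"] = weightings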
@@ -661,7 +642,6 @@ class NetworkBuilder:
             # Try to parse as float (assume hours)
             return float(time_interval)
         except (ValueError, TypeError):
-            logger.warning(f"Could not parse time interval: {time_interval}")
             return None
 
     def _build_bus_id_to_name_map(self, conn) -> Dict[int, str]:
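The body of `_parse_time_interval` sits mostly outside this hunk; one plausible implementation of "parse time interval string to hours", consistent with the visible float fallback and the `Optional[float]` signature (an assumption, not the package's actual code):

from typing import Optional
import pandas as pd

def parse_time_interval(time_interval: str) -> Optional[float]:
    """Parse strings like '1h', '30min', or '0.5' into hours."""
    try:
        # pandas understands unit suffixes such as 'h', 'min', and 's'.
        return pd.Timedelta(time_interval).total_seconds() / 3600.0
    except (ValueError, TypeError):
        pass
    try:
        # Bare numbers are assumed to already be in hours.
        return float(time_interval)
    except (ValueError, TypeError):
        return None

print(parse_time_interval("30min"))  # 0.5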
pyconvexity/solvers/pypsa/solver.py

@@ -4,15 +4,12 @@ Solving functionality for PyPSA networks.
 Simplified to always use multi-period optimization for consistency.
 """
 
-import logging
 import time
 import uuid
 import pandas as pd
 import numpy as np
 from typing import Dict, Any, Optional
 
-logger = logging.getLogger(__name__)
-
 
 class NetworkSolver:
     """
@@ -66,7 +63,6 @@ class NetworkSolver:
             return settings_file if settings_file.exists() else None
 
         except Exception as e:
-            logger.warning(f"Failed to determine user settings path: {e}")
             return None
 
     def _resolve_default_solver(self) -> str:
@@ -76,9 +72,6 @@ class NetworkSolver:
 
         settings_path = self._get_user_settings_path()
         if not settings_path:
-            logger.debug(
-                "User settings file not found, using 'highs' as default solver"
-            )
             return "highs"
 
         with open(settings_path, "r") as f:
@@ -115,13 +108,9 @@ class NetworkSolver:
             if default_solver in known_solvers:
                 return default_solver
             else:
-                logger.warning(
-                    f"Unknown default solver '{default_solver}' in user settings, falling back to 'highs'"
-                )
                 return "highs"
 
         except Exception as e:
-            logger.warning(f"Failed to read default solver from user settings: {e}")
             return "highs"
 
     def solve_network(
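Taken together, these hunks show the resolution chain: read a user settings file, validate the configured solver, and fall back to 'highs' at every failure point. A condensed sketch (the file location, JSON keys, and known-solver set are assumptions; the diff shows only the validation and fallbacks):

import json
from pathlib import Path

KNOWN_SOLVERS = {"highs", "gurobi", "cplex", "cbc", "glpk"}  # assumed set

def resolve_default_solver(settings_path: Path) -> str:
    try:
        if not settings_path.exists():
            return "highs"
        with open(settings_path, "r") as f:
            settings = json.load(f)
        default_solver = settings.get("default_solver", "highs")
        # Unknown names fall back to 'highs', mirroring the diff above.
        return default_solver if default_solver in KNOWN_SOLVERS else "highs"
    except Exception:
        return "highs"

# Hypothetical settings location, for illustration only.
print(resolve_default_solver(Path("~/.pyconvexity/settings.json").expanduser()))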
@@ -173,8 +162,6 @@ class NetworkSolver:
 
         years = list(network.investment_periods)
 
-        logger.info(f"Solving with {actual_solver_name}: {len(years)} periods {years}, discount rate {effective_discount_rate}")
-
         # Calculate investment period weightings with discount rate
         self._calculate_investment_weightings(network, effective_discount_rate)
 
@@ -206,17 +193,10 @@ class NetworkSolver:
             extra_functionality = self._create_extra_functionality(
                 model_constraints, constraint_applicator
             )
-            if self.verbose:
-                logger.info(
-                    f"Prepared {len(model_constraints)} model constraints for optimization"
-                )
 
             # NOTE: Model constraints are applied DURING solve via extra_functionality
             # Network constraints were already applied to the network structure before solve
 
-            if self.verbose:
-                logger.info(f"Snapshots: {len(network.snapshots)}, Solver options: {solver_config}")
-
             if solver_config:
                 result = network.optimize(
                     solver_name=actual_solver_name,
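`extra_functionality` is PyPSA's documented hook for adding custom constraints while the optimization model is being built; the hunk above wires the prepared model constraints into it. A minimal runnable sketch of the call pattern on a toy network (the package presumably also passes multi-period options, which this sketch omits):

import pandas as pd
import pypsa

network = pypsa.Network()
network.set_snapshots(pd.date_range("2030-01-01", periods=4, freq="h"))
network.add("Bus", "b")
network.add("Load", "l", bus="b", p_set=10.0)
network.add("Generator", "g", bus="b", p_nom=20.0, marginal_cost=5.0)

def extra_functionality(network, snapshots):
    # Called by PyPSA after the Linopy model is built and before solving;
    # custom constraints would be attached to network.model here.
    pass

result = network.optimize(solver_name="highs", extra_functionality=extra_functionality)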
@@ -252,17 +232,10 @@ class NetworkSolver:
             solve_result["year_statistics"] = year_statistics
             solve_result["year_statistics_available"] = len(year_statistics) > 0
 
-            objective_value = getattr(network, "objective", None)
-            logger.info(
-                f"Solve completed in {solve_time:.2f}s - status: {solve_result['status']}, objective: {objective_value}"
-            )
-
             return solve_result
 
         except Exception as e:
             solve_time = time.time() - start_time
-            logger.error(f"Solve failed after {solve_time:.2f} seconds: {e}")
-            logger.exception("Full solve error traceback:")
 
             return {
                 "success": False,
@@ -609,10 +582,7 @@ class NetworkSolver:
             return solver_name, solver_options
 
         else:
-            # Unknown solver name -
-            logger.warning(
-                f"Unknown solver name '{solver_name}' - falling back to 'highs'"
-            )
+            # Unknown solver name - fall back to highs
             return "highs", solver_options
 
     def _detect_constraint_type(self, constraint_code: str) -> str:
@@ -690,14 +660,10 @@ class NetworkSolver:
                         network, snapshots, constraint
                     )
                 except Exception as e:
-                    logger.error(
-                        f"Failed to apply optimization constraint {constraint.get('name', 'unknown')}: {e}"
-                    )
                     continue
 
         except Exception as e:
-
-            # Don't re-raise - let optimization continue
+            pass  # Don't re-raise - let optimization continue
 
         return extra_functionality
 
@@ -717,9 +683,6 @@ class NetworkSolver:
 
         if weight is None:
             weight = 1.0
-            logger.warning(
-                f"Could not parse time interval '{time_interval}', using default weight of 1.0"
-            )
 
         # Create weightings array - all snapshots get the same weight for this time resolution
         weightings = [weight] * len(time_periods)
@@ -729,14 +692,8 @@ class NetworkSolver:
                 network.snapshot_weightings.loc[:, "objective"] = weightings
                 network.snapshot_weightings.loc[:, "generators"] = weightings
                 network.snapshot_weightings.loc[:, "stores"] = weightings
-            else:
-                logger.warning(
-                    f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})"
-                )
         except Exception as e:
-
-                f"Failed to set snapshot weightings after multi-period setup: {e}"
-            )
+            pass  # Failed to set snapshot weightings
 
     def _parse_time_interval(self, time_interval: str) -> Optional[float]:
         """Parse time interval string to hours - handles multiple formats."""
@@ -780,7 +737,6 @@ class NetworkSolver:
             return float(interval)
 
         except (ValueError, TypeError) as e:
-            logger.warning(f"Could not parse time interval '{time_interval}': {e}")
             return None
 
     def _calculate_investment_weightings(
@@ -872,8 +828,7 @@ class NetworkSolver:
             network.investment_period_weightings = weightings_df
 
         except Exception as e:
-
-            logger.exception("Full traceback:")
+            pass  # Failed to calculate investment weightings
 
     def _extract_solve_results(
         self,
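`_calculate_investment_weightings` is the discounting step referenced earlier ("investment period weightings with discount rate"). The exact formula isn't in the diff; a common construction weights each period's objective by the sum of discount factors 1/(1+r)^(t-t0) over the years it covers, which the sketch below implements (the column names follow PyPSA's `investment_period_weightings` convention):

import pandas as pd

def investment_weightings(years, discount_rate):
    """Objective weight per period = sum of discount factors over its span."""
    base = years[0]
    rows = {}
    for i, year in enumerate(years):
        span = years[i + 1] - year if i + 1 < len(years) else 1
        factors = [
            1.0 / (1.0 + discount_rate) ** (y - base)
            for y in range(year, year + span)
        ]
        rows[year] = {"years": span, "objective": sum(factors)}
    return pd.DataFrame.from_dict(rows, orient="index")

weightings_df = investment_weightings([2030, 2040, 2050], 0.05)
# network.investment_period_weightings = weightings_df  # as in the diff
print(weightings_df)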
@@ -934,7 +889,6 @@ class NetworkSolver:
             return solve_result
 
         except Exception as e:
-            logger.error(f"Failed to extract solve results: {e}")
             return {
                 "success": False,
                 "status": "extraction_failed",
@@ -983,13 +937,9 @@ class NetworkSolver:
             if "optimal" in term_condition:
                 return True
 
-            logger.warning(
-                f"Could not determine solve success: status={status}, objective={objective_value}"
-            )
             return False
 
         except Exception as e:
-            logger.error(f"Error determining solve success: {e}")
             return False
 
     def _convert_pypsa_result_to_dict(self, result) -> Dict[str, Any]:
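Only one rule of the success check is visible after the edit (an 'optimal' termination condition counts as success). A sketch of the overall shape the method appears to have; the status/objective checks are assumptions based on PyPSA's ('ok', 'optimal') result tuples:

from typing import Optional

def determine_solve_success(status: str, term_condition: str,
                            objective_value: Optional[float]) -> bool:
    try:
        # Assumed: PyPSA-style ('ok', 'optimal') status tuples.
        if status == "ok" and objective_value is not None:
            return True
        if "optimal" in term_condition:
            return True
        return False
    except Exception:
        return False

print(determine_solve_success("ok", "optimal", 1234.5))  # True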
@@ -1029,7 +979,6 @@ class NetworkSolver:
             return result_dict
 
         except Exception as e:
-            logger.warning(f"Failed to convert PyPSA result to dict: {e}")
             return {"status": "conversion_failed", "error": str(e)}
 
     def _calculate_comprehensive_network_statistics(
@@ -1125,7 +1074,6 @@ class NetworkSolver:
             else:
                 statistics["pypsa_statistics"] = {}
         except Exception as e:
-            logger.error(f"Failed to calculate PyPSA statistics: {e}")
             statistics["pypsa_statistics"] = {}
 
         # Custom statistics - calculate detailed breakdowns
@@ -1205,10 +1153,6 @@ class NetworkSolver:
             return statistics
 
         except Exception as e:
-            logger.error(
-                f"Failed to calculate comprehensive network statistics: {e}",
-                exc_info=True,
-            )
             return {
                 "error": str(e),
                 "core_summary": {},
@@ -1245,15 +1189,11 @@ class NetworkSolver:
                     )
                     year_statistics[year] = year_stats
                 except Exception as e:
-                    logger.error(f"Failed to calculate statistics for year {year}: {e}")
                     continue
 
             return year_statistics
 
         except Exception as e:
-            logger.error(
-                f"Failed to calculate year-based statistics: {e}", exc_info=True
-            )
             return {}
 
     def _calculate_network_statistics_for_year(
@@ -1367,10 +1307,6 @@ class NetworkSolver:
             return statistics
 
         except Exception as e:
-            logger.error(
-                f"Failed to calculate network statistics for year {year}: {e}",
-                exc_info=True,
-            )
             return {
                 "error": str(e),
                 "core_summary": {},
@@ -1402,7 +1338,6 @@ class NetworkSolver:
             return None
 
         except Exception as e:
-            logger.error(f"Failed to filter timeseries by year {year}: {e}")
             return None
 
     def _get_year_weightings(self, network: "pypsa.Network", year: int) -> "np.ndarray":
@@ -1438,7 +1373,6 @@ class NetworkSolver:
             return None
 
         except Exception as e:
-            logger.error(f"Failed to get year weightings for year {year}: {e}")
             return None
 
     def _count_year_snapshots(self, snapshots: "pd.Index", year: int) -> int:
@@ -1459,7 +1393,6 @@ class NetworkSolver:
             return 0
 
         except Exception as e:
-            logger.error(f"Failed to count snapshots for year {year}: {e}")
             return 0
 
     def _calculate_year_carrier_statistics(
pyconvexity/solvers/pypsa/storage.py

@@ -4,7 +4,6 @@ Result storage functionality for PyPSA solver integration.
 Handles storing solve results back to the database with proper validation and error handling.
 """
 
-import logging
 import uuid
 import pandas as pd
 import numpy as np
@@ -18,8 +17,6 @@ from pyconvexity.models import (
 )
 from pyconvexity.validation import get_validation_rule
 
-logger = logging.getLogger(__name__)
-
 
 class ResultStorage:
     """
@@ -80,12 +77,6 @@ class ResultStorage:
             )
             conn.commit()
 
-            total_gen = network_stats.get("core_summary", {}).get("total_generation_mwh", 0)
-            total_cost = network_stats.get("core_summary", {}).get("total_cost", 0)
-            logger.info(
-                f"Results stored: {total_gen:.0f} MWh generation, {total_cost:.0f} cost, {year_stats_stored} years"
-            )
-
             return {
                 "component_stats": component_stats,
                 "network_stats": network_stats,
@@ -95,7 +86,6 @@ class ResultStorage:
             }
 
         except Exception as e:
-            logger.error(f"Result storage failed: {e}")
             return {
                 "component_stats": {},
                 "network_stats": {},
@@ -200,7 +190,6 @@ class ResultStorage:
             return results_stats
 
         except Exception as e:
-            logger.error(f"Error storing solve results: {e}", exc_info=True)
             results_stats["errors"] += 1
             return results_stats
 
@@ -272,9 +261,6 @@ class ResultStorage:
                     ):
                         continue
                     else:
-                        logger.warning(
-                            f"Error storing timeseries {attr_name} for {component_type} '{component_name}': {e}"
-                        )
                         continue
 
             # Store static optimization results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
@@ -325,17 +311,11 @@ class ResultStorage:
                     ):
                         continue
                     else:
-                        logger.warning(
-                            f"Error storing static {attr_name} for {component_type} '{component_name}': {e}"
-                        )
                         continue
 
             return stored_count
 
         except Exception as e:
-            logger.error(
-                f"Error storing results for {component_type}: {e}", exc_info=True
-            )
             return stored_count
 
     def _store_solve_summary(
@@ -394,7 +374,6 @@ class ResultStorage:
             )
 
         except Exception as e:
-            logger.error(f"Failed to store solve summary: {e}")
             raise  # Re-raise to trigger rollback
 
     def _calculate_network_statistics(
@@ -487,13 +466,9 @@ class ResultStorage:
                 },
             }
 
-            logger.info(
-                f"Calculated network statistics: core_summary={network_statistics['core_summary']}"
-            )
             return network_statistics
 
         except Exception as e:
-            logger.error(f"Failed to calculate network statistics: {e}", exc_info=True)
             # Return empty structure matching expected format
             return {
                 "core_summary": {
@@ -549,7 +524,6 @@ class ResultStorage:
             years = network._available_years
         else:
             years = [2020]  # Fallback
-            logger.warning(f"No year information found, using fallback: {years}")
 
         # Calculate per-year statistics first
         all_year_stats = {
@@ -618,7 +592,6 @@ class ResultStorage:
             return all_year_stats
 
         except Exception as e:
-            logger.error(f"Failed to calculate carrier statistics: {e}")
             return {
                 "dispatch_by_carrier": {},
                 "power_capacity_by_carrier": {},
@@ -718,13 +691,11 @@ class ResultStorage:
                     stored_count += 1
 
                 except Exception as e:
-                    logger.error(f"Failed to store statistics for year {year}: {e}")
                     continue
 
             return stored_count
 
         except Exception as e:
-            logger.error(f"Failed to store year-based statistics: {e}")
             return 0
 
     def _calculate_year_carrier_statistics(
@@ -805,9 +776,6 @@ class ResultStorage:
                     else:
                         # Fallback: simple sum (will be incorrect for non-1H models)
                         generation_mwh = float(year_generation[gen_name].sum())
-                        logger.warning(
-                            f"Could not apply snapshot weightings for {gen_name} in year {year} - energy may be incorrect"
-                        )
 
                     if carrier_name in carrier_stats["dispatch_by_carrier"]:
                         carrier_stats["dispatch_by_carrier"][
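The fallback flagged above ("will be incorrect for non-1H models") is the reason snapshot weightings exist: dispatch is power in MW per snapshot, so energy in MWh requires multiplying by the hours each snapshot represents. A sketch of the difference with hypothetical 3-hourly data:

import pandas as pd

snapshots = pd.date_range("2030-01-01", periods=4, freq="3h")
dispatch_mw = pd.Series([100.0, 80.0, 120.0, 90.0], index=snapshots)
weightings = pd.Series(3.0, index=snapshots)  # hours per snapshot

weighted_mwh = float((dispatch_mw * weightings).sum())  # 1170.0 -- correct
naive_mwh = float(dispatch_mw.sum())  # 390.0 -- the flagged fallback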
@@ -852,9 +820,6 @@ class ResultStorage:
                         discharge_mwh = float(
                             year_storage[su_name].clip(lower=0).sum()
                         )
-                        logger.warning(
-                            f"Could not apply snapshot weightings for storage unit {su_name} in year {year} - energy may be incorrect"
-                        )
 
                     if carrier_name in carrier_stats["dispatch_by_carrier"]:
                         carrier_stats["dispatch_by_carrier"][
@@ -897,9 +862,6 @@ class ResultStorage:
                         discharge_mwh = float(
                             year_stores[store_name].clip(lower=0).sum()
                         )
-                        logger.warning(
-                            f"Could not apply snapshot weightings for store {store_name} in year {year} - energy may be incorrect"
-                        )
 
                     if carrier_name in carrier_stats["dispatch_by_carrier"]:
                         carrier_stats["dispatch_by_carrier"][
@@ -1741,7 +1703,6 @@ class ResultStorage:
             return carrier_stats
 
         except Exception as e:
-            logger.error(f"Failed to calculate year {year} carrier statistics: {e}")
             return {
                 "dispatch_by_carrier": {},
                 "power_capacity_by_carrier": {},
@@ -1829,7 +1790,7 @@ class ResultStorage:
             )
 
         except Exception as e:
-
+            pass  # Failed to process last year results
 
         # For other stats (dispatch, emissions, costs): sum across all years
         for year, results_json in year_results:
@@ -1881,13 +1842,11 @@ class ResultStorage:
                 )
 
             except Exception as e:
-                logger.error(f"Failed to process year {year} results: {e}")
                 continue
 
         return totals
 
     except Exception as e:
-        logger.error(f"Failed to sum year-based carrier statistics: {e}")
         # Return empty structure on error
         return {
             "dispatch_by_carrier": {},
@@ -1917,7 +1876,6 @@ class ResultStorage:
             }
             return json.dumps(results, default=self._json_serializer)
         except Exception as e:
-            logger.warning(f"Failed to serialize results JSON: {e}")
             return json.dumps({"error": "serialization_failed"})
 
     def _serialize_metadata_json(self, solve_result: Dict[str, Any]) -> str:
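Both serialization helpers pass `default=self._json_serializer` to `json.dumps`. The serializer's body isn't in the diff, but for PyPSA results it typically has to convert numpy and pandas scalars; a plausible sketch (an assumption about the method's contents):

import json
import numpy as np
import pandas as pd

def json_serializer(obj):
    # json.dumps calls this only for objects it cannot encode natively.
    if isinstance(obj, (np.integer, np.floating)):
        return obj.item()
    if isinstance(obj, np.ndarray):
        return obj.tolist()
    if isinstance(obj, pd.Timestamp):
        return obj.isoformat()
    raise TypeError(f"Not JSON serializable: {type(obj)!r}")

print(json.dumps({"snapshots": np.int64(8760)}, default=json_serializer))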
@@ -1935,7 +1893,6 @@ class ResultStorage:
             }
             return json.dumps(metadata, default=self._json_serializer)
         except Exception as e:
-            logger.warning(f"Failed to serialize metadata JSON: {e}")
             return json.dumps({"error": "serialization_failed"})
 
     def _filter_timeseries_by_year(
@@ -1961,7 +1918,6 @@ class ResultStorage:
             return None
 
         except Exception as e:
-            logger.error(f"Failed to filter timeseries by year {year}: {e}")
             return None
 
     def _get_year_weightings(self, network: "pypsa.Network", year: int) -> "np.ndarray":
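Both solver.py and storage.py carry a `_filter_timeseries_by_year` helper. With the MultiIndex snapshots set up in builder.py (investment period, timestep), filtering to one year amounts to a cross-section on the period level; a sketch of the likely approach (the method body isn't shown in the diff):

import pandas as pd

periods = [2030, 2030, 2040, 2040]
timesteps = pd.to_datetime(
    ["2030-01-01", "2030-01-02", "2040-01-01", "2040-01-02"]
)
snapshots = pd.MultiIndex.from_arrays(
    [periods, timesteps], names=["period", "timestep"]
)
series = pd.Series([1.0, 2.0, 3.0, 4.0], index=snapshots)

year_slice = series.xs(2030, level="period")  # one investment period
print(year_slice)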
@@ -1997,7 +1953,6 @@ class ResultStorage:
             return None
 
         except Exception as e:
-            logger.error(f"Failed to get year weightings for year {year}: {e}")
             return None
 
     def _calculate_total_demand(self, network: "pypsa.Network") -> float:
@@ -2024,7 +1979,6 @@ class ResultStorage:
             return total_demand
 
         except Exception as e:
-            logger.error(f"Failed to calculate total demand: {e}")
             return 0.0
 
     def _json_serializer(self, obj):