pyconvexity 0.4.0__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyconvexity/__init__.py +87 -46
- pyconvexity/_version.py +1 -1
- pyconvexity/core/__init__.py +3 -5
- pyconvexity/core/database.py +111 -103
- pyconvexity/core/errors.py +16 -10
- pyconvexity/core/types.py +61 -54
- pyconvexity/data/__init__.py +0 -1
- pyconvexity/data/loaders/cache.py +65 -64
- pyconvexity/data/schema/01_core_schema.sql +134 -234
- pyconvexity/data/schema/02_data_metadata.sql +38 -168
- pyconvexity/data/schema/03_validation_data.sql +327 -264
- pyconvexity/data/sources/gem.py +169 -139
- pyconvexity/io/__init__.py +4 -10
- pyconvexity/io/excel_exporter.py +694 -480
- pyconvexity/io/excel_importer.py +817 -545
- pyconvexity/io/netcdf_exporter.py +66 -61
- pyconvexity/io/netcdf_importer.py +850 -619
- pyconvexity/models/__init__.py +109 -59
- pyconvexity/models/attributes.py +197 -178
- pyconvexity/models/carriers.py +70 -67
- pyconvexity/models/components.py +260 -236
- pyconvexity/models/network.py +202 -284
- pyconvexity/models/results.py +65 -55
- pyconvexity/models/scenarios.py +58 -88
- pyconvexity/solvers/__init__.py +5 -5
- pyconvexity/solvers/pypsa/__init__.py +3 -3
- pyconvexity/solvers/pypsa/api.py +150 -134
- pyconvexity/solvers/pypsa/batch_loader.py +165 -162
- pyconvexity/solvers/pypsa/builder.py +390 -291
- pyconvexity/solvers/pypsa/constraints.py +184 -162
- pyconvexity/solvers/pypsa/solver.py +968 -663
- pyconvexity/solvers/pypsa/storage.py +1377 -671
- pyconvexity/timeseries.py +63 -60
- pyconvexity/validation/__init__.py +14 -6
- pyconvexity/validation/rules.py +95 -84
- pyconvexity-0.4.1.dist-info/METADATA +46 -0
- pyconvexity-0.4.1.dist-info/RECORD +42 -0
- pyconvexity/data/schema/04_scenario_schema.sql +0 -122
- pyconvexity/data/schema/migrate_add_geometries.sql +0 -73
- pyconvexity-0.4.0.dist-info/METADATA +0 -138
- pyconvexity-0.4.0.dist-info/RECORD +0 -44
- {pyconvexity-0.4.0.dist-info → pyconvexity-0.4.1.dist-info}/WHEEL +0 -0
- {pyconvexity-0.4.0.dist-info → pyconvexity-0.4.1.dist-info}/top_level.txt +0 -0
pyconvexity/solvers/pypsa/storage.py

@@ -12,7 +12,9 @@ from typing import Dict, Any, Optional, Callable
 
 from pyconvexity.core.types import StaticValue
 from pyconvexity.models import (
-    list_components_by_type,
+    list_components_by_type,
+    set_static_attribute,
+    set_timeseries_attribute,
 )
 from pyconvexity.validation import get_validation_rule
 
@@ -22,83 +24,125 @@ logger = logging.getLogger(__name__)
 class ResultStorage:
     """
     Handles storing PyPSA solve results back to the database.
-
+
     This class manages the complex process of extracting results from PyPSA networks
     and storing them back to the database with proper validation and error handling.
     """
-
+
     def store_results(
         self,
         conn,
-        network_id: int,
-        network: 'pypsa.Network',
+        network: "pypsa.Network",
         solve_result: Dict[str, Any],
-        scenario_id: Optional[int] = None
+        scenario_id: Optional[int] = None,
     ) -> Dict[str, Any]:
         """
-        Store complete solve results back to database.
-
+        Store complete solve results back to database (single network per database).
+
         Args:
             conn: Database connection
-            network_id: ID of the network
             network: Solved PyPSA Network object
             solve_result: Solve result metadata
-            scenario_id: Optional scenario ID
-
+            scenario_id: Optional scenario ID (NULL for base network)
+
         Returns:
             Dictionary with storage statistics
         """
-        run_id = solve_result.get('run_id', str(uuid.uuid4()))
-
+        run_id = solve_result.get("run_id", str(uuid.uuid4()))
+
         try:
+            logger.info("=" * 80)
+            logger.info("📊 STARTING RESULT STORAGE")
+            logger.info("=" * 80)
+
             # Store component results
-            component_stats = self._store_component_results(
-                conn, network_id, network, scenario_id
-            )
-
+            logger.info("📝 Step 1: Storing component results (timeseries data)...")
+            component_stats = self._store_component_results(conn, network, scenario_id)
+            logger.info(f"✅ Component results stored: {component_stats}")
+
             # Calculate network statistics first
+            logger.info("📊 Step 2: Calculating network statistics...")
             network_stats = self._calculate_network_statistics(
-                conn, network_id, network, solve_result
+                conn, network, solve_result
             )
-
+            logger.info(f"✅ Network statistics calculated")
+            logger.info(
+                f"   - Total generation: {network_stats.get('core_summary', {}).get('total_generation_mwh', 0):.2f} MWh"
+            )
+            logger.info(
+                f"   - Total cost: {network_stats.get('core_summary', {}).get('total_cost', 0):.2f}"
+            )
+
             # Store solve summary with network statistics
-            self._store_solve_summary(
-                conn, network_id, solve_result, scenario_id, network_stats
+            logger.info(
+                "💾 Step 3: Storing solve summary to network_solve_results table..."
             )
-
+            self._store_solve_summary(conn, solve_result, scenario_id, network_stats)
+            # Explicit commit after storing summary
+            conn.commit()
+            logger.info("✅ Solve summary stored and committed successfully")
+
             # Store year-based statistics if available
             year_stats_stored = 0
-            if solve_result.get('year_statistics'):
+            if solve_result.get("year_statistics"):
+                logger.info(
+                    f"📅 Step 4: Storing year-based statistics ({len(solve_result['year_statistics'])} years)..."
+                )
                 year_stats_stored = self._store_year_based_statistics(
-                    conn, network_id, network, solve_result['year_statistics'], scenario_id
+                    conn, network, solve_result["year_statistics"], scenario_id
                 )
-
+                # Explicit commit after storing year statistics
+                conn.commit()
+                logger.info(
+                    f"✅ Year-based statistics stored and committed: {year_stats_stored} years"
+                )
+            else:
+                logger.warning(
+                    "⚠️ No year_statistics in solve_result - skipping year-based storage"
+                )
+                logger.info(
+                    f"   Available solve_result keys: {list(solve_result.keys())}"
+                )
+
+            logger.info("=" * 80)
+            logger.info("✅ RESULT STORAGE COMPLETED SUCCESSFULLY")
+            logger.info("=" * 80)
+
+            # Final verification - check if data was actually stored
+            cursor = conn.execute(
+                "SELECT COUNT(*) FROM network_solve_results WHERE scenario_id IS ? ",
+                (scenario_id,) if scenario_id is not None else (None,),
+            )
+            count = cursor.fetchone()[0]
+            logger.info(
+                f"🔍 Verification: {count} solve result(s) found in network_solve_results table"
+            )
+
             return {
                 "component_stats": component_stats,
                 "network_stats": network_stats,
                 "year_stats_stored": year_stats_stored,
                 "run_id": run_id,
-                "success": True
+                "success": True,
             }
-
+
         except Exception as e:
-            logger.error(
+            logger.error("=" * 80)
+            logger.error(f"❌ RESULT STORAGE FAILED: {e}")
+            logger.error("=" * 80)
+            logger.exception("Full traceback:")
             return {
                 "component_stats": {},
                 "network_stats": {},
                 "run_id": run_id,
                 "success": False,
-                "error": str(e)
+                "error": str(e),
             }
-
+
     def _store_component_results(
-        self,
-        conn,
-        network_id: int,
-        network: 'pypsa.Network',
-        scenario_id: Optional[int]
+        self, conn, network: "pypsa.Network", scenario_id: Optional[int]
     ) -> Dict[str, int]:
-        """Store results for all component types."""
+        """Store results for all component types (single network per database)."""
         results_stats = {
             "stored_bus_results": 0,
             "stored_generator_results": 0,
@@ -109,86 +153,113 @@ class ResultStorage:
             "stored_storage_unit_results": 0,
             "stored_store_results": 0,
             "skipped_attributes": 0,
-            "errors": 0
+            "errors": 0,
         }
-
+
         try:
             # Store bus results
-            if hasattr(network, 'buses_t') and network.buses_t:
-                results_stats["stored_bus_results"] = self._store_component_type_results(
-                    conn, network_id, "BUS", network.buses, network.buses_t, scenario_id
+            if hasattr(network, "buses_t") and network.buses_t:
+                results_stats["stored_bus_results"] = (
+                    self._store_component_type_results(
+                        conn, "BUS", network.buses, network.buses_t, scenario_id
+                    )
                 )
-
+
             # Store generator results (includes regular generators)
-            if hasattr(network, 'generators_t') and network.generators_t:
-                results_stats["stored_generator_results"] = self._store_component_type_results(
-                    conn, network_id, "GENERATOR", network.generators, network.generators_t, scenario_id
+            if hasattr(network, "generators_t") and network.generators_t:
+                results_stats["stored_generator_results"] = (
+                    self._store_component_type_results(
+                        conn,
+                        "GENERATOR",
+                        network.generators,
+                        network.generators_t,
+                        scenario_id,
+                    )
                 )
-
+
             # Store UNMET_LOAD results (these are also stored as generators in PyPSA)
-            results_stats["stored_unmet_load_results"] = self._store_component_type_results(
-                conn, network_id, "UNMET_LOAD", network.generators, network.generators_t, scenario_id
+            results_stats["stored_unmet_load_results"] = (
+                self._store_component_type_results(
+                    conn,
+                    "UNMET_LOAD",
+                    network.generators,
+                    network.generators_t,
+                    scenario_id,
+                )
             )
-
+
             # Store load results
-            if hasattr(network, 'loads_t') and network.loads_t:
-                results_stats["stored_load_results"] = self._store_component_type_results(
-                    conn, network_id, "LOAD", network.loads, network.loads_t, scenario_id
+            if hasattr(network, "loads_t") and network.loads_t:
+                results_stats["stored_load_results"] = (
+                    self._store_component_type_results(
+                        conn, "LOAD", network.loads, network.loads_t, scenario_id
+                    )
                 )
-
+
             # Store line results
-            if hasattr(network, 'lines_t') and network.lines_t:
-                results_stats["stored_line_results"] = self._store_component_type_results(
-                    conn, network_id, "LINE", network.lines, network.lines_t, scenario_id
+            if hasattr(network, "lines_t") and network.lines_t:
+                results_stats["stored_line_results"] = (
+                    self._store_component_type_results(
+                        conn, "LINE", network.lines, network.lines_t, scenario_id
+                    )
                 )
-
+
             # Store link results
-            if hasattr(network, 'links_t') and network.links_t:
-                results_stats["stored_link_results"] = self._store_component_type_results(
-                    conn, network_id, "LINK", network.links, network.links_t, scenario_id
+            if hasattr(network, "links_t") and network.links_t:
+                results_stats["stored_link_results"] = (
+                    self._store_component_type_results(
+                        conn, "LINK", network.links, network.links_t, scenario_id
+                    )
                 )
-
+
             # Store storage unit results
-            if hasattr(network, 'storage_units_t') and network.storage_units_t:
-                results_stats["stored_storage_unit_results"] = self._store_component_type_results(
-                    conn, network_id, "STORAGE_UNIT", network.storage_units, network.storage_units_t, scenario_id
+            if hasattr(network, "storage_units_t") and network.storage_units_t:
+                results_stats["stored_storage_unit_results"] = (
+                    self._store_component_type_results(
+                        conn,
+                        "STORAGE_UNIT",
+                        network.storage_units,
+                        network.storage_units_t,
+                        scenario_id,
+                    )
                 )
-
+
             # Store store results
-            if hasattr(network, 'stores_t') and network.stores_t:
-                results_stats["stored_store_results"] = self._store_component_type_results(
-                    conn, network_id, "STORE", network.stores, network.stores_t, scenario_id
+            if hasattr(network, "stores_t") and network.stores_t:
+                results_stats["stored_store_results"] = (
+                    self._store_component_type_results(
+                        conn, "STORE", network.stores, network.stores_t, scenario_id
+                    )
                 )
-
+
             return results_stats
-
+
         except Exception as e:
             logger.error(f"Error storing solve results: {e}", exc_info=True)
             results_stats["errors"] += 1
             return results_stats
-
+
     def _store_component_type_results(
         self,
         conn,
-        network_id: int,
         component_type: str,
         static_df: pd.DataFrame,
         timeseries_dict: Dict[str, pd.DataFrame],
-        scenario_id: Optional[int]
+        scenario_id: Optional[int],
     ) -> int:
-        """Store results for a specific component type - only store OUTPUT attributes."""
+        """Store results for a specific component type - only store OUTPUT attributes (single network per database)."""
         stored_count = 0
-
+
         try:
             # Get component name to ID mapping
-            components = list_components_by_type(conn, network_id, component_type)
+            components = list_components_by_type(conn, component_type)
             name_to_id = {comp.name: comp.id for comp in components}
-
+
             # Store timeseries results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
             for attr_name, timeseries_df in timeseries_dict.items():
                 if timeseries_df.empty:
                     continue
-
+
                 # Check if this attribute is an output attribute (not an input)
                 try:
                     rule = get_validation_rule(conn, component_type, attr_name)
@@ -198,18 +269,18 @@ class ResultStorage:
                 except Exception:
                     # If no validation rule found, skip to be safe
                     continue
-
+
                 for component_name in timeseries_df.columns:
                     if component_name not in name_to_id:
                         continue
-
+
                     component_id = name_to_id[component_name]
                     component_series = timeseries_df[component_name]
-
+
                     # Skip if all values are NaN
                     if component_series.isna().all():
                         continue
-
+
                     # Convert to efficient values array
                     values = []
                     for value in component_series.values:
@@ -217,24 +288,30 @@ class ResultStorage:
                             values.append(0.0)  # Fill NaN with 0.0
                         else:
                             values.append(float(value))
-
+
                     if not values:
                         continue
-
+
                     # Store using efficient format
                     try:
-                        set_timeseries_attribute(conn, component_id, attr_name, values, scenario_id)
+                        set_timeseries_attribute(
+                            conn, component_id, attr_name, values, scenario_id
+                        )
                         stored_count += 1
                     except Exception as e:
                         # Handle validation errors gracefully
-                        if (
-                            "No validation rule found" in str(e) or "does not allow" in str(e)
-                            "ValidationError" in str(type(e).__name__)):
+                        if (
+                            "No validation rule found" in str(e)
+                            or "does not allow" in str(e)
+                            or "ValidationError" in str(type(e).__name__)
+                        ):
                             continue
                         else:
-                            logger.warning(f"Error storing timeseries {attr_name} for {component_type} '{component_name}': {e}")
+                            logger.warning(
+                                f"Error storing timeseries {attr_name} for {component_type} '{component_name}': {e}"
+                            )
                             continue
-
+
             # Store static optimization results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
             if not static_df.empty:
                 for attr_name in static_df.columns:
@@ -247,13 +324,13 @@ class ResultStorage:
                     except Exception:
                         # If no validation rule found, skip to be safe
                         continue
-
+
                     for component_name, value in static_df[attr_name].items():
                         if component_name not in name_to_id or pd.isna(value):
                             continue
-
+
                         component_id = name_to_id[component_name]
-
+
                         try:
                             # Convert value to StaticValue
                             if isinstance(value, (int, np.integer)):
@@ -267,110 +344,176 @@ class ResultStorage:
                                 static_value = StaticValue(bool(value))
                             else:
                                 static_value = StaticValue(str(value))
-
+
                             # Store using atomic utility
-                            set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+                            set_static_attribute(
+                                conn, component_id, attr_name, static_value, scenario_id
+                            )
                             stored_count += 1
-
+
                         except Exception as e:
                             # Handle validation errors gracefully
-                            if (
-                                "No validation rule found" in str(e) or "does not allow" in str(e)
-                                "ValidationError" in str(type(e).__name__)):
+                            if (
+                                "No validation rule found" in str(e)
+                                or "does not allow" in str(e)
+                                or "ValidationError" in str(type(e).__name__)
+                            ):
                                 continue
                             else:
-                                logger.warning(f"Error storing static {attr_name} for {component_type} '{component_name}': {e}")
+                                logger.warning(
+                                    f"Error storing static {attr_name} for {component_type} '{component_name}': {e}"
+                                )
                                 continue
-
+
             return stored_count
-
+
         except Exception as e:
-            logger.error(f"Error storing results for {component_type}: {e}", exc_info=True)
+            logger.error(
+                f"Error storing results for {component_type}: {e}", exc_info=True
+            )
             return stored_count
-
+
     def _store_solve_summary(
         self,
         conn,
-        network_id: int,
         solve_result: Dict[str, Any],
         scenario_id: Optional[int],
-        network_stats: Optional[Dict[str, Any]] = None
+        network_stats: Optional[Dict[str, Any]] = None,
     ):
-        """Store solve summary to network_solve_results table."""
+        """Store solve summary to network_solve_results table (single network per database)."""
        try:
+            # Debug logging
+            logger.info(
+                f"📝 _store_solve_summary called with scenario_id={scenario_id} (type={type(scenario_id)})"
+            )
+
             # Prepare solve summary data
-            solver_name = solve_result.get('solver_name', 'unknown')
-            solve_status = solve_result.get('status', 'unknown')
-            objective_value = solve_result.get('objective_value')
-            solve_time = solve_result.get('solve_time', 0.0)
-
-
-
-
-
-
+            solver_name = solve_result.get("solver_name", "unknown")
+            solve_status = solve_result.get("status", "unknown")
+            objective_value = solve_result.get("objective_value")
+            solve_time = solve_result.get("solve_time", 0.0)
+
+            logger.info(f"   solver_name: {solver_name}")
+            logger.info(f"   solve_status: {solve_status}")
+            logger.info(f"   objective_value: {objective_value}")
+            logger.info(f"   solve_time: {solve_time}")
+
             # Create enhanced solve result with network statistics for serialization
             enhanced_solve_result = {
                 **solve_result,
-                "network_statistics": network_stats or {}
+                "network_statistics": network_stats or {},
             }
-
-            #
-
-
-
+
+            # Delete existing result for this scenario first (handles NULL scenario_id correctly)
+            if scenario_id is None:
+                logger.info("🗑️ Deleting existing base scenario (NULL) results")
+                delete_result = conn.execute(
+                    "DELETE FROM network_solve_results WHERE scenario_id IS NULL"
+                )
+                logger.info(f"   Deleted {delete_result.rowcount} existing rows")
+            else:
+                logger.info(
+                    f"🗑️ Deleting existing results for scenario_id={scenario_id}"
+                )
+                delete_result = conn.execute(
+                    "DELETE FROM network_solve_results WHERE scenario_id = ?",
+                    (scenario_id,),
+                )
+                logger.info(f"   Deleted {delete_result.rowcount} existing rows")
+
+            logger.info(f"💾 Inserting solve results for scenario_id={scenario_id}")
+            logger.info(
+                f"   Serializing results_json ({len(str(enhanced_solve_result))} chars)..."
+            )
+            results_json = self._serialize_results_json(enhanced_solve_result)
+            logger.info(f"   Serializing metadata_json...")
+            metadata_json = self._serialize_metadata_json(enhanced_solve_result)
+
+            # Insert new solve results summary
+            insert_result = conn.execute(
+                """
+                INSERT INTO network_solve_results (
+                    scenario_id, solver_name, solve_type, solve_status,
                     objective_value, solve_time_seconds, results_json, metadata_json
-                ) VALUES (?, ?, ?, ?, ?, ?, ?,
-                """,
-
-
-
-
-
-
-
-
-
-
-
-
-
+                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    scenario_id,
+                    solver_name,
+                    "pypsa_optimization",
+                    solve_status,
+                    objective_value,
+                    solve_time,
+                    results_json,
+                    metadata_json,
+                ),
+            )
+
+            logger.info(f"✅ Inserted solve summary (rowid={insert_result.lastrowid})")
+
+            # Verify insertion
+            verify_cursor = conn.execute(
+                "SELECT COUNT(*), solver_name, solve_status FROM network_solve_results WHERE scenario_id IS ?",
+                (scenario_id,) if scenario_id is not None else (None,),
+            )
+            verify_result = verify_cursor.fetchone()
+            logger.info(
+                f"🔍 Verification: {verify_result[0]} row(s) with solver={verify_result[1]}, status={verify_result[2]}"
+            )
+
         except Exception as e:
-            logger.error(f"
-
+            logger.error(f"❌ FAILED to store solve summary: {e}", exc_info=True)
+            raise  # Re-raise to trigger rollback
+
     def _calculate_network_statistics(
-        self,
-        conn,
-        network_id: int,
-        network: 'pypsa.Network',
-        solve_result: Dict[str, Any]
+        self, conn, network: "pypsa.Network", solve_result: Dict[str, Any]
     ) -> Dict[str, Any]:
-        """Calculate network statistics - focusing only on capacity for now."""
+        """Calculate network statistics - focusing only on capacity for now (single network per database)."""
         try:
             # Calculate carrier-specific statistics
-            carrier_stats = self._calculate_carrier_statistics(conn, network_id, network)
-
+            carrier_stats = self._calculate_carrier_statistics(conn, network)
+
             # Calculate basic network statistics
-            total_cost = solve_result.get('objective_value', 0.0)
-            total_generation_mwh = sum(carrier_stats.get('dispatch_by_carrier', {}).values())
-            total_emissions_tonnes = sum(carrier_stats.get('emissions_by_carrier', {}).values())
-            total_capital_cost = sum(carrier_stats.get('capital_cost_by_carrier', {}).values())
-            total_operational_cost = sum(carrier_stats.get('operational_cost_by_carrier', {}).values())
-            total_system_cost = sum(carrier_stats.get('total_system_cost_by_carrier', {}).values())
-
+            total_cost = solve_result.get("objective_value", 0.0)
+            total_generation_mwh = sum(
+                carrier_stats.get("dispatch_by_carrier", {}).values()
+            )
+            total_emissions_tonnes = sum(
+                carrier_stats.get("emissions_by_carrier", {}).values()
+            )
+            total_capital_cost = sum(
+                carrier_stats.get("capital_cost_by_carrier", {}).values()
+            )
+            total_operational_cost = sum(
+                carrier_stats.get("operational_cost_by_carrier", {}).values()
+            )
+            total_system_cost = sum(
+                carrier_stats.get("total_system_cost_by_carrier", {}).values()
+            )
+
             # Calculate unmet load statistics
-            unmet_load_mwh = carrier_stats.get("dispatch_by_carrier", {}).get("Unmet Load", 0.0)
+            unmet_load_mwh = carrier_stats.get("dispatch_by_carrier", {}).get(
+                "Unmet Load", 0.0
+            )
             total_demand_mwh = self._calculate_total_demand(network)
-            unmet_load_percentage = (
-                (unmet_load_mwh / (total_demand_mwh + 1e-6)) * 100 if total_demand_mwh > 0 else 0.0)
+            unmet_load_percentage = (
+                (unmet_load_mwh / (total_demand_mwh + 1e-6)) * 100
+                if total_demand_mwh > 0
+                else 0.0
+            )
+
             # Create nested structure expected by frontend
             network_statistics = {
                 "core_summary": {
                     "total_generation_mwh": total_generation_mwh,
                     "total_demand_mwh": total_demand_mwh,
                     "total_cost": total_cost,
-                    "load_factor": (
-                        total_demand_mwh / (total_generation_mwh + 1e-6)) if total_generation_mwh > 0 else 0.0,
+                    "load_factor": (
+                        (total_demand_mwh / (total_generation_mwh + 1e-6))
+                        if total_generation_mwh > 0
+                        else 0.0
+                    ),
+                    "unserved_energy_mwh": unmet_load_mwh,
                 },
                 "custom_statistics": {
                     # Include carrier-specific statistics (capacity, dispatch, emissions, costs)
@@ -379,25 +522,44 @@ class ResultStorage:
                     "total_operational_cost": total_operational_cost,
                     "total_currency_cost": total_system_cost,  # Use calculated system cost instead of PyPSA objective
                     "total_emissions_tons_co2": total_emissions_tonnes,
-                    "average_price_per_mwh": (total_system_cost / (total_generation_mwh + 1e-6)) if total_generation_mwh > 0 else 0.0,
+                    "average_price_per_mwh": (
+                        (total_system_cost / (total_generation_mwh + 1e-6))
+                        if total_generation_mwh > 0
+                        else 0.0
+                    ),
                     "unmet_load_percentage": unmet_load_percentage,
-                    "max_unmet_load_hour_mw": 0.0
+                    "max_unmet_load_hour_mw": 0.0,  # TODO: Calculate max hourly unmet load later
                 },
                 "runtime_info": {
                     "component_count": (
-                        len(network.buses) + len(network.generators) + len(network.loads)
-                        + len(network.lines) + len(network.links)
-                    ) if hasattr(network, 'buses') else 0,
-                    "bus_count": len(network.buses) if hasattr(network, 'buses') else 0,
-                    "generator_count": len(network.generators) if hasattr(network, 'generators') else 0,
-                    "load_count": len(network.loads) if hasattr(network, 'loads') else 0,
-                    "snapshot_count": len(network.snapshots) if hasattr(network, 'snapshots') else 0
-                },
+                        (
+                            len(network.buses)
+                            + len(network.generators)
+                            + len(network.loads)
+                            + len(network.lines)
+                            + len(network.links)
+                        )
+                        if hasattr(network, "buses")
+                        else 0
+                    ),
+                    "bus_count": len(network.buses) if hasattr(network, "buses") else 0,
+                    "generator_count": (
+                        len(network.generators) if hasattr(network, "generators") else 0
+                    ),
+                    "load_count": (
+                        len(network.loads) if hasattr(network, "loads") else 0
+                    ),
+                    "snapshot_count": (
+                        len(network.snapshots) if hasattr(network, "snapshots") else 0
+                    ),
+                },
             }
-
-            logger.info(f"Calculated network statistics: core_summary={network_statistics['core_summary']}")
+
+            logger.info(
+                f"Calculated network statistics: core_summary={network_statistics['core_summary']}"
+            )
             return network_statistics
-
+
         except Exception as e:
             logger.error(f"Failed to calculate network statistics: {e}", exc_info=True)
             # Return empty structure matching expected format
@@ -405,9 +567,9 @@ class ResultStorage:
                 "core_summary": {
                     "total_generation_mwh": 0.0,
                     "total_demand_mwh": 0.0,
-                    "total_cost": solve_result.get('objective_value', 0.0),
+                    "total_cost": solve_result.get("objective_value", 0.0),
                     "load_factor": 0.0,
-                    "unserved_energy_mwh": 0.0
+                    "unserved_energy_mwh": 0.0,
                 },
                 "custom_statistics": {
                     "dispatch_by_carrier": {},
@@ -423,39 +585,51 @@ class ResultStorage:
                     "total_emissions_tons_co2": 0.0,
                     "average_price_per_mwh": 0.0,
                     "unmet_load_percentage": 0.0,
-                    "max_unmet_load_hour_mw": 0.0
+                    "max_unmet_load_hour_mw": 0.0,
                 },
                 "runtime_info": {
                     "component_count": 0,
                     "bus_count": 0,
                     "generator_count": 0,
                     "load_count": 0,
-                    "snapshot_count": 0
+                    "snapshot_count": 0,
                 },
-                "error": str(e)
+                "error": str(e),
             }
-
-    def _calculate_carrier_statistics(self, conn, network_id: int, network: 'pypsa.Network') -> Dict[str, Any]:
+
+    def _calculate_carrier_statistics(
+        self, conn, network: "pypsa.Network"
+    ) -> Dict[str, Any]:
         """
-        Calculate carrier-specific statistics directly from the network.
+        Calculate carrier-specific statistics directly from the network (single network per database).
         This is the primary calculation - per-year stats will be calculated separately.
         """
         try:
+            logger.info("🔍 _calculate_carrier_statistics: Starting calculation...")
+
             # Calculate all-year statistics directly from the network
             # Extract years from network snapshots
-            if hasattr(network.snapshots, 'levels'):
+            if hasattr(network.snapshots, "levels"):
                 # Multi-period optimization - get years from period level
                 period_values = network.snapshots.get_level_values(0)
                 years = sorted(period_values.unique())
-            elif hasattr(network.snapshots, 'year'):
+                logger.info(
+                    f"   Detected multi-period optimization with years: {years}"
+                )
+            elif hasattr(network.snapshots, "year"):
                 years = sorted(network.snapshots.year.unique())
-            elif hasattr(network, '_available_years'):
+                logger.info(f"   Detected DatetimeIndex with years: {years}")
+            elif hasattr(network, "_available_years"):
                 years = network._available_years
+                logger.info(f"   Using _available_years: {years}")
             else:
                 years = [2020]  # Fallback
-
-
-
+                logger.warning(f"   No year information found, using fallback: {years}")
+
+            logger.info(
+                f"📅 Calculating all-year carrier statistics for {len(years)} years: {years}"
+            )
+
             # Calculate per-year statistics first
             all_year_stats = {
                 "dispatch_by_carrier": {},
@@ -520,99 +746,120 @@ class ResultStorage:
                 "emissions_by_carrier": {},
                 "capital_cost_by_carrier": {},
                 "operational_cost_by_carrier": {},
-                "total_system_cost_by_carrier": {}
+                "total_system_cost_by_carrier": {},
             }
-
+
     def _store_year_based_statistics(
         self,
         conn,
-        network_id: int,
-        network: 'pypsa.Network',
+        network: "pypsa.Network",
         year_statistics: Dict[int, Dict[str, Any]],
-        scenario_id: Optional[int]
+        scenario_id: Optional[int],
     ) -> int:
-        """Store year-based statistics to database"""
+        """Store year-based statistics to database (single network per database)"""
         try:
             import json
+
             stored_count = 0
-
-            # Use master scenario if no scenario specified
-            if scenario_id is None:
-                from pyconvexity.models import get_master_scenario_id
-                scenario_id = get_master_scenario_id(conn, network_id)
-
+
             # Check if network_solve_results_by_year table exists, create if not
-            conn.execute("""
+            conn.execute(
+                """
                 CREATE TABLE IF NOT EXISTS network_solve_results_by_year (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
-                    network_id INTEGER NOT NULL,
-                    scenario_id INTEGER NOT NULL,
+                    scenario_id INTEGER,
                    year INTEGER NOT NULL,
                     results_json TEXT,
                     metadata_json TEXT,
                     created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
-                    FOREIGN KEY (network_id) REFERENCES networks(id),
                     FOREIGN KEY (scenario_id) REFERENCES scenarios(id),
-                    UNIQUE(network_id, scenario_id, year)
+                    UNIQUE(scenario_id, year)
                 )
-            """)
-
+            """
+            )
+
             for year, stats in year_statistics.items():
                 try:
                     # Calculate proper year-specific carrier statistics
-                    year_carrier_stats = self._calculate_year_carrier_statistics(
-                        conn, network_id, network, year)
+                    year_carrier_stats = self._calculate_year_carrier_statistics(
+                        conn, network, year
+                    )
+
                     # Merge year-specific carrier stats into the statistics
                     if "custom_statistics" in stats:
                         stats["custom_statistics"].update(year_carrier_stats)
                     else:
                         stats["custom_statistics"] = year_carrier_stats
-
+
                     # Wrap the year statistics in the same structure as overall results for consistency
                     year_result_wrapper = {
                         "success": True,
                         "year": year,
-                        "network_statistics": stats
-                    }
-
-                    metadata = {
-                        "year": year,
-                        "network_id": network_id,
-                        "scenario_id": scenario_id
+                        "network_statistics": stats,
                     }
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+                    metadata = {"year": year, "scenario_id": scenario_id}
+
+                    # Delete existing result for this scenario+year first (handles NULL scenario_id correctly)
+                    if scenario_id is None:
+                        conn.execute(
+                            """
+                            DELETE FROM network_solve_results_by_year
+                            WHERE scenario_id IS NULL AND year = ?
+                            """,
+                            (year,),
+                        )
+                    else:
+                        conn.execute(
+                            """
+                            DELETE FROM network_solve_results_by_year
+                            WHERE scenario_id = ? AND year = ?
+                            """,
+                            (scenario_id, year),
+                        )
+
+                    # Insert new year-based results
+                    conn.execute(
+                        """
+                        INSERT INTO network_solve_results_by_year
+                        (scenario_id, year, results_json, metadata_json)
+                        VALUES (?, ?, ?, ?)
+                        """,
+                        (
+                            scenario_id,
+                            year,
+                            json.dumps(
+                                year_result_wrapper, default=self._json_serializer
+                            ),
+                            json.dumps(metadata, default=self._json_serializer),
+                        ),
+                    )
+
                     stored_count += 1
                     logger.info(f"Stored year-based statistics for year {year}")
-
+
                 except Exception as e:
                     logger.error(f"Failed to store statistics for year {year}: {e}")
                     continue
-
-            logger.info(
+
+            logger.info(
+                f"Successfully stored year-based statistics for {stored_count} years"
+            )
             return stored_count
-
+
         except Exception as e:
             logger.error(f"Failed to store year-based statistics: {e}", exc_info=True)
             return 0
-
-    def _calculate_year_carrier_statistics(
+
+    def _calculate_year_carrier_statistics(
+        self, conn, network: "pypsa.Network", year: int
+    ) -> Dict[str, Any]:
         """
         Calculate carrier-specific statistics for a specific year.
         For now, only calculate capacity statistics.
         """
         try:
+            logger.info(f"   🔍 Calculating statistics for year {year}...")
             # Initialize carrier statistics
             carrier_stats = {
                 "dispatch_by_carrier": {},
@@ -621,17 +868,19 @@ class ResultStorage:
                 "emissions_by_carrier": {},
                 "capital_cost_by_carrier": {},
                 "operational_cost_by_carrier": {},
-                "total_system_cost_by_carrier": {}
+                "total_system_cost_by_carrier": {},
             }
-
+
             # Get all carriers from database
-            cursor = conn.execute("""
-                SELECT DISTINCT name FROM carriers
-            """)
+            cursor = conn.execute(
+                """
+                SELECT DISTINCT name FROM carriers
+            """
+            )
             all_carriers = [row[0] for row in cursor.fetchall()]
-
+
             # Initialize all carriers with zero values (including special "Unmet Load" carrier)
-            all_carriers_with_unmet = all_carriers + ['Unmet Load']
+            all_carriers_with_unmet = all_carriers + ["Unmet Load"]
             for carrier in all_carriers_with_unmet:
                 carrier_stats["dispatch_by_carrier"][carrier] = 0.0
                 carrier_stats["power_capacity_by_carrier"][carrier] = 0.0
@@ -640,13 +889,14 @@ class ResultStorage:
                 carrier_stats["capital_cost_by_carrier"][carrier] = 0.0
                 carrier_stats["operational_cost_by_carrier"][carrier] = 0.0
                 carrier_stats["total_system_cost_by_carrier"][carrier] = 0.0
-
+
             # Calculate dispatch (generation) by carrier for this specific year
-
+
             # 1. GENERATORS - Generation dispatch (including UNMET_LOAD)
-            if hasattr(network, 'generators_t') and hasattr(network.generators_t, 'p'):
+            if hasattr(network, "generators_t") and hasattr(network.generators_t, "p"):
                 # Get generator-carrier mapping (include both GENERATOR and UNMET_LOAD)
-                cursor = conn.execute("""
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name,
                            CASE
                                WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
@@ -654,135 +904,188 @@ class ResultStorage:
                            END as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.network_id = ? AND c.component_type IN ('GENERATOR', 'UNMET_LOAD')
-                """, (network_id,))
+                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
+                """
+                )
                 generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 # Filter generation data for this specific year
-                year_generation = self._filter_timeseries_by_year(network.generators_t.p, network.snapshots, year)
+                year_generation = self._filter_timeseries_by_year(
+                    network.generators_t.p, network.snapshots, year
+                )
                 if year_generation is not None and not year_generation.empty:
                     for gen_name in year_generation.columns:
                         if gen_name in generator_carriers:
                             carrier_name = generator_carriers[gen_name]
-
+
                             # Calculate generation for this year (ALWAYS apply snapshot weightings to convert MW to MWh)
                             year_weightings = self._get_year_weightings(network, year)
                             if year_weightings is not None:
-                                generation_mwh = float((year_generation[gen_name].values * year_weightings).sum())
+                                generation_mwh = float(
+                                    (
+                                        year_generation[gen_name].values
+                                        * year_weightings
+                                    ).sum()
+                                )
                             else:
                                 # Fallback: simple sum (will be incorrect for non-1H models)
                                 generation_mwh = float(year_generation[gen_name].sum())
-                                logger.warning(
-                                    f"Could not apply snapshot weightings for {gen_name} in year {year} - energy may be incorrect")
+                                logger.warning(
+                                    f"Could not apply snapshot weightings for {gen_name} in year {year} - energy may be incorrect"
+                                )
+
                             if carrier_name in carrier_stats["dispatch_by_carrier"]:
-                                carrier_stats["dispatch_by_carrier"][
-                                    carrier_name] += generation_mwh
+                                carrier_stats["dispatch_by_carrier"][
+                                    carrier_name
+                                ] += generation_mwh
+
             # 2. STORAGE_UNITS - Discharge only (positive values)
-            if hasattr(network, 'storage_units_t') and hasattr(network.storage_units_t, 'p'):
+            if hasattr(network, "storage_units_t") and hasattr(
+                network.storage_units_t, "p"
+            ):
                 # Get storage unit-carrier mapping
-                cursor = conn.execute("""
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
-                """, (network_id,))
+                    WHERE c.component_type = 'STORAGE_UNIT'
+                """
+                )
                 storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 # Filter storage unit data for this specific year
-                year_storage = self._filter_timeseries_by_year(network.storage_units_t.p, network.snapshots, year)
+                year_storage = self._filter_timeseries_by_year(
+                    network.storage_units_t.p, network.snapshots, year
+                )
                 if year_storage is not None and not year_storage.empty:
                     for su_name in year_storage.columns:
                         if su_name in storage_unit_carriers:
                             carrier_name = storage_unit_carriers[su_name]
-
+
                             # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
                             year_weightings = self._get_year_weightings(network, year)
                             if year_weightings is not None:
-                                discharge_mwh = float((year_storage[su_name].clip(lower=0).values * year_weightings).sum())
+                                discharge_mwh = float(
+                                    (
+                                        year_storage[su_name].clip(lower=0).values
+                                        * year_weightings
+                                    ).sum()
+                                )
                             else:
                                 # Fallback: simple sum (will be incorrect for non-1H models)
-                                discharge_mwh = float(
-                                    year_storage[su_name].clip(lower=0).sum())
-                                logger.warning(f"Could not apply snapshot weightings for storage unit {su_name} in year {year} - energy may be incorrect")
+                                discharge_mwh = float(
+                                    year_storage[su_name].clip(lower=0).sum()
+                                )
+                                logger.warning(
+                                    f"Could not apply snapshot weightings for storage unit {su_name} in year {year} - energy may be incorrect"
+                                )
+
                             if carrier_name in carrier_stats["dispatch_by_carrier"]:
-                                carrier_stats["dispatch_by_carrier"][
-                                    carrier_name] += discharge_mwh
+                                carrier_stats["dispatch_by_carrier"][
+                                    carrier_name
+                                ] += discharge_mwh
+
             # 3. STORES - Discharge only (positive values)
-            if hasattr(network, 'stores_t') and hasattr(network.stores_t, 'p'):
+            if hasattr(network, "stores_t") and hasattr(network.stores_t, "p"):
                 # Get store-carrier mapping
-                cursor = conn.execute("""
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.network_id = ? AND c.component_type = 'STORE'
-                """, (network_id,))
+                    WHERE c.component_type = 'STORE'
+                """
+                )
                 store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 # Filter store data for this specific year
-                year_stores = self._filter_timeseries_by_year(network.stores_t.p, network.snapshots, year)
+                year_stores = self._filter_timeseries_by_year(
+                    network.stores_t.p, network.snapshots, year
+                )
                 if year_stores is not None and not year_stores.empty:
                     for store_name in year_stores.columns:
                         if store_name in store_carriers:
                             carrier_name = store_carriers[store_name]
-
+
                             # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
                             year_weightings = self._get_year_weightings(network, year)
                             if year_weightings is not None:
-                                discharge_mwh = float((year_stores[store_name].clip(lower=0).values * year_weightings).sum())
+                                discharge_mwh = float(
+                                    (
+                                        year_stores[store_name].clip(lower=0).values
+                                        * year_weightings
+                                    ).sum()
+                                )
                             else:
                                 # Fallback: simple sum (will be incorrect for non-1H models)
-                                discharge_mwh = float(
-                                    year_stores[store_name].clip(lower=0).sum())
-                                logger.warning(f"Could not apply snapshot weightings for store {store_name} in year {year} - energy may be incorrect")
+                                discharge_mwh = float(
+                                    year_stores[store_name].clip(lower=0).sum()
+                                )
+                                logger.warning(
+                                    f"Could not apply snapshot weightings for store {store_name} in year {year} - energy may be incorrect"
+                                )
+
                             if carrier_name in carrier_stats["dispatch_by_carrier"]:
-                                carrier_stats["dispatch_by_carrier"][
-                                    carrier_name] += discharge_mwh
+                                carrier_stats["dispatch_by_carrier"][
+                                    carrier_name
+                                ] += discharge_mwh
+
             # Calculate emissions by carrier for this specific year
             # Get emission factors for all carriers
-            cursor = conn.execute("""
-                SELECT name, co2_emissions FROM carriers
-            """)
+            cursor = conn.execute(
+                """
+                SELECT name, co2_emissions FROM carriers
+            """
+            )
             emission_factors = {row[0]: row[1] for row in cursor.fetchall()}
-
+
             # Calculate emissions: dispatch (MWh) × emission factor (tonnes CO2/MWh) = tonnes CO2
-            for carrier_name, dispatch_mwh in carrier_stats["dispatch_by_carrier"].items():
+            for carrier_name, dispatch_mwh in carrier_stats[
+                "dispatch_by_carrier"
+            ].items():
                 # Handle None values safely
                 if dispatch_mwh is None:
                     dispatch_mwh = 0.0
-
-                emission_factor = emission_factors.get(carrier_name, 0.0)  # Default to 0 if no emission factor
+
+                emission_factor = emission_factors.get(
+                    carrier_name, 0.0
+                )  # Default to 0 if no emission factor
                 if emission_factor is None:
                     emission_factor = 0.0
-
+
                 emissions_tonnes = dispatch_mwh * emission_factor
-
+
                 if carrier_name in carrier_stats["emissions_by_carrier"]:
-                    carrier_stats["emissions_by_carrier"][
-                        carrier_name] += emissions_tonnes
+                    carrier_stats["emissions_by_carrier"][
+                        carrier_name
+                    ] += emissions_tonnes
+
             # Calculate capital costs by carrier for this specific year
             # Capital costs are annualized and counted every year the component is active
-
+
             # Helper function to check if component is active in this year
             def is_component_active(build_year, lifetime, current_year):
                 """Check if component is active in the current year based on build_year and lifetime"""
                 if pd.isna(build_year):
                     return True  # No build year constraint
-
+
                 build_year = int(build_year)
                 if build_year > current_year:
                     return False  # Not built yet
-
-                if pd.isna(lifetime) or lifetime == float('inf'):
+
+                if pd.isna(lifetime) or lifetime == float("inf"):
                     return True  # Infinite lifetime
-
+
                 lifetime = int(lifetime)
                 end_year = build_year + lifetime - 1
                 return current_year <= end_year
-
+
             # 1. GENERATORS - Capital costs (including UNMET_LOAD)
-            if hasattr(network, 'generators') and not network.generators.empty:
+            if hasattr(network, "generators") and not network.generators.empty:
                 # Get generator info: carrier, capital_cost, build_year, lifetime (include UNMET_LOAD)
-                cursor = conn.execute("""
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name,
                            CASE
                                WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
@@ -790,178 +1093,308 @@ class ResultStorage:
                            END as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
+                    """
+                )
                 generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for gen_name in network.generators.index:
                     if gen_name in generator_carriers:
                         carrier_name = generator_carriers[gen_name]
-
+
                         # Get build year and lifetime
-                        build_year =
-
-
+                        build_year = (
+                            network.generators.loc[gen_name, "build_year"]
+                            if "build_year" in network.generators.columns
+                            else None
+                        )
+                        lifetime = (
+                            network.generators.loc[gen_name, "lifetime"]
+                            if "lifetime" in network.generators.columns
+                            else None
+                        )
+
                         # Check if component is active in this year
                         if is_component_active(build_year, lifetime, year):
                             # Get capacity and capital cost
-                            if
-                                capacity_mw = float(
+                            if "p_nom_opt" in network.generators.columns:
+                                capacity_mw = float(
+                                    network.generators.loc[gen_name, "p_nom_opt"]
+                                )
                             else:
-                                capacity_mw =
-
-
-
+                                capacity_mw = (
+                                    float(network.generators.loc[gen_name, "p_nom"])
+                                    if "p_nom" in network.generators.columns
+                                    else 0.0
+                                )
+
+                            capital_cost_per_mw = (
+                                float(network.generators.loc[gen_name, "capital_cost"])
+                                if "capital_cost" in network.generators.columns
+                                else 0.0
+                            )
+
                             # Calculate annualized capital cost for this year
                             annual_capital_cost = capacity_mw * capital_cost_per_mw

+                            # Log capital cost calculation details with unit check
+                            if annual_capital_cost > 0.001:
+                                logger.info(
+                                    f" 💰 Capital cost calc for {gen_name} ({carrier_name}) in year {year}: "
+                                    f"{capacity_mw:.2f} MW × {capital_cost_per_mw:.2f} currency/MW = {annual_capital_cost:.2f} currency"
+                                )
+                                # Check if values seem too small (might be in kUSD instead of USD)
+                                if capital_cost_per_mw > 0 and capital_cost_per_mw < 1:
+                                    logger.warning(
+                                        f" ⚠️ WARNING: capital_cost_per_mw ({capital_cost_per_mw:.2f}) seems very small. "
+                                        f"Expected USD/MW but might be in kUSD/MW (thousands)?"
+                                    )
+                                if annual_capital_cost > 0 and annual_capital_cost < 1000:
+                                    logger.warning(
+                                        f" ⚠️ WARNING: annual_capital_cost ({annual_capital_cost:.2f}) seems very small. "
+                                        f"Expected USD but might be in kUSD (thousands)?"
+                                    )
+
                             if carrier_name in carrier_stats["capital_cost_by_carrier"]:
-                                carrier_stats["capital_cost_by_carrier"][
-
+                                carrier_stats["capital_cost_by_carrier"][
+                                    carrier_name
+                                ] += annual_capital_cost
+
             # 2. STORAGE_UNITS - Capital costs
-            if hasattr(network,
-                cursor = conn.execute(
+            if hasattr(network, "storage_units") and not network.storage_units.empty:
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'STORAGE_UNIT'
+                    """
+                )
                 storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for su_name in network.storage_units.index:
                     if su_name in storage_unit_carriers:
                         carrier_name = storage_unit_carriers[su_name]
-
+
                         # Get build year and lifetime
-                        build_year =
-
-
+                        build_year = (
+                            network.storage_units.loc[su_name, "build_year"]
+                            if "build_year" in network.storage_units.columns
+                            else None
+                        )
+                        lifetime = (
+                            network.storage_units.loc[su_name, "lifetime"]
+                            if "lifetime" in network.storage_units.columns
+                            else None
+                        )
+
                         # Check if component is active in this year
                         if is_component_active(build_year, lifetime, year):
                             # Get power capacity and capital cost (per MW)
-                            if
-                                capacity_mw = float(
+                            if "p_nom_opt" in network.storage_units.columns:
+                                capacity_mw = float(
+                                    network.storage_units.loc[su_name, "p_nom_opt"]
+                                )
                             else:
-                                capacity_mw =
-
-
-
+                                capacity_mw = (
+                                    float(network.storage_units.loc[su_name, "p_nom"])
+                                    if "p_nom" in network.storage_units.columns
+                                    else 0.0
+                                )
+
+                            capital_cost_per_mw = (
+                                float(
+                                    network.storage_units.loc[su_name, "capital_cost"]
+                                )
+                                if "capital_cost" in network.storage_units.columns
+                                else 0.0
+                            )
+
                             # Calculate annualized capital cost for this year
                             annual_capital_cost = capacity_mw * capital_cost_per_mw
-
+
                             if carrier_name in carrier_stats["capital_cost_by_carrier"]:
-                                carrier_stats["capital_cost_by_carrier"][
-
+                                carrier_stats["capital_cost_by_carrier"][
+                                    carrier_name
+                                ] += annual_capital_cost
+
             # 3. STORES - Capital costs (per MWh)
-            if hasattr(network,
-                cursor = conn.execute(
+            if hasattr(network, "stores") and not network.stores.empty:
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'STORE'
+                    """
+                )
                 store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for store_name in network.stores.index:
                     if store_name in store_carriers:
                         carrier_name = store_carriers[store_name]
-
+
                         # Get build year and lifetime
-                        build_year =
-
-
+                        build_year = (
+                            network.stores.loc[store_name, "build_year"]
+                            if "build_year" in network.stores.columns
+                            else None
+                        )
+                        lifetime = (
+                            network.stores.loc[store_name, "lifetime"]
+                            if "lifetime" in network.stores.columns
+                            else None
+                        )
+
                         # Check if component is active in this year
                         if is_component_active(build_year, lifetime, year):
                             # Get energy capacity and capital cost (per MWh)
-                            if
-                                capacity_mwh = float(
+                            if "e_nom_opt" in network.stores.columns:
+                                capacity_mwh = float(
+                                    network.stores.loc[store_name, "e_nom_opt"]
+                                )
                             else:
-                                capacity_mwh =
-
-
-
+                                capacity_mwh = (
+                                    float(network.stores.loc[store_name, "e_nom"])
+                                    if "e_nom" in network.stores.columns
+                                    else 0.0
+                                )
+
+                            capital_cost_per_mwh = (
+                                float(network.stores.loc[store_name, "capital_cost"])
+                                if "capital_cost" in network.stores.columns
+                                else 0.0
+                            )
+
                             # Calculate annualized capital cost for this year
                             annual_capital_cost = capacity_mwh * capital_cost_per_mwh
-
+
                             if carrier_name in carrier_stats["capital_cost_by_carrier"]:
-                                carrier_stats["capital_cost_by_carrier"][
-
+                                carrier_stats["capital_cost_by_carrier"][
+                                    carrier_name
+                                ] += annual_capital_cost
+
             # 4. LINES - Capital costs (per MVA)
-            if hasattr(network,
-                cursor = conn.execute(
+            if hasattr(network, "lines") and not network.lines.empty:
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'LINE'
+                    """
+                )
                 line_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for line_name in network.lines.index:
                     if line_name in line_carriers:
                         carrier_name = line_carriers[line_name]
-
+
                         # Get build year and lifetime
-                        build_year =
-
-
+                        build_year = (
+                            network.lines.loc[line_name, "build_year"]
+                            if "build_year" in network.lines.columns
+                            else None
+                        )
+                        lifetime = (
+                            network.lines.loc[line_name, "lifetime"]
+                            if "lifetime" in network.lines.columns
+                            else None
+                        )
+
                         # Check if component is active in this year
                         if is_component_active(build_year, lifetime, year):
                             # Get apparent power capacity and capital cost (per MVA)
-                            if
-                                capacity_mva = float(
+                            if "s_nom_opt" in network.lines.columns:
+                                capacity_mva = float(
+                                    network.lines.loc[line_name, "s_nom_opt"]
+                                )
                             else:
-                                capacity_mva =
-
-
-
+                                capacity_mva = (
+                                    float(network.lines.loc[line_name, "s_nom"])
+                                    if "s_nom" in network.lines.columns
+                                    else 0.0
+                                )
+
+                            capital_cost_per_mva = (
+                                float(network.lines.loc[line_name, "capital_cost"])
+                                if "capital_cost" in network.lines.columns
+                                else 0.0
+                            )
+
                             # Calculate annualized capital cost for this year
                             annual_capital_cost = capacity_mva * capital_cost_per_mva
-
+
                             if carrier_name in carrier_stats["capital_cost_by_carrier"]:
-                                carrier_stats["capital_cost_by_carrier"][
-
+                                carrier_stats["capital_cost_by_carrier"][
+                                    carrier_name
+                                ] += annual_capital_cost
+
             # 5. LINKS - Capital costs (per MW)
-            if hasattr(network,
-                cursor = conn.execute(
+            if hasattr(network, "links") and not network.links.empty:
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'LINK'
+                    """
+                )
                 link_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for link_name in network.links.index:
                     if link_name in link_carriers:
                         carrier_name = link_carriers[link_name]
-
+
                         # Get build year and lifetime
-                        build_year =
-
-
+                        build_year = (
+                            network.links.loc[link_name, "build_year"]
+                            if "build_year" in network.links.columns
+                            else None
+                        )
+                        lifetime = (
+                            network.links.loc[link_name, "lifetime"]
+                            if "lifetime" in network.links.columns
+                            else None
+                        )
+
                         # Check if component is active in this year
                         if is_component_active(build_year, lifetime, year):
                             # Get power capacity and capital cost (per MW)
-                            if
-                                capacity_mw = float(
+                            if "p_nom_opt" in network.links.columns:
+                                capacity_mw = float(
+                                    network.links.loc[link_name, "p_nom_opt"]
+                                )
                             else:
-                                capacity_mw =
-
-
-
+                                capacity_mw = (
+                                    float(network.links.loc[link_name, "p_nom"])
+                                    if "p_nom" in network.links.columns
+                                    else 0.0
+                                )
+
+                            capital_cost_per_mw = (
+                                float(network.links.loc[link_name, "capital_cost"])
+                                if "capital_cost" in network.links.columns
+                                else 0.0
+                            )
+
                             # Calculate annualized capital cost for this year
                             annual_capital_cost = capacity_mw * capital_cost_per_mw
-
+
                             if carrier_name in carrier_stats["capital_cost_by_carrier"]:
-                                carrier_stats["capital_cost_by_carrier"][
-
+                                carrier_stats["capital_cost_by_carrier"][
+                                    carrier_name
+                                ] += annual_capital_cost
+
             # Calculate operational costs by carrier for this specific year
             # Operational costs = dispatch (MWh) × marginal_cost (currency/MWh)
             # Only for components that are active in this year
-
+
             # 1. GENERATORS - Operational costs (including UNMET_LOAD)
-            if hasattr(network,
+            if hasattr(network, "generators_t") and hasattr(network.generators_t, "p"):
                 # Get generator info: carrier, marginal_cost, build_year, lifetime (include UNMET_LOAD)
-                cursor = conn.execute(
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name,
                            CASE
                                WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
@@ -969,135 +1402,259 @@ class ResultStorage:
                            END as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
+                    """
+                )
                 generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 # Filter generation data for this specific year
-                year_generation = self._filter_timeseries_by_year(
+                year_generation = self._filter_timeseries_by_year(
+                    network.generators_t.p, network.snapshots, year
+                )
                 if year_generation is not None and not year_generation.empty:
                     for gen_name in year_generation.columns:
                         if gen_name in generator_carriers:
                             carrier_name = generator_carriers[gen_name]
-
+
                             # Get build year and lifetime
-                            build_year =
-
-
+                            build_year = (
+                                network.generators.loc[gen_name, "build_year"]
+                                if "build_year" in network.generators.columns
+                                else None
+                            )
+                            lifetime = (
+                                network.generators.loc[gen_name, "lifetime"]
+                                if "lifetime" in network.generators.columns
+                                else None
+                            )
+
                             # Check if component is active in this year
                             if is_component_active(build_year, lifetime, year):
                                 # Calculate generation for this year (already calculated above, but need to recalculate for operational costs)
-                                year_weightings = self._get_year_weightings(
+                                year_weightings = self._get_year_weightings(
+                                    network, year
+                                )
                                 if year_weightings is not None:
-                                    generation_mwh = float(
+                                    generation_mwh = float(
+                                        (
+                                            year_generation[gen_name].values
+                                            * year_weightings
+                                        ).sum()
+                                    )
                                 else:
-                                    generation_mwh = float(
-
+                                    generation_mwh = float(
+                                        year_generation[gen_name].sum()
+                                    )
+
                                 # Get marginal cost
-                                marginal_cost =
-
+                                marginal_cost = (
+                                    float(
+                                        network.generators.loc[
+                                            gen_name, "marginal_cost"
+                                        ]
+                                    )
+                                    if "marginal_cost" in network.generators.columns
+                                    else 0.0
+                                )
+
                                 # Calculate operational cost for this year
                                 operational_cost = generation_mwh * marginal_cost

-
-
-
+                                # Log operational cost calculation details with unit check
+                                if operational_cost > 0.001:
+                                    logger.info(
+                                        f" 💰 Operational cost calc for {gen_name} ({carrier_name}) in year {year}: "
+                                        f"{generation_mwh:.2f} MWh × {marginal_cost:.2f} currency/MWh = {operational_cost:.2f} currency"
+                                    )
+                                    # Check if values seem too small (might be in kUSD instead of USD)
+                                    if marginal_cost > 0 and marginal_cost < 1:
+                                        logger.warning(
+                                            f" ⚠️ WARNING: marginal_cost ({marginal_cost:.2f}) seems very small. "
+                                            f"Expected USD/MWh but might be in kUSD/MWh (thousands)?"
+                                        )
+                                    if operational_cost > 0 and operational_cost < 1000:
+                                        logger.warning(
+                                            f" ⚠️ WARNING: operational_cost ({operational_cost:.2f}) seems very small. "
+                                            f"Expected USD but might be in kUSD (thousands)?"
+                                        )
+
+                                if (
+                                    carrier_name
+                                    in carrier_stats["operational_cost_by_carrier"]
+                                ):
+                                    carrier_stats["operational_cost_by_carrier"][
+                                        carrier_name
+                                    ] += operational_cost
+
             # 2. STORAGE_UNITS - Operational costs (discharge only)
-            if hasattr(network,
+            if hasattr(network, "storage_units_t") and hasattr(
+                network.storage_units_t, "p"
+            ):
                 # Get storage unit info: carrier, marginal_cost, build_year, lifetime
-                cursor = conn.execute(
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'STORAGE_UNIT'
+                    """
+                )
                 storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 # Filter storage unit data for this specific year
-                year_storage = self._filter_timeseries_by_year(
+                year_storage = self._filter_timeseries_by_year(
+                    network.storage_units_t.p, network.snapshots, year
+                )
                 if year_storage is not None and not year_storage.empty:
                     for su_name in year_storage.columns:
                         if su_name in storage_unit_carriers:
                             carrier_name = storage_unit_carriers[su_name]
-
+
                             # Get build year and lifetime
-                            build_year =
-
-
+                            build_year = (
+                                network.storage_units.loc[su_name, "build_year"]
+                                if "build_year" in network.storage_units.columns
+                                else None
+                            )
+                            lifetime = (
+                                network.storage_units.loc[su_name, "lifetime"]
+                                if "lifetime" in network.storage_units.columns
+                                else None
+                            )
+
                             # Check if component is active in this year
                             if is_component_active(build_year, lifetime, year):
                                 # Calculate discharge for this year (positive values only)
-                                year_weightings = self._get_year_weightings(
+                                year_weightings = self._get_year_weightings(
+                                    network, year
+                                )
                                 if year_weightings is not None:
-                                    discharge_mwh = float(
+                                    discharge_mwh = float(
+                                        (
+                                            year_storage[su_name].clip(lower=0).values
+                                            * year_weightings
+                                        ).sum()
+                                    )
                                 else:
-                                    discharge_mwh = float(
-
+                                    discharge_mwh = float(
+                                        year_storage[su_name].clip(lower=0).sum()
+                                    )
+
                                 # Get marginal cost
-                                marginal_cost =
-
+                                marginal_cost = (
+                                    float(
+                                        network.storage_units.loc[
+                                            su_name, "marginal_cost"
+                                        ]
+                                    )
+                                    if "marginal_cost" in network.storage_units.columns
+                                    else 0.0
+                                )
+
                                 # Calculate operational cost for this year
                                 operational_cost = discharge_mwh * marginal_cost
-
-                                if
-
-
+
+                                if (
+                                    carrier_name
+                                    in carrier_stats["operational_cost_by_carrier"]
+                                ):
+                                    carrier_stats["operational_cost_by_carrier"][
+                                        carrier_name
+                                    ] += operational_cost
+
             # 3. STORES - Operational costs (discharge only)
-            if hasattr(network,
+            if hasattr(network, "stores_t") and hasattr(network.stores_t, "p"):
                 # Get store info: carrier, marginal_cost, build_year, lifetime
-                cursor = conn.execute(
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'STORE'
+                    """
+                )
                 store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 # Filter store data for this specific year
-                year_stores = self._filter_timeseries_by_year(
+                year_stores = self._filter_timeseries_by_year(
+                    network.stores_t.p, network.snapshots, year
+                )
                 if year_stores is not None and not year_stores.empty:
                     for store_name in year_stores.columns:
                         if store_name in store_carriers:
                             carrier_name = store_carriers[store_name]
-
+
                             # Get build year and lifetime
-                            build_year =
-
-
+                            build_year = (
+                                network.stores.loc[store_name, "build_year"]
+                                if "build_year" in network.stores.columns
+                                else None
+                            )
+                            lifetime = (
+                                network.stores.loc[store_name, "lifetime"]
+                                if "lifetime" in network.stores.columns
+                                else None
+                            )
+
                             # Check if component is active in this year
                             if is_component_active(build_year, lifetime, year):
                                 # Calculate discharge for this year (positive values only)
-                                year_weightings = self._get_year_weightings(
+                                year_weightings = self._get_year_weightings(
+                                    network, year
+                                )
                                 if year_weightings is not None:
-                                    discharge_mwh = float(
+                                    discharge_mwh = float(
+                                        (
+                                            year_stores[store_name].clip(lower=0).values
+                                            * year_weightings
+                                        ).sum()
+                                    )
                                 else:
-                                    discharge_mwh = float(
-
+                                    discharge_mwh = float(
+                                        year_stores[store_name].clip(lower=0).sum()
+                                    )
+
                                 # Get marginal cost
-                                marginal_cost =
-
+                                marginal_cost = (
+                                    float(
+                                        network.stores.loc[store_name, "marginal_cost"]
+                                    )
+                                    if "marginal_cost" in network.stores.columns
+                                    else 0.0
+                                )
+
                                 # Calculate operational cost for this year
                                 operational_cost = discharge_mwh * marginal_cost
-
-
-
-
+
+                                if (
+                                    carrier_name
+                                    in carrier_stats["operational_cost_by_carrier"]
+                                ):
+                                    carrier_stats["operational_cost_by_carrier"][
+                                        carrier_name
+                                    ] += operational_cost
+
             # Calculate total system costs by carrier for this specific year
             # Total system cost = capital cost + operational cost
             for carrier_name in carrier_stats["capital_cost_by_carrier"]:
                 capital_cost = carrier_stats["capital_cost_by_carrier"][carrier_name]
-                operational_cost = carrier_stats["operational_cost_by_carrier"][
+                operational_cost = carrier_stats["operational_cost_by_carrier"][
+                    carrier_name
+                ]
                 total_system_cost = capital_cost + operational_cost
-
+
                 if carrier_name in carrier_stats["total_system_cost_by_carrier"]:
-                    carrier_stats["total_system_cost_by_carrier"][
-
+                    carrier_stats["total_system_cost_by_carrier"][
+                        carrier_name
+                    ] = total_system_cost
+
             # Calculate capacity by carrier for this specific year
-
+
             # 4. GENERATORS - Power capacity (MW) (including UNMET_LOAD)
-            if hasattr(network,
+            if hasattr(network, "generators") and not network.generators.empty:
                 # Get generator-carrier mapping (include UNMET_LOAD)
-                cursor = conn.execute(
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name,
                            CASE
                                WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
@@ -1105,183 +1662,287 @@ class ResultStorage:
                            END as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
+                    """
+                )
                 generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for gen_name in network.generators.index:
                     if gen_name in generator_carriers:
                         carrier_name = generator_carriers[gen_name]
-
+
                         # Check if this generator is available in this year (build_year <= year)
                         is_available = True
-                        if
-                            build_year = network.generators.loc[gen_name,
+                        if "build_year" in network.generators.columns:
+                            build_year = network.generators.loc[gen_name, "build_year"]
                             if pd.notna(build_year) and int(build_year) > year:
                                 is_available = False
-
+
                         if is_available:
                             # Use p_nom_opt if available, otherwise p_nom
-                            if
-                                capacity_mw = float(
+                            if "p_nom_opt" in network.generators.columns:
+                                capacity_mw = float(
+                                    network.generators.loc[gen_name, "p_nom_opt"]
+                                )
                             else:
-                                capacity_mw =
-
-
-
-
+                                capacity_mw = (
+                                    float(network.generators.loc[gen_name, "p_nom"])
+                                    if "p_nom" in network.generators.columns
+                                    else 0.0
+                                )
+
+                            if (
+                                carrier_name
+                                in carrier_stats["power_capacity_by_carrier"]
+                            ):
+                                carrier_stats["power_capacity_by_carrier"][
+                                    carrier_name
+                                ] += capacity_mw
+
             # 2. STORAGE_UNITS - Power capacity (MW) + Energy capacity (MWh)
-            if hasattr(network,
+            if hasattr(network, "storage_units") and not network.storage_units.empty:
                 # Get storage unit-carrier mapping
-                cursor = conn.execute(
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'STORAGE_UNIT'
+                    """
+                )
                 storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for su_name in network.storage_units.index:
                     if su_name in storage_unit_carriers:
                         carrier_name = storage_unit_carriers[su_name]
-
+
                         # Check if this storage unit is available in this year
                         is_available = True
-                        if
-                            build_year = network.storage_units.loc[
+                        if "build_year" in network.storage_units.columns:
+                            build_year = network.storage_units.loc[
+                                su_name, "build_year"
+                            ]
                             if pd.notna(build_year) and int(build_year) > year:
                                 is_available = False
-
+
                         if is_available:
                             # Power capacity (MW)
-                            if
-                                p_nom_opt = float(
+                            if "p_nom_opt" in network.storage_units.columns:
+                                p_nom_opt = float(
+                                    network.storage_units.loc[su_name, "p_nom_opt"]
+                                )
                             else:
-                                p_nom_opt =
-
-
-
-
+                                p_nom_opt = (
+                                    float(network.storage_units.loc[su_name, "p_nom"])
+                                    if "p_nom" in network.storage_units.columns
+                                    else 0.0
+                                )
+
+                            if (
+                                carrier_name
+                                in carrier_stats["power_capacity_by_carrier"]
+                            ):
+                                carrier_stats["power_capacity_by_carrier"][
+                                    carrier_name
+                                ] += p_nom_opt
+
                             # Energy capacity (MWh) using max_hours
                             max_hours = 1.0  # Default
-                            if
-                                max_hours = float(
+                            if "max_hours" in network.storage_units.columns:
+                                max_hours = float(
+                                    network.storage_units.loc[su_name, "max_hours"]
+                                )
                             energy_capacity_mwh = p_nom_opt * max_hours
-
-                            if
-
-
+
+                            if (
+                                carrier_name
+                                in carrier_stats["energy_capacity_by_carrier"]
+                            ):
+                                carrier_stats["energy_capacity_by_carrier"][
+                                    carrier_name
+                                ] += energy_capacity_mwh
+
             # 3. STORES - Energy capacity (MWh) only
-            if hasattr(network,
+            if hasattr(network, "stores") and not network.stores.empty:
                 # Get store-carrier mapping
-                cursor = conn.execute(
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'STORE'
+                    """
+                )
                 store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for store_name in network.stores.index:
                     if store_name in store_carriers:
                         carrier_name = store_carriers[store_name]
-
+
                         # Check if this store is available in this year
                         is_available = True
-                        if
-                            build_year = network.stores.loc[store_name,
+                        if "build_year" in network.stores.columns:
+                            build_year = network.stores.loc[store_name, "build_year"]
                             if pd.notna(build_year) and int(build_year) > year:
                                 is_available = False
-
+
                         if is_available:
                             # Energy capacity (MWh)
-                            if
-                                capacity_mwh = float(
+                            if "e_nom_opt" in network.stores.columns:
+                                capacity_mwh = float(
+                                    network.stores.loc[store_name, "e_nom_opt"]
+                                )
                             else:
-                                capacity_mwh =
-
-
-
-
+                                capacity_mwh = (
+                                    float(network.stores.loc[store_name, "e_nom"])
+                                    if "e_nom" in network.stores.columns
+                                    else 0.0
+                                )
+
+                            if (
+                                carrier_name
+                                in carrier_stats["energy_capacity_by_carrier"]
+                            ):
+                                carrier_stats["energy_capacity_by_carrier"][
+                                    carrier_name
+                                ] += capacity_mwh
+
             # 4. LINES - Apparent power capacity (MVA -> MW)
-            if hasattr(network,
+            if hasattr(network, "lines") and not network.lines.empty:
                 # Get line-carrier mapping
-                cursor = conn.execute(
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'LINE'
+                    """
+                )
                 line_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for line_name in network.lines.index:
                     if line_name in line_carriers:
                         carrier_name = line_carriers[line_name]
-
+
                         # Check if this line is available in this year
                         is_available = True
-                        if
-                            build_year = network.lines.loc[line_name,
+                        if "build_year" in network.lines.columns:
+                            build_year = network.lines.loc[line_name, "build_year"]
                             if pd.notna(build_year) and int(build_year) > year:
                                 is_available = False
-
+
                         if is_available:
                             # Apparent power capacity (MVA -> MW, assume power factor = 1)
-                            if
-                                capacity_mva = float(
+                            if "s_nom_opt" in network.lines.columns:
+                                capacity_mva = float(
+                                    network.lines.loc[line_name, "s_nom_opt"]
+                                )
                             else:
-                                capacity_mva =
-
+                                capacity_mva = (
+                                    float(network.lines.loc[line_name, "s_nom"])
+                                    if "s_nom" in network.lines.columns
+                                    else 0.0
+                                )
+
                             capacity_mw = capacity_mva  # Convert MVA to MW
-
-                            if
-
-
+
+                            if (
+                                carrier_name
+                                in carrier_stats["power_capacity_by_carrier"]
+                            ):
+                                carrier_stats["power_capacity_by_carrier"][
+                                    carrier_name
+                                ] += capacity_mw
+
             # 5. LINKS - Power capacity (MW)
-            if hasattr(network,
+            if hasattr(network, "links") and not network.links.empty:
                 # Get link-carrier mapping
-                cursor = conn.execute(
+                cursor = conn.execute(
+                    """
                     SELECT c.name as component_name, carr.name as carrier_name
                     FROM components c
                     JOIN carriers carr ON c.carrier_id = carr.id
-                    WHERE c.
-                    """
+                    WHERE c.component_type = 'LINK'
+                    """
+                )
                 link_carriers = {row[0]: row[1] for row in cursor.fetchall()}
-
+
                 for link_name in network.links.index:
                     if link_name in link_carriers:
                         carrier_name = link_carriers[link_name]
-
+
                         # Check if this link is available in this year
                         is_available = True
-                        if
-                            build_year = network.links.loc[link_name,
+                        if "build_year" in network.links.columns:
+                            build_year = network.links.loc[link_name, "build_year"]
                             if pd.notna(build_year) and int(build_year) > year:
                                 is_available = False
-
+
                         if is_available:
                             # Power capacity (MW)
-                            if
-                                capacity_mw = float(
+                            if "p_nom_opt" in network.links.columns:
+                                capacity_mw = float(
+                                    network.links.loc[link_name, "p_nom_opt"]
+                                )
                             else:
-                                capacity_mw =
-
-
-
-
+                                capacity_mw = (
+                                    float(network.links.loc[link_name, "p_nom"])
+                                    if "p_nom" in network.links.columns
+                                    else 0.0
+                                )
+
+                            if (
+                                carrier_name
+                                in carrier_stats["power_capacity_by_carrier"]
+                            ):
+                                carrier_stats["power_capacity_by_carrier"][
+                                    carrier_name
+                                ] += capacity_mw
+
             logger.info(f"Calculated year {year} carrier statistics:")
-            logger.info(
-
-
-            logger.info(
-
-
-            logger.info(
+            logger.info(
+                f" Dispatch: {sum(carrier_stats['dispatch_by_carrier'].values()):.2f} MWh"
+            )
+            logger.info(
+                f" Emissions: {sum(carrier_stats['emissions_by_carrier'].values()):.2f} tonnes CO2"
+            )
+            logger.info(
+                f" Capital cost: {sum(carrier_stats['capital_cost_by_carrier'].values()):.2f} USD"
+            )
+            logger.info(
+                f" Operational cost: {sum(carrier_stats['operational_cost_by_carrier'].values()):.2f} USD"
+            )
+            logger.info(
+                f" Total system cost: {sum(carrier_stats['total_system_cost_by_carrier'].values()):.2f} USD"
+            )
+            logger.info(
+                f" Power capacity: {sum(carrier_stats['power_capacity_by_carrier'].values()):.2f} MW"
+            )
+            logger.info(
+                f" Energy capacity: {sum(carrier_stats['energy_capacity_by_carrier'].values()):.2f} MWh"
+            )

+            # Detailed logging for year-specific statistics
+            logger.info(f" 📊 Year {year} detailed carrier stats (RAW VALUES):")
+            for carrier in sorted(all_carriers_with_unmet):
+                dispatch = carrier_stats['dispatch_by_carrier'].get(carrier, 0.0)
+                emissions = carrier_stats['emissions_by_carrier'].get(carrier, 0.0)
+                capital_cost = carrier_stats['capital_cost_by_carrier'].get(carrier, 0.0)
+                operational_cost = carrier_stats['operational_cost_by_carrier'].get(carrier, 0.0)
+                total_cost = carrier_stats['total_system_cost_by_carrier'].get(carrier, 0.0)
+
+                if dispatch > 0.001 or emissions > 0.001 or capital_cost > 0.001 or operational_cost > 0.001:
+                    logger.info(f" {carrier}: dispatch={dispatch:.2f} MWh, emissions={emissions:.2f} tCO2, "
+                                f"capital={capital_cost:.2f} USD, operational={operational_cost:.2f} USD, "
+                                f"total={total_cost:.2f} USD")
+
             return carrier_stats
-
+
         except Exception as e:
-            logger.error(
+            logger.error(
+                f"Failed to calculate year {year} carrier statistics: {e}",
+                exc_info=True,
+            )
             return {
                 "dispatch_by_carrier": {},
                 "power_capacity_by_carrier": {},
@@ -1289,17 +1950,17 @@ class ResultStorage:
                 "emissions_by_carrier": {},
                 "capital_cost_by_carrier": {},
                 "operational_cost_by_carrier": {},
-                "total_system_cost_by_carrier": {}
+                "total_system_cost_by_carrier": {},
             }
-
-    def _sum_year_based_carrier_statistics(self, conn
+
+    def _sum_year_based_carrier_statistics(self, conn) -> Dict[str, Any]:
         """
-        Sum up per-year carrier statistics for accurate multi-year totals.
+        Sum up per-year carrier statistics for accurate multi-year totals (single network per database).
         For capacity: take the LAST YEAR (final capacity) instead of maximum.
         """
         try:
             import json
-
+
             # Initialize totals
             totals = {
                 "dispatch_by_carrier": {},
@@ -1308,17 +1969,19 @@ class ResultStorage:
                 "emissions_by_carrier": {},
                 "capital_cost_by_carrier": {},
                 "operational_cost_by_carrier": {},
-                "total_system_cost_by_carrier": {}
+                "total_system_cost_by_carrier": {},
             }
-
+
             # Get all carriers from database
-            cursor = conn.execute(
-
-
+            cursor = conn.execute(
+                """
+                SELECT DISTINCT name FROM carriers
+                """
+            )
             all_carriers = [row[0] for row in cursor.fetchall()]
-
+
             # Initialize all carriers with zero values (including special "Unmet Load" carrier)
-            all_carriers_with_unmet = all_carriers + [
+            all_carriers_with_unmet = all_carriers + ["Unmet Load"]
             for carrier in all_carriers_with_unmet:
                 totals["dispatch_by_carrier"][carrier] = 0.0
                 totals["power_capacity_by_carrier"][carrier] = 0.0
@@ -1327,99 +1990,136 @@ class ResultStorage:
                 totals["capital_cost_by_carrier"][carrier] = 0.0
                 totals["operational_cost_by_carrier"][carrier] = 0.0
                 totals["total_system_cost_by_carrier"][carrier] = 0.0
-
+
             # Get all year-based results, ordered by year
-            cursor = conn.execute(
+            cursor = conn.execute(
+                """
                 SELECT year, results_json FROM network_solve_results_by_year
-                WHERE network_id = ?
                 ORDER BY year
-                """
-
+                """
+            )
+
             year_results = cursor.fetchall()
-            logger.info(f"Found {len(year_results)} year-based results to sum
-
+            logger.info(f"Found {len(year_results)} year-based results to sum")
+
             if not year_results:
-                logger.warning(f"No year-based results found
+                logger.warning(f"No year-based results found")
                 return totals
-
+
             # For capacity: use the LAST YEAR only (final capacity state)
             last_year, last_results_json = year_results[-1]
-
+
             try:
                 results = json.loads(last_results_json)
-                network_stats = results.get(
-                custom_stats = network_stats.get(
-
+                network_stats = results.get("network_statistics", {})
+                custom_stats = network_stats.get("custom_statistics", {})
+
                 # Use last year's capacity as the all-year capacity
-                power_capacity_by_carrier = custom_stats.get(
+                power_capacity_by_carrier = custom_stats.get(
+                    "power_capacity_by_carrier", {}
+                )
                 for carrier, value in power_capacity_by_carrier.items():
                     if carrier in totals["power_capacity_by_carrier"]:
                         totals["power_capacity_by_carrier"][carrier] = float(value or 0)
-
-                energy_capacity_by_carrier = custom_stats.get(
+
+                energy_capacity_by_carrier = custom_stats.get(
+                    "energy_capacity_by_carrier", {}
+                )
                 for carrier, value in energy_capacity_by_carrier.items():
                     if carrier in totals["energy_capacity_by_carrier"]:
-                        totals["energy_capacity_by_carrier"][carrier] = float(
-
-
-
+                        totals["energy_capacity_by_carrier"][carrier] = float(
+                            value or 0
+                        )
+
+                logger.info(
+                    f"Used last year ({last_year}) capacity as all-year capacity"
+                )
+
             except Exception as e:
                 logger.error(f"Failed to process last year ({last_year}) results: {e}")
-
+
             # For other stats (dispatch, emissions, costs): sum across all years
             for year, results_json in year_results:
                 try:
                     results = json.loads(results_json)
-                    network_stats = results.get(
-                    custom_stats = network_stats.get(
-
+                    network_stats = results.get("network_statistics", {})
+                    custom_stats = network_stats.get("custom_statistics", {})
+
                     # Sum dispatch (energy values - sum across years)
-                    dispatch_by_carrier = custom_stats.get(
+                    dispatch_by_carrier = custom_stats.get("dispatch_by_carrier", {})
                     for carrier, value in dispatch_by_carrier.items():
                         if carrier in totals["dispatch_by_carrier"]:
                             totals["dispatch_by_carrier"][carrier] += float(value or 0)
-
+
                     # Sum emissions (cumulative across years)
-                    emissions_by_carrier = custom_stats.get(
+                    emissions_by_carrier = custom_stats.get("emissions_by_carrier", {})
                     for carrier, value in emissions_by_carrier.items():
                         if carrier in totals["emissions_by_carrier"]:
                             totals["emissions_by_carrier"][carrier] += float(value or 0)
-
+
                     # Sum capital costs (cumulative across years)
-                    capital_cost_by_carrier = custom_stats.get(
+                    capital_cost_by_carrier = custom_stats.get(
+                        "capital_cost_by_carrier", {}
+                    )
                     for carrier, value in capital_cost_by_carrier.items():
                         if carrier in totals["capital_cost_by_carrier"]:
-                            totals["capital_cost_by_carrier"][carrier] += float(
-
+                            totals["capital_cost_by_carrier"][carrier] += float(
+                                value or 0
+                            )
+
                     # Sum operational costs (cumulative across years)
-                    operational_cost_by_carrier = custom_stats.get(
+                    operational_cost_by_carrier = custom_stats.get(
+                        "operational_cost_by_carrier", {}
+                    )
                     for carrier, value in operational_cost_by_carrier.items():
                         if carrier in totals["operational_cost_by_carrier"]:
-                            totals["operational_cost_by_carrier"][carrier] += float(
-
+                            totals["operational_cost_by_carrier"][carrier] += float(
+                                value or 0
+                            )
+
                     # Sum total system costs (cumulative across years)
-                    total_system_cost_by_carrier = custom_stats.get(
+                    total_system_cost_by_carrier = custom_stats.get(
+                        "total_system_cost_by_carrier", {}
+                    )
                     for carrier, value in total_system_cost_by_carrier.items():
                         if carrier in totals["total_system_cost_by_carrier"]:
-                            totals["total_system_cost_by_carrier"][carrier] += float(
-
+                            totals["total_system_cost_by_carrier"][carrier] += float(
+                                value or 0
+                            )
+
                 except Exception as e:
                     logger.error(f"Failed to process year {year} results: {e}")
                     continue
-
+
             logger.info(f"Summed carrier statistics across {len(year_results)} years:")
-            logger.info(
-
-
-            logger.info(
-
-
-            logger.info(
-
+            logger.info(
+                f" Final power capacity: {sum(totals['power_capacity_by_carrier'].values()):.2f} MW"
+            )
+            logger.info(
+                f" Final energy capacity: {sum(totals['energy_capacity_by_carrier'].values()):.2f} MWh"
+            )
+            logger.info(
+                f" Total dispatch: {sum(totals['dispatch_by_carrier'].values()):.2f} MWh"
+            )
+            logger.info(
+                f" Total emissions: {sum(totals['emissions_by_carrier'].values()):.2f} tonnes CO2"
+            )
+            logger.info(
+                f" Total capital cost: {sum(totals['capital_cost_by_carrier'].values()):.2f} USD"
+            )
+            logger.info(
+                f" Total operational cost: {sum(totals['operational_cost_by_carrier'].values()):.2f} USD"
+            )
+            logger.info(
+                f" Total system cost: {sum(totals['total_system_cost_by_carrier'].values()):.2f} USD"
+            )
+
             return totals
-
+
         except Exception as e:
-            logger.error(
+            logger.error(
+                f"Failed to sum year-based carrier statistics: {e}", exc_info=True
+            )
             # Return empty structure on error
             return {
                 "dispatch_by_carrier": {},
@@ -1428,12 +2128,13 @@ class ResultStorage:
                 "emissions_by_carrier": {},
                 "capital_cost_by_carrier": {},
                 "operational_cost_by_carrier": {},
-                "total_system_cost_by_carrier": {}
+                "total_system_cost_by_carrier": {},
             }
-
+
     def _serialize_results_json(self, solve_result: Dict[str, Any]) -> str:
         """Serialize solve results to JSON string."""
         import json
+
         try:
             # Create a clean results dictionary
             results = {
@@ -1444,16 +2145,17 @@ class ResultStorage:
                 "solver_name": solve_result.get("solver_name", "unknown"),
                 "run_id": solve_result.get("run_id"),
                 "network_statistics": solve_result.get("network_statistics", {}),
-                "pypsa_result": solve_result.get("pypsa_result", {})
+                "pypsa_result": solve_result.get("pypsa_result", {}),
             }
             return json.dumps(results, default=self._json_serializer)
         except Exception as e:
             logger.warning(f"Failed to serialize results JSON: {e}")
             return json.dumps({"error": "serialization_failed"})
-
+
     def _serialize_metadata_json(self, solve_result: Dict[str, Any]) -> str:
         """Serialize solve metadata to JSON string."""
         import json
+
         try:
             metadata = {
                 "solver_name": solve_result.get("solver_name", "unknown"),
@@ -1461,103 +2163,107 @@ class ResultStorage:
                 "multi_period": solve_result.get("multi_period", False),
                 "years": solve_result.get("years", []),
                 "network_name": solve_result.get("network_name"),
-                "num_snapshots": solve_result.get("num_snapshots", 0)
+                "num_snapshots": solve_result.get("num_snapshots", 0),
             }
             return json.dumps(metadata, default=self._json_serializer)
         except Exception as e:
             logger.warning(f"Failed to serialize metadata JSON: {e}")
             return json.dumps({"error": "serialization_failed"})
-
-    def _filter_timeseries_by_year(
+
+    def _filter_timeseries_by_year(
+        self, timeseries_df: "pd.DataFrame", snapshots: "pd.Index", year: int
+    ) -> "pd.DataFrame":
         """Filter timeseries data by year"""
         try:
             # Handle MultiIndex case (multi-period optimization)
-            if hasattr(snapshots,
+            if hasattr(snapshots, "levels"):
                 period_values = snapshots.get_level_values(0)
                 year_mask = period_values == year
                 if year_mask.any():
                     year_snapshots = snapshots[year_mask]
                     return timeseries_df.loc[year_snapshots]
-
+
             # Handle DatetimeIndex case (regular time series)
-            elif hasattr(snapshots,
+            elif hasattr(snapshots, "year"):
                 year_mask = snapshots.year == year
                 if year_mask.any():
                     return timeseries_df.loc[year_mask]
-
+
             # Fallback - return None if can't filter
             return None
-
+
         except Exception as e:
             logger.error(f"Failed to filter timeseries by year {year}: {e}")
             return None
-
-    def _get_year_weightings(self, network:
+
+    def _get_year_weightings(self, network: "pypsa.Network", year: int) -> "np.ndarray":
         """Get snapshot weightings for a specific year"""
         try:
             # Filter snapshot weightings by year
-            if hasattr(network.snapshots,
+            if hasattr(network.snapshots, "levels"):
                 period_values = network.snapshots.get_level_values(0)
                 year_mask = period_values == year
                 if year_mask.any():
                     year_snapshots = network.snapshots[year_mask]
                     year_weightings = network.snapshot_weightings.loc[year_snapshots]
                     if isinstance(year_weightings, pd.DataFrame):
-                        if
-                            return year_weightings[
+                        if "objective" in year_weightings.columns:
+                            return year_weightings["objective"].values
                         else:
                             return year_weightings.iloc[:, 0].values
                     else:
                         return year_weightings.values
-
-            elif hasattr(network.snapshots,
+
+            elif hasattr(network.snapshots, "year"):
                 year_mask = network.snapshots.year == year
                 if year_mask.any():
                     year_weightings = network.snapshot_weightings.loc[year_mask]
                     if isinstance(year_weightings, pd.DataFrame):
-                        if
-                            return year_weightings[
+                        if "objective" in year_weightings.columns:
+                            return year_weightings["objective"].values
                         else:
                             return year_weightings.iloc[:, 0].values
                     else:
                         return year_weightings.values
-
+
             return None
-
+
         except Exception as e:
             logger.error(f"Failed to get year weightings for year {year}: {e}")
             return None
-
-    def _calculate_total_demand(self, network:
+
+    def _calculate_total_demand(self, network: "pypsa.Network") -> float:
         """Calculate total demand from loads in the network"""
         try:
             total_demand = 0.0
-
+
             # Calculate demand from loads
-            if hasattr(network,
+            if hasattr(network, "loads_t") and hasattr(network.loads_t, "p"):
                 # Apply snapshot weightings to convert MW to MWh
                 weightings = network.snapshot_weightings
                 if isinstance(weightings, pd.DataFrame):
-                    if
-                        weighting_values = weightings[
+                    if "objective" in weightings.columns:
+                        weighting_values = weightings["objective"].values
                     else:
                         weighting_values = weightings.iloc[:, 0].values
                 else:
                     weighting_values = weightings.values
-
-                total_demand = float(
-
+
+                total_demand = float(
+                    (network.loads_t.p.values * weighting_values[:, None]).sum()
+                )
+
             return total_demand
-
+
         except Exception as e:
             logger.error(f"Failed to calculate total demand: {e}")
             return 0.0
-
+
     def _json_serializer(self, obj):
         """Convert numpy/pandas types to JSON serializable types"""
         import numpy as np
         import pandas as pd
-
+
         if isinstance(obj, (np.integer, np.int64, np.int32)):
             return int(obj)
         elif isinstance(obj, (np.floating, np.float64, np.float32)):
@@ -1568,7 +2274,7 @@ class ResultStorage:
             return obj.to_dict()
         elif isinstance(obj, pd.DataFrame):
             return obj.to_dict()
-        elif hasattr(obj,
+        elif hasattr(obj, "item"):  # Handle numpy scalars
             return obj.item()
         else:
-            raise TypeError(f"Object of type {type(obj)} is not JSON serializable")
+            raise TypeError(f"Object of type {type(obj)} is not JSON serializable")