pyconvexity 0.4.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pyconvexity might be problematic.
- pyconvexity/__init__.py +241 -0
- pyconvexity/_version.py +1 -0
- pyconvexity/core/__init__.py +60 -0
- pyconvexity/core/database.py +485 -0
- pyconvexity/core/errors.py +106 -0
- pyconvexity/core/types.py +400 -0
- pyconvexity/dashboard.py +265 -0
- pyconvexity/data/README.md +101 -0
- pyconvexity/data/__init__.py +17 -0
- pyconvexity/data/loaders/__init__.py +3 -0
- pyconvexity/data/loaders/cache.py +213 -0
- pyconvexity/data/schema/01_core_schema.sql +420 -0
- pyconvexity/data/schema/02_data_metadata.sql +120 -0
- pyconvexity/data/schema/03_validation_data.sql +507 -0
- pyconvexity/data/sources/__init__.py +5 -0
- pyconvexity/data/sources/gem.py +442 -0
- pyconvexity/io/__init__.py +26 -0
- pyconvexity/io/excel_exporter.py +1226 -0
- pyconvexity/io/excel_importer.py +1381 -0
- pyconvexity/io/netcdf_exporter.py +191 -0
- pyconvexity/io/netcdf_importer.py +1802 -0
- pyconvexity/models/__init__.py +195 -0
- pyconvexity/models/attributes.py +730 -0
- pyconvexity/models/carriers.py +159 -0
- pyconvexity/models/components.py +611 -0
- pyconvexity/models/network.py +503 -0
- pyconvexity/models/results.py +148 -0
- pyconvexity/models/scenarios.py +234 -0
- pyconvexity/solvers/__init__.py +29 -0
- pyconvexity/solvers/pypsa/__init__.py +30 -0
- pyconvexity/solvers/pypsa/api.py +446 -0
- pyconvexity/solvers/pypsa/batch_loader.py +296 -0
- pyconvexity/solvers/pypsa/builder.py +655 -0
- pyconvexity/solvers/pypsa/clearing_price.py +678 -0
- pyconvexity/solvers/pypsa/constraints.py +405 -0
- pyconvexity/solvers/pypsa/solver.py +1442 -0
- pyconvexity/solvers/pypsa/storage.py +2096 -0
- pyconvexity/timeseries.py +330 -0
- pyconvexity/validation/__init__.py +25 -0
- pyconvexity/validation/rules.py +312 -0
- pyconvexity-0.4.8.dist-info/METADATA +148 -0
- pyconvexity-0.4.8.dist-info/RECORD +44 -0
- pyconvexity-0.4.8.dist-info/WHEEL +5 -0
- pyconvexity-0.4.8.dist-info/top_level.txt +1 -0
pyconvexity/solvers/pypsa/storage.py
@@ -0,0 +1,2096 @@
"""
Result storage functionality for PyPSA solver integration.

Handles storing solve results back to the database with proper validation and error handling.
"""

import logging
import uuid
from typing import Any, Callable, Dict, Optional

import numpy as np
import pandas as pd

from pyconvexity.core.types import StaticValue
from pyconvexity.models import (
    list_components_by_type,
    set_static_attribute,
    set_timeseries_attribute,
)
from pyconvexity.validation import get_validation_rule

logger = logging.getLogger(__name__)

class ResultStorage:
    """
    Handles storing PyPSA solve results back to the database.

    This class manages the complex process of extracting results from PyPSA networks
    and storing them back to the database with proper validation and error handling.
    """

    def __init__(self, verbose: bool = False):
        """
        Initialize ResultStorage.

        Args:
            verbose: Enable detailed logging output
        """
        self.verbose = verbose

    def store_results(
        self,
        conn,
        network: "pypsa.Network",
        solve_result: Dict[str, Any],
        scenario_id: Optional[int] = None,
    ) -> Dict[str, Any]:
        """
        Store complete solve results back to database (single network per database).

        Args:
            conn: Database connection
            network: Solved PyPSA Network object
            solve_result: Solve result metadata
            scenario_id: Optional scenario ID (NULL for base network)

        Returns:
            Dictionary with storage statistics
        """
        run_id = solve_result.get("run_id", str(uuid.uuid4()))

        try:
            # Store component results
            component_stats = self._store_component_results(conn, network, scenario_id)

            # Calculate and store clearing prices for all buses
            clearing_prices_stored = self._calculate_and_store_clearing_prices(
                conn, network, scenario_id
            )
            component_stats["stored_clearing_prices"] = clearing_prices_stored

            # Calculate network statistics first
            network_stats = self._calculate_network_statistics(
                conn, network, solve_result
            )

            # Store solve summary with network statistics
            self._store_solve_summary(conn, solve_result, scenario_id, network_stats)
            conn.commit()

            # Store year-based statistics if available
            year_stats_stored = 0
            if solve_result.get("year_statistics"):
                year_stats_stored = self._store_year_based_statistics(
                    conn, network, solve_result["year_statistics"], scenario_id
                )
                conn.commit()

            return {
                "component_stats": component_stats,
                "network_stats": network_stats,
                "year_stats_stored": year_stats_stored,
                "run_id": run_id,
                "success": True,
            }

        except Exception as e:
            return {
                "component_stats": {},
                "network_stats": {},
                "run_id": run_id,
                "success": False,
                "error": str(e),
            }

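    # Illustrative usage sketch (not part of the original module): assuming a
    # solved pypsa.Network `n`, a database connection `conn`, and solve
    # metadata from the solver wrapper, results are persisted roughly so:
    #
    #     storage = ResultStorage(verbose=True)
    #     outcome = storage.store_results(conn, n, {"status": "optimal"})
    #     if not outcome["success"]:
    #         print(outcome["error"])
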
    def _store_component_results(
        self, conn, network: "pypsa.Network", scenario_id: Optional[int]
    ) -> Dict[str, int]:
        """Store results for all component types (single network per database)."""
        results_stats = {
            "stored_bus_results": 0,
            "stored_generator_results": 0,
            "stored_unmet_load_results": 0,
            "stored_load_results": 0,
            "stored_line_results": 0,
            "stored_link_results": 0,
            "stored_storage_unit_results": 0,
            "stored_store_results": 0,
            "skipped_attributes": 0,
            "errors": 0,
        }

        try:
            # Store bus results
            if hasattr(network, "buses_t") and network.buses_t:
                results_stats["stored_bus_results"] = (
                    self._store_component_type_results(
                        conn, "BUS", network.buses, network.buses_t, scenario_id
                    )
                )

            # Store generator results (includes regular generators)
            if hasattr(network, "generators_t") and network.generators_t:
                results_stats["stored_generator_results"] = (
                    self._store_component_type_results(
                        conn,
                        "GENERATOR",
                        network.generators,
                        network.generators_t,
                        scenario_id,
                    )
                )

            # Store UNMET_LOAD results (these are also stored as generators in PyPSA)
            results_stats["stored_unmet_load_results"] = (
                self._store_component_type_results(
                    conn,
                    "UNMET_LOAD",
                    network.generators,
                    network.generators_t,
                    scenario_id,
                )
            )

            # Store load results
            if hasattr(network, "loads_t") and network.loads_t:
                results_stats["stored_load_results"] = (
                    self._store_component_type_results(
                        conn, "LOAD", network.loads, network.loads_t, scenario_id
                    )
                )

            # Store line results
            if hasattr(network, "lines_t") and network.lines_t:
                results_stats["stored_line_results"] = (
                    self._store_component_type_results(
                        conn, "LINE", network.lines, network.lines_t, scenario_id
                    )
                )

            # Store link results
            if hasattr(network, "links_t") and network.links_t:
                results_stats["stored_link_results"] = (
                    self._store_component_type_results(
                        conn, "LINK", network.links, network.links_t, scenario_id
                    )
                )

            # Store storage unit results
            if hasattr(network, "storage_units_t") and network.storage_units_t:
                results_stats["stored_storage_unit_results"] = (
                    self._store_component_type_results(
                        conn,
                        "STORAGE_UNIT",
                        network.storage_units,
                        network.storage_units_t,
                        scenario_id,
                    )
                )

            # Store store results
            if hasattr(network, "stores_t") and network.stores_t:
                results_stats["stored_store_results"] = (
                    self._store_component_type_results(
                        conn, "STORE", network.stores, network.stores_t, scenario_id
                    )
                )

            return results_stats

        except Exception:
            results_stats["errors"] += 1
            return results_stats

    def _calculate_and_store_clearing_prices(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int],
    ) -> int:
        """
        Calculate and store clearing prices for all buses.

        The clearing price at each bus is the pay-as-clear price: the marginal
        cost of the cheapest source (generator, storage, or import) with spare
        capacity. This differs from the marginal_price (LP shadow price).

        Args:
            conn: Database connection
            network: Solved PyPSA Network object
            scenario_id: Scenario ID for result storage

        Returns:
            Number of buses with clearing prices stored
        """
        logger.info(
            f"=== CALCULATING AND STORING CLEARING PRICES (scenario_id={scenario_id}) ==="
        )

        try:
            from .clearing_price import ClearingPriceCalculator

            calculator = ClearingPriceCalculator(verbose=True)
            clearing_prices = calculator.calculate_all_buses(conn, network, scenario_id)

            if not clearing_prices:
                logger.warning("No clearing prices calculated - clearing_prices dict is empty")
                return 0

            # Log what we got from the calculator
            logger.info(f"Clearing prices calculated for {len(clearing_prices)} buses")
            for bus_name, prices in clearing_prices.items():
                n_zeros = np.sum(prices == 0)
                n_inf = np.sum(np.isinf(prices))
                valid = prices[(prices > 0) & np.isfinite(prices)]
                if len(valid) > 0:
                    logger.info(
                        f"  {bus_name}: {len(prices)} periods, mean=£{np.mean(valid):.2f}, "
                        f"zeros={n_zeros}, inf={n_inf}, range=[£{np.min(valid):.2f}, £{np.max(valid):.2f}]"
                    )
                else:
                    logger.warning(
                        f"  {bus_name}: {len(prices)} periods, ALL ZERO OR INF "
                        f"(zeros={n_zeros}, inf={n_inf})"
                    )

            # Get bus component IDs
            buses = list_components_by_type(conn, "BUS")
            bus_name_to_id = {bus.name: bus.id for bus in buses}
            logger.info(f"Found {len(buses)} buses in database: {list(bus_name_to_id.keys())}")

            stored_count = 0
            for bus_name, prices in clearing_prices.items():
                if bus_name not in bus_name_to_id:
                    logger.warning(f"  {bus_name}: not found in database - skipping")
                    continue

                bus_id = bus_name_to_id[bus_name]
                values = [float(p) if np.isfinite(p) else 0.0 for p in prices]

                # Log what we're about to store
                n_zeros = sum(1 for v in values if v == 0)
                if n_zeros > 0:
                    logger.warning(f"  {bus_name}: storing {len(values)} values, {n_zeros} zeros")

                try:
                    set_timeseries_attribute(
                        conn, bus_id, "clearing_price", values, scenario_id
                    )
                    stored_count += 1
                    logger.info(f"  ✅ {bus_name} (id={bus_id}): stored {len(values)} clearing prices")
                except Exception as e:
                    logger.error(f"  ❌ {bus_name} (id={bus_id}): failed to store clearing_price: {e}")
                    import traceback

                    traceback.print_exc()
                    continue

            logger.info(f"=== CLEARING PRICES: Stored {stored_count}/{len(clearing_prices)} buses ===")
            return stored_count

        except ImportError as e:
            logger.error(
                f"ClearingPriceCalculator not available - skipping clearing price calculation: {e}"
            )
            return 0
        except Exception as e:
            logger.error(f"Failed to calculate/store clearing prices: {e}")
            import traceback

            traceback.print_exc()
            return 0

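    # Worked example of the pay-as-clear rule described above (illustrative
    # numbers): if a bus is served by wind at £0/MWh (fully dispatched), CCGT
    # at £60/MWh (spare capacity), and OCGT at £120/MWh (idle), the clearing
    # price for that period is £60/MWh, the cheapest source with headroom,
    # whereas the LP marginal_price (shadow price) can differ whenever a
    # binding constraint elsewhere sets the dual value.
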
    def _store_component_type_results(
        self,
        conn,
        component_type: str,
        static_df: pd.DataFrame,
        timeseries_dict: Dict[str, pd.DataFrame],
        scenario_id: Optional[int],
    ) -> int:
        """Store results for a specific component type - only store OUTPUT attributes (single network per database)."""
        stored_count = 0

        try:
            # Get component name to ID mapping
            components = list_components_by_type(conn, component_type)
            name_to_id = {comp.name: comp.id for comp in components}

            # Store timeseries results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
            for attr_name, timeseries_df in timeseries_dict.items():
                if timeseries_df.empty:
                    continue

                # Check if this attribute is an output attribute (not an input)
                try:
                    rule = get_validation_rule(conn, component_type, attr_name)
                    if rule.is_input:
                        # Skip input attributes to preserve original input data
                        continue
                except Exception:
                    # If no validation rule found, skip to be safe
                    continue

                for component_name in timeseries_df.columns:
                    if component_name not in name_to_id:
                        continue

                    component_id = name_to_id[component_name]
                    component_series = timeseries_df[component_name]

                    # Skip if all values are NaN
                    if component_series.isna().all():
                        continue

                    # Convert to efficient values array, filling NaN with 0.0
                    values = [
                        0.0 if pd.isna(value) else float(value)
                        for value in component_series.values
                    ]

                    if not values:
                        continue

                    # Store using efficient format; any failure (missing
                    # validation rule, disallowed attribute) skips the series
                    try:
                        set_timeseries_attribute(
                            conn, component_id, attr_name, values, scenario_id
                        )
                        stored_count += 1
                    except Exception:
                        continue

            # Store static optimization results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
            if not static_df.empty:
                for attr_name in static_df.columns:
                    # Check if this attribute is an output attribute (not an input)
                    try:
                        rule = get_validation_rule(conn, component_type, attr_name)
                        if rule.is_input:
                            # Skip input attributes to preserve original input data
                            continue
                    except Exception:
                        # If no validation rule found, skip to be safe
                        continue

                    for component_name, value in static_df[attr_name].items():
                        if component_name not in name_to_id or pd.isna(value):
                            continue

                        component_id = name_to_id[component_name]

                        try:
                            # Convert value to StaticValue. bool is checked
                            # before int because bool is a subclass of int.
                            if isinstance(value, bool):
                                static_value = StaticValue(bool(value))
                            elif isinstance(value, (int, np.integer)):
                                static_value = StaticValue(int(value))
                            elif isinstance(value, (float, np.floating)):
                                if np.isfinite(value):
                                    static_value = StaticValue(float(value))
                                else:
                                    continue  # Skip infinite/NaN values
                            else:
                                static_value = StaticValue(str(value))

                            # Store using atomic utility
                            set_static_attribute(
                                conn, component_id, attr_name, static_value, scenario_id
                            )
                            stored_count += 1

                        except Exception:
                            # Validation errors are skipped like any other failure
                            continue

        except Exception:
            pass

        return stored_count

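    # Note on the is_input gate above: solver outputs (e.g. p, p_nom_opt) are
    # written back, while input attributes (e.g. marginal_cost) are never
    # overwritten, so the same database can be re-solved repeatedly from
    # unchanged input data. NaNs are stored as 0.0 rather than dropped so each
    # stored series stays aligned with the network snapshots.
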
    def _store_solve_summary(
        self,
        conn,
        solve_result: Dict[str, Any],
        scenario_id: Optional[int],
        network_stats: Optional[Dict[str, Any]] = None,
    ):
        """Store solve summary to network_solve_results table (single network per database)."""
        try:
            # Prepare solve summary data
            solver_name = solve_result.get("solver_name", "unknown")
            solve_status = solve_result.get("status", "unknown")
            objective_value = solve_result.get("objective_value")
            solve_time = solve_result.get("solve_time", 0.0)

            # Create enhanced solve result with network statistics for serialization
            enhanced_solve_result = {
                **solve_result,
                "network_statistics": network_stats or {},
            }

            # Delete existing result for this scenario first (handles NULL scenario_id correctly)
            if scenario_id is None:
                conn.execute(
                    "DELETE FROM network_solve_results WHERE scenario_id IS NULL"
                )
            else:
                conn.execute(
                    "DELETE FROM network_solve_results WHERE scenario_id = ?",
                    (scenario_id,),
                )

            results_json = self._serialize_results_json(enhanced_solve_result)
            metadata_json = self._serialize_metadata_json(enhanced_solve_result)

            # Insert new solve results summary
            conn.execute(
                """
                INSERT INTO network_solve_results (
                    scenario_id, solver_name, solve_type, solve_status,
                    objective_value, solve_time_seconds, results_json, metadata_json
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                """,
                (
                    scenario_id,
                    solver_name,
                    "pypsa_optimization",
                    solve_status,
                    objective_value,
                    solve_time,
                    results_json,
                    metadata_json,
                ),
            )

        except Exception:
            raise  # Re-raise to trigger rollback

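    # The delete-then-insert pattern above is used instead of a single upsert
    # because SQL treats NULL as distinct from NULL: a UNIQUE constraint on
    # scenario_id cannot deduplicate base-network rows (scenario_id IS NULL),
    # but an explicit "WHERE scenario_id IS NULL" delete can.
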
    def _calculate_network_statistics(
        self, conn, network: "pypsa.Network", solve_result: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Calculate network-level statistics (totals, carrier breakdowns, runtime info) for a single network per database."""
        try:
            # Calculate carrier-specific statistics
            carrier_stats = self._calculate_carrier_statistics(conn, network)

            # Calculate basic network statistics
            total_cost = solve_result.get("objective_value", 0.0)
            total_generation_mwh = sum(
                carrier_stats.get("dispatch_by_carrier", {}).values()
            )
            total_emissions_tonnes = sum(
                carrier_stats.get("emissions_by_carrier", {}).values()
            )
            total_capital_cost = sum(
                carrier_stats.get("capital_cost_by_carrier", {}).values()
            )
            total_operational_cost = sum(
                carrier_stats.get("operational_cost_by_carrier", {}).values()
            )
            total_system_cost = sum(
                carrier_stats.get("total_system_cost_by_carrier", {}).values()
            )

            # Calculate unmet load statistics
            unmet_load_mwh = carrier_stats.get("dispatch_by_carrier", {}).get(
                "Unmet Load", 0.0
            )
            total_demand_mwh = self._calculate_total_demand(network)
            unmet_load_percentage = (
                (unmet_load_mwh / (total_demand_mwh + 1e-6)) * 100
                if total_demand_mwh > 0
                else 0.0
            )

            # Create nested structure expected by frontend
            network_statistics = {
                "core_summary": {
                    "total_generation_mwh": total_generation_mwh,
                    "total_demand_mwh": total_demand_mwh,
                    "total_cost": total_cost,
                    "load_factor": (
                        (total_demand_mwh / (total_generation_mwh + 1e-6))
                        if total_generation_mwh > 0
                        else 0.0
                    ),
                    "unserved_energy_mwh": unmet_load_mwh,
                },
                "custom_statistics": {
                    # Include carrier-specific statistics (capacity, dispatch, emissions, costs)
                    **carrier_stats,
                    "total_capital_cost": total_capital_cost,
                    "total_operational_cost": total_operational_cost,
                    "total_currency_cost": total_system_cost,  # Use calculated system cost instead of PyPSA objective
                    "total_emissions_tons_co2": total_emissions_tonnes,
                    "average_price_per_mwh": (
                        (total_system_cost / (total_generation_mwh + 1e-6))
                        if total_generation_mwh > 0
                        else 0.0
                    ),
                    "unmet_load_percentage": unmet_load_percentage,
                    "max_unmet_load_hour_mw": 0.0,  # TODO: Calculate max hourly unmet load later
                },
                "runtime_info": {
                    "component_count": (
                        (
                            len(network.buses)
                            + len(network.generators)
                            + len(network.loads)
                            + len(network.lines)
                            + len(network.links)
                        )
                        if hasattr(network, "buses")
                        else 0
                    ),
                    "bus_count": len(network.buses) if hasattr(network, "buses") else 0,
                    "generator_count": (
                        len(network.generators) if hasattr(network, "generators") else 0
                    ),
                    "load_count": (
                        len(network.loads) if hasattr(network, "loads") else 0
                    ),
                    "snapshot_count": (
                        len(network.snapshots) if hasattr(network, "snapshots") else 0
                    ),
                },
            }

            return network_statistics

        except Exception as e:
            # Return empty structure matching expected format
            return {
                "core_summary": {
                    "total_generation_mwh": 0.0,
                    "total_demand_mwh": 0.0,
                    "total_cost": solve_result.get("objective_value", 0.0),
                    "load_factor": 0.0,
                    "unserved_energy_mwh": 0.0,
                },
                "custom_statistics": {
                    "dispatch_by_carrier": {},
                    "power_capacity_by_carrier": {},
                    "energy_capacity_by_carrier": {},
                    "emissions_by_carrier": {},
                    "capital_cost_by_carrier": {},
                    "operational_cost_by_carrier": {},
                    "total_system_cost_by_carrier": {},
                    "total_capital_cost": 0.0,
                    "total_operational_cost": 0.0,
                    "total_currency_cost": 0.0,
                    "total_emissions_tons_co2": 0.0,
                    "average_price_per_mwh": 0.0,
                    "unmet_load_percentage": 0.0,
                    "max_unmet_load_hour_mw": 0.0,
                },
                "runtime_info": {
                    "component_count": 0,
                    "bus_count": 0,
                    "generator_count": 0,
                    "load_count": 0,
                    "snapshot_count": 0,
                },
                "error": str(e),
            }

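    # The nested shape above is the contract read by downstream consumers.
    # A hypothetical access pattern (carrier name assumed for illustration):
    #
    #     stats = self._calculate_network_statistics(conn, network, solve_result)
    #     stats["core_summary"]["total_generation_mwh"]
    #     stats["custom_statistics"]["dispatch_by_carrier"]["Wind"]
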
    def _calculate_carrier_statistics(
        self, conn, network: "pypsa.Network"
    ) -> Dict[str, Any]:
        """
        Calculate carrier-specific statistics directly from the network (single network per database).
        This is the primary calculation - per-year stats will be calculated separately.
        """
        try:
            # Calculate all-year statistics directly from the network.
            # Extract years from network snapshots.
            if hasattr(network.snapshots, "levels"):
                # Multi-period optimization - get years from period level
                period_values = network.snapshots.get_level_values(0)
                years = sorted(period_values.unique())
            elif hasattr(network.snapshots, "year"):
                years = sorted(network.snapshots.year.unique())
            elif hasattr(network, "_available_years"):
                years = network._available_years
            else:
                years = [2020]  # Fallback

            all_year_stats = {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

            # Get all carriers from the database
            cursor = conn.execute("SELECT DISTINCT name FROM carriers")
            all_carriers = [row[0] for row in cursor.fetchall()]

            # Initialize all carriers with zero values (including special "Unmet Load" carrier)
            all_carriers_with_unmet = all_carriers + ["Unmet Load"]
            for carrier in all_carriers_with_unmet:
                for key in all_year_stats:
                    all_year_stats[key][carrier] = 0.0

            # Calculate statistics for each year and sum them up
            for year in years:
                year_stats = self._calculate_year_carrier_statistics(
                    conn, network, year
                )

                # Sum up all the statistics (including "Unmet Load")
                for carrier in all_carriers_with_unmet:
                    # Sum dispatch, emissions, and costs across years
                    all_year_stats["dispatch_by_carrier"][carrier] += year_stats[
                        "dispatch_by_carrier"
                    ].get(carrier, 0.0)
                    all_year_stats["emissions_by_carrier"][carrier] += year_stats[
                        "emissions_by_carrier"
                    ].get(carrier, 0.0)
                    all_year_stats["capital_cost_by_carrier"][carrier] += year_stats[
                        "capital_cost_by_carrier"
                    ].get(carrier, 0.0)
                    all_year_stats["operational_cost_by_carrier"][
                        carrier
                    ] += year_stats["operational_cost_by_carrier"].get(carrier, 0.0)
                    all_year_stats["total_system_cost_by_carrier"][
                        carrier
                    ] += year_stats["total_system_cost_by_carrier"].get(carrier, 0.0)

                    # For capacity: use the last year (final capacity state)
                    if year == years[-1]:
                        all_year_stats["power_capacity_by_carrier"][carrier] = (
                            year_stats["power_capacity_by_carrier"].get(carrier, 0.0)
                        )
                        all_year_stats["energy_capacity_by_carrier"][carrier] = (
                            year_stats["energy_capacity_by_carrier"].get(carrier, 0.0)
                        )

            return all_year_stats

        except Exception:
            return {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

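    # Flows versus stocks: dispatch, emissions, and costs are summed across
    # years, while capacities are kept from the final year only. A 100 MW
    # plant present in both 2030 and 2040 therefore contributes 100 MW of
    # capacity (not 200 MW), even though its energy and costs accumulate
    # across both years.
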
    def _store_year_based_statistics(
        self,
        conn,
        network: "pypsa.Network",
        year_statistics: Dict[int, Dict[str, Any]],
        scenario_id: Optional[int],
    ) -> int:
        """Store year-based statistics to database (single network per database)."""
        try:
            import json

            stored_count = 0

            # Check if network_solve_results_by_year table exists, create if not
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS network_solve_results_by_year (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    scenario_id INTEGER,
                    year INTEGER NOT NULL,
                    results_json TEXT,
                    metadata_json TEXT,
                    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (scenario_id) REFERENCES scenarios(id),
                    UNIQUE(scenario_id, year)
                )
                """
            )

            for year, stats in year_statistics.items():
                try:
                    # Calculate proper year-specific carrier statistics
                    year_carrier_stats = self._calculate_year_carrier_statistics(
                        conn, network, year
                    )

                    # Merge year-specific carrier stats into the statistics
                    if "custom_statistics" in stats:
                        stats["custom_statistics"].update(year_carrier_stats)
                    else:
                        stats["custom_statistics"] = year_carrier_stats

                    # Wrap the year statistics in the same structure as overall results for consistency
                    year_result_wrapper = {
                        "success": True,
                        "year": year,
                        "network_statistics": stats,
                    }

                    metadata = {"year": year, "scenario_id": scenario_id}

                    # Delete existing result for this scenario+year first (handles NULL scenario_id correctly)
                    if scenario_id is None:
                        conn.execute(
                            """
                            DELETE FROM network_solve_results_by_year
                            WHERE scenario_id IS NULL AND year = ?
                            """,
                            (year,),
                        )
                    else:
                        conn.execute(
                            """
                            DELETE FROM network_solve_results_by_year
                            WHERE scenario_id = ? AND year = ?
                            """,
                            (scenario_id, year),
                        )

                    # Insert new year-based results
                    conn.execute(
                        """
                        INSERT INTO network_solve_results_by_year
                        (scenario_id, year, results_json, metadata_json)
                        VALUES (?, ?, ?, ?)
                        """,
                        (
                            scenario_id,
                            year,
                            json.dumps(
                                year_result_wrapper, default=self._json_serializer
                            ),
                            json.dumps(metadata, default=self._json_serializer),
                        ),
                    )

                    stored_count += 1

                except Exception:
                    continue

            return stored_count

        except Exception:
            return 0

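    # The upsert above leaves one row per (scenario_id, year); a hypothetical
    # consumer could read a scenario trajectory back with:
    #
    #     SELECT year, results_json
    #     FROM network_solve_results_by_year
    #     WHERE scenario_id IS NULL
    #     ORDER BY year;
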
    def _calculate_year_carrier_statistics(
        self, conn, network: "pypsa.Network", year: int
    ) -> Dict[str, Any]:
        """
        Calculate carrier-specific statistics (dispatch, capacity, emissions,
        and costs) for a specific year.
        """
        try:
            # Initialize carrier statistics
            carrier_stats = {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},  # MW - Generators + Storage Units (power) + Lines + Links
                "energy_capacity_by_carrier": {},  # MWh - Stores + Storage Units (energy)
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

            # Get all carriers from database
            cursor = conn.execute("SELECT DISTINCT name FROM carriers")
            all_carriers = [row[0] for row in cursor.fetchall()]

            # Initialize all carriers with zero values (including special "Unmet Load" carrier)
            all_carriers_with_unmet = all_carriers + ["Unmet Load"]
            for carrier in all_carriers_with_unmet:
                for key in carrier_stats:
                    carrier_stats[key][carrier] = 0.0

            # Calculate dispatch (generation) by carrier for this specific year

            # 1. GENERATORS - Generation dispatch (including UNMET_LOAD)
            if hasattr(network, "generators_t") and hasattr(network.generators_t, "p"):
                # Get generator-carrier mapping (include both GENERATOR and UNMET_LOAD)
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name,
                           CASE
                               WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
                               ELSE carr.name
                           END as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
                    """
                )
                generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter generation data for this specific year
                year_generation = self._filter_timeseries_by_year(
                    network.generators_t.p, network.snapshots, year
                )
                if year_generation is not None and not year_generation.empty:
                    for gen_name in year_generation.columns:
                        if gen_name in generator_carriers:
                            carrier_name = generator_carriers[gen_name]

                            # Calculate generation for this year (ALWAYS apply snapshot weightings to convert MW to MWh)
                            year_weightings = self._get_year_weightings(network, year)
                            if year_weightings is not None:
                                generation_mwh = float(
                                    (
                                        year_generation[gen_name].values
                                        * year_weightings
                                    ).sum()
                                )
                            else:
                                # Fallback: simple sum (will be incorrect for non-1H models)
                                generation_mwh = float(year_generation[gen_name].sum())

                            if carrier_name in carrier_stats["dispatch_by_carrier"]:
                                carrier_stats["dispatch_by_carrier"][
                                    carrier_name
                                ] += generation_mwh

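            # Snapshot weightings encode the hours each snapshot represents,
            # so power (MW) times weighting (h) yields energy (MWh). For
            # example, at 3-hour resolution a generator dispatching 100 MW in
            # one snapshot with weighting 3.0 contributes 300 MWh; an
            # unweighted sum would undercount it threefold.
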
            # 2. STORAGE_UNITS - Discharge only (positive values)
            if hasattr(network, "storage_units_t") and hasattr(
                network.storage_units_t, "p"
            ):
                # Get storage unit-carrier mapping
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORAGE_UNIT'
                    """
                )
                storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter storage unit data for this specific year
                year_storage = self._filter_timeseries_by_year(
                    network.storage_units_t.p, network.snapshots, year
                )
                if year_storage is not None and not year_storage.empty:
                    for su_name in year_storage.columns:
                        if su_name in storage_unit_carriers:
                            carrier_name = storage_unit_carriers[su_name]

                            # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
                            year_weightings = self._get_year_weightings(network, year)
                            if year_weightings is not None:
                                discharge_mwh = float(
                                    (
                                        year_storage[su_name].clip(lower=0).values
                                        * year_weightings
                                    ).sum()
                                )
                            else:
                                # Fallback: simple sum (will be incorrect for non-1H models)
                                discharge_mwh = float(
                                    year_storage[su_name].clip(lower=0).sum()
                                )

                            if carrier_name in carrier_stats["dispatch_by_carrier"]:
                                carrier_stats["dispatch_by_carrier"][
                                    carrier_name
                                ] += discharge_mwh

            # 3. STORES - Discharge only (positive values)
            if hasattr(network, "stores_t") and hasattr(network.stores_t, "p"):
                # Get store-carrier mapping
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORE'
                    """
                )
                store_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter store data for this specific year
                year_stores = self._filter_timeseries_by_year(
                    network.stores_t.p, network.snapshots, year
                )
                if year_stores is not None and not year_stores.empty:
                    for store_name in year_stores.columns:
                        if store_name in store_carriers:
                            carrier_name = store_carriers[store_name]

                            # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
                            year_weightings = self._get_year_weightings(network, year)
                            if year_weightings is not None:
                                discharge_mwh = float(
                                    (
                                        year_stores[store_name].clip(lower=0).values
                                        * year_weightings
                                    ).sum()
                                )
                            else:
                                # Fallback: simple sum (will be incorrect for non-1H models)
                                discharge_mwh = float(
                                    year_stores[store_name].clip(lower=0).sum()
                                )

                            if carrier_name in carrier_stats["dispatch_by_carrier"]:
                                carrier_stats["dispatch_by_carrier"][
                                    carrier_name
                                ] += discharge_mwh

            # Calculate emissions by carrier for this specific year.
            # Get emission factors for all carriers.
            cursor = conn.execute("SELECT name, co2_emissions FROM carriers")
            emission_factors = {row[0]: row[1] for row in cursor.fetchall()}

            # Calculate emissions: dispatch (MWh) × emission factor (tonnes CO2/MWh) = tonnes CO2
            for carrier_name, dispatch_mwh in carrier_stats[
                "dispatch_by_carrier"
            ].items():
                # Handle None values safely
                if dispatch_mwh is None:
                    dispatch_mwh = 0.0

                # Default to 0 if no emission factor
                emission_factor = emission_factors.get(carrier_name, 0.0)
                if emission_factor is None:
                    emission_factor = 0.0

                emissions_tonnes = dispatch_mwh * emission_factor

                if carrier_name in carrier_stats["emissions_by_carrier"]:
                    carrier_stats["emissions_by_carrier"][
                        carrier_name
                    ] += emissions_tonnes

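            # Worked example (illustrative numbers): a gas carrier with
            # 500 MWh of dispatch in this year and co2_emissions = 0.35 t/MWh
            # contributes 175 tonnes CO2; carriers without a factor in the
            # carriers table contribute zero.
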
            # Calculate capital costs by carrier for this specific year.
            # Capital costs are annualized and counted every year the component is active.

            # Helper function to check if component is active in this year
            def is_component_active(build_year, lifetime, current_year):
                """Check if component is active in the current year based on build_year and lifetime"""
                if pd.isna(build_year):
                    return True  # No build year constraint

                build_year = int(build_year)
                if build_year > current_year:
                    return False  # Not built yet

                if pd.isna(lifetime) or lifetime == float("inf"):
                    return True  # Infinite lifetime

                lifetime = int(lifetime)
                end_year = build_year + lifetime - 1
                return current_year <= end_year

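            # Example: build_year=2025 with lifetime=10 is active for
            # 2025..2034 inclusive (end_year = 2025 + 10 - 1):
            #
            #     is_component_active(2025, 10, 2034)  # True
            #     is_component_active(2025, 10, 2035)  # False
            #     is_component_active(float("nan"), 10, 1990)  # True (unconstrained)
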
|
1015
|
+
# 1. GENERATORS - Capital costs (including UNMET_LOAD)
|
|
1016
|
+
if hasattr(network, "generators") and not network.generators.empty:
|
|
1017
|
+
# Get generator info: carrier, capital_cost, build_year, lifetime (include UNMET_LOAD)
|
|
1018
|
+
cursor = conn.execute(
|
|
1019
|
+
"""
|
|
1020
|
+
SELECT c.name as component_name,
|
|
1021
|
+
CASE
|
|
1022
|
+
WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
|
|
1023
|
+
ELSE carr.name
|
|
1024
|
+
END as carrier_name
|
|
1025
|
+
FROM components c
|
|
1026
|
+
JOIN carriers carr ON c.carrier_id = carr.id
|
|
1027
|
+
WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
|
|
1028
|
+
"""
|
|
1029
|
+
)
|
|
1030
|
+
generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
|
|
1031
|
+
|
|
1032
|
+
for gen_name in network.generators.index:
|
|
1033
|
+
if gen_name in generator_carriers:
|
|
1034
|
+
carrier_name = generator_carriers[gen_name]
|
|
1035
|
+
|
|
1036
|
+
# Get build year and lifetime
|
|
1037
|
+
build_year = (
|
|
1038
|
+
network.generators.loc[gen_name, "build_year"]
|
|
1039
|
+
if "build_year" in network.generators.columns
|
|
1040
|
+
else None
|
|
1041
|
+
)
|
|
1042
|
+
lifetime = (
|
|
1043
|
+
network.generators.loc[gen_name, "lifetime"]
|
|
1044
|
+
if "lifetime" in network.generators.columns
|
|
1045
|
+
else None
|
|
1046
|
+
)
|
|
1047
|
+
|
|
1048
|
+
# Check if component is active in this year
|
|
1049
|
+
if is_component_active(build_year, lifetime, year):
|
|
1050
|
+
# Get capacity and capital cost
|
|
1051
|
+
if "p_nom_opt" in network.generators.columns:
|
|
1052
|
+
capacity_mw = float(
|
|
1053
|
+
network.generators.loc[gen_name, "p_nom_opt"]
|
|
1054
|
+
)
|
|
1055
|
+
else:
|
|
1056
|
+
capacity_mw = (
|
|
1057
|
+
float(network.generators.loc[gen_name, "p_nom"])
|
|
1058
|
+
if "p_nom" in network.generators.columns
|
|
1059
|
+
else 0.0
|
|
1060
|
+
)
|
|
1061
|
+
|
|
1062
|
+
capital_cost_per_mw = (
|
|
1063
|
+
float(network.generators.loc[gen_name, "capital_cost"])
|
|
1064
|
+
if "capital_cost" in network.generators.columns
|
|
1065
|
+
else 0.0
|
|
1066
|
+
)
|
|
1067
|
+
|
|
1068
|
+
# Calculate annualized capital cost for this year
|
|
1069
|
+
annual_capital_cost = capacity_mw * capital_cost_per_mw
|
|
1070
|
+
|
|
1071
|
+
if carrier_name in carrier_stats["capital_cost_by_carrier"]:
|
|
1072
|
+
carrier_stats["capital_cost_by_carrier"][
|
|
1073
|
+
carrier_name
|
|
1074
|
+
] += annual_capital_cost
|
|
1075
|
+
|
|
1076
|
+
# 2. STORAGE_UNITS - Capital costs
|
|
1077
|
+
if hasattr(network, "storage_units") and not network.storage_units.empty:
|
|
1078
|
+
cursor = conn.execute(
|
|
1079
|
+
"""
|
|
1080
|
+
SELECT c.name as component_name, carr.name as carrier_name
|
|
1081
|
+
FROM components c
|
|
1082
|
+
JOIN carriers carr ON c.carrier_id = carr.id
|
|
1083
|
+
WHERE c.component_type = 'STORAGE_UNIT'
|
|
1084
|
+
"""
|
|
1085
|
+
)
|
|
1086
|
+
storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
|
|
1087
|
+
|
|
1088
|
+
for su_name in network.storage_units.index:
|
|
1089
|
+
if su_name in storage_unit_carriers:
|
|
1090
|
+
carrier_name = storage_unit_carriers[su_name]
|
|
1091
|
+
|
|
1092
|
+
# Get build year and lifetime
|
|
1093
|
+
build_year = (
|
|
1094
|
+
network.storage_units.loc[su_name, "build_year"]
|
|
1095
|
+
if "build_year" in network.storage_units.columns
|
|
1096
|
+
else None
|
|
1097
|
+
)
|
|
1098
|
+
lifetime = (
|
|
1099
|
+
network.storage_units.loc[su_name, "lifetime"]
|
|
1100
|
+
if "lifetime" in network.storage_units.columns
|
|
1101
|
+
else None
|
|
1102
|
+
)
|
|
1103
|
+
|
|
1104
|
+
# Check if component is active in this year
|
|
1105
|
+
if is_component_active(build_year, lifetime, year):
|
|
1106
|
+
# Get power capacity and capital cost (per MW)
|
|
1107
|
+
if "p_nom_opt" in network.storage_units.columns:
|
|
1108
|
+
capacity_mw = float(
|
|
1109
|
+
network.storage_units.loc[su_name, "p_nom_opt"]
|
|
1110
|
+
)
|
|
1111
|
+
else:
|
|
1112
|
+
capacity_mw = (
|
|
1113
|
+
float(network.storage_units.loc[su_name, "p_nom"])
|
|
1114
|
+
if "p_nom" in network.storage_units.columns
|
|
1115
|
+
else 0.0
|
|
1116
|
+
)
|
|
1117
|
+
|
|
1118
|
+
capital_cost_per_mw = (
|
|
1119
|
+
float(
|
|
1120
|
+
network.storage_units.loc[su_name, "capital_cost"]
|
|
1121
|
+
)
|
|
1122
|
+
if "capital_cost" in network.storage_units.columns
|
|
1123
|
+
else 0.0
|
|
1124
|
+
)
|
|
1125
|
+
|
|
1126
|
+
# Calculate annualized capital cost for this year
|
|
1127
|
+
annual_capital_cost = capacity_mw * capital_cost_per_mw
|
|
1128
|
+
|
|
1129
|
+
if carrier_name in carrier_stats["capital_cost_by_carrier"]:
|
|
1130
|
+
carrier_stats["capital_cost_by_carrier"][
|
|
1131
|
+
carrier_name
|
|
1132
|
+
] += annual_capital_cost
|
|
1133
|
+
|
|
1134
|
+
# 3. STORES - Capital costs (per MWh)
|
|
1135
|
+
if hasattr(network, "stores") and not network.stores.empty:
|
|
1136
|
+
cursor = conn.execute(
|
|
1137
|
+
"""
|
|
1138
|
+
SELECT c.name as component_name, carr.name as carrier_name
|
|
1139
|
+
FROM components c
|
|
1140
|
+
JOIN carriers carr ON c.carrier_id = carr.id
|
|
1141
|
+
WHERE c.component_type = 'STORE'
|
|
1142
|
+
"""
|
|
1143
|
+
)
|
|
1144
|
+
store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
|
|
1145
|
+
|
|
1146
|
+
for store_name in network.stores.index:
|
|
1147
|
+
if store_name in store_carriers:
|
|
1148
|
+
carrier_name = store_carriers[store_name]
|
|
1149
|
+
|
|
1150
|
+
# Get build year and lifetime
|
|
1151
|
+
build_year = (
|
|
1152
|
+
network.stores.loc[store_name, "build_year"]
|
|
1153
|
+
if "build_year" in network.stores.columns
|
|
1154
|
+
else None
|
|
1155
|
+
)
|
|
1156
|
+
lifetime = (
|
|
1157
|
+
network.stores.loc[store_name, "lifetime"]
|
|
1158
|
+
if "lifetime" in network.stores.columns
|
|
1159
|
+
else None
|
|
1160
|
+
)
|
|
1161
|
+
|
|
1162
|
+
# Check if component is active in this year
|
|
1163
|
+
if is_component_active(build_year, lifetime, year):
|
|
1164
|
+
# Get energy capacity and capital cost (per MWh)
|
|
1165
|
+
if "e_nom_opt" in network.stores.columns:
|
|
1166
|
+
capacity_mwh = float(
|
|
1167
|
+
network.stores.loc[store_name, "e_nom_opt"]
|
|
1168
|
+
)
|
|
1169
|
+
else:
|
|
1170
|
+
capacity_mwh = (
|
|
1171
|
+
float(network.stores.loc[store_name, "e_nom"])
|
|
1172
|
+
if "e_nom" in network.stores.columns
|
|
1173
|
+
else 0.0
|
|
1174
|
+
)
|
|
1175
|
+
|
|
1176
|
+
capital_cost_per_mwh = (
|
|
1177
|
+
float(network.stores.loc[store_name, "capital_cost"])
|
|
1178
|
+
if "capital_cost" in network.stores.columns
|
|
1179
|
+
else 0.0
|
|
1180
|
+
)
|
|
1181
|
+
|
|
1182
|
+
# Calculate annualized capital cost for this year
|
|
1183
|
+
annual_capital_cost = capacity_mwh * capital_cost_per_mwh
|
|
1184
|
+
|
|
1185
|
+
if carrier_name in carrier_stats["capital_cost_by_carrier"]:
|
|
1186
|
+
carrier_stats["capital_cost_by_carrier"][
|
|
1187
|
+
carrier_name
|
|
1188
|
+
] += annual_capital_cost
|
|
1189
|
+
|
|
1190
|
+
# 4. LINES - Capital costs (per MVA)
|
|
1191
|
+
if hasattr(network, "lines") and not network.lines.empty:
|
|
1192
|
+
cursor = conn.execute(
|
|
1193
|
+
"""
|
|
1194
|
+
SELECT c.name as component_name, carr.name as carrier_name
|
|
1195
|
+
FROM components c
|
|
1196
|
+
JOIN carriers carr ON c.carrier_id = carr.id
|
|
1197
|
+
WHERE c.component_type = 'LINE'
|
|
1198
|
+
"""
|
|
1199
|
+
)
|
|
1200
|
+
line_carriers = {row[0]: row[1] for row in cursor.fetchall()}
|
|
1201
|
+
|
|
1202
|
+
for line_name in network.lines.index:
|
|
1203
|
+
if line_name in line_carriers:
|
|
1204
|
+
carrier_name = line_carriers[line_name]
|
|
1205
|
+
|
|
1206
|
+
# Get build year and lifetime
|
|
1207
|
+
build_year = (
|
|
1208
|
+
network.lines.loc[line_name, "build_year"]
|
|
1209
|
+
if "build_year" in network.lines.columns
|
|
1210
|
+
else None
|
|
1211
|
+
)
|
|
1212
|
+
lifetime = (
|
|
1213
|
+
network.lines.loc[line_name, "lifetime"]
|
|
1214
|
+
if "lifetime" in network.lines.columns
|
|
1215
|
+
else None
|
|
1216
|
+
)
|
|
1217
|
+
|
|
1218
|
+
# Check if component is active in this year
|
|
1219
|
+
if is_component_active(build_year, lifetime, year):
|
|
1220
|
+
# Get apparent power capacity and capital cost (per MVA)
|
|
1221
|
+
if "s_nom_opt" in network.lines.columns:
|
|
1222
|
+
capacity_mva = float(
|
|
1223
|
+
network.lines.loc[line_name, "s_nom_opt"]
|
|
1224
|
+
)
|
|
1225
|
+
else:
|
|
1226
|
+
capacity_mva = (
|
|
1227
|
+
float(network.lines.loc[line_name, "s_nom"])
|
|
1228
|
+
if "s_nom" in network.lines.columns
|
|
1229
|
+
else 0.0
|
|
1230
|
+
)
|
|
1231
|
+
|
|
1232
|
+
capital_cost_per_mva = (
|
|
1233
|
+
float(network.lines.loc[line_name, "capital_cost"])
|
|
1234
|
+
if "capital_cost" in network.lines.columns
|
|
1235
|
+
else 0.0
|
|
1236
|
+
)
|
|
1237
|
+
|
|
1238
|
+
# Calculate annualized capital cost for this year
|
|
1239
|
+
annual_capital_cost = capacity_mva * capital_cost_per_mva
|
|
1240
|
+
|
|
1241
|
+
if carrier_name in carrier_stats["capital_cost_by_carrier"]:
|
|
1242
|
+
carrier_stats["capital_cost_by_carrier"][
|
|
1243
|
+
carrier_name
|
|
1244
|
+
] += annual_capital_cost
|
|
1245
|
+
|
|
1246
|
+
# 5. LINKS - Capital costs (per MW)
|
|
1247
|
+
if hasattr(network, "links") and not network.links.empty:
|
|
1248
|
+
cursor = conn.execute(
|
|
1249
|
+
"""
|
|
1250
|
+
SELECT c.name as component_name, carr.name as carrier_name
|
|
1251
|
+
FROM components c
|
|
1252
|
+
JOIN carriers carr ON c.carrier_id = carr.id
|
|
1253
|
+
WHERE c.component_type = 'LINK'
|
|
1254
|
+
"""
|
|
1255
|
+
)
|
|
1256
|
+
link_carriers = {row[0]: row[1] for row in cursor.fetchall()}
|
|
1257
|
+
|
|
1258
|
+
for link_name in network.links.index:
|
|
1259
|
+
if link_name in link_carriers:
|
|
1260
|
+
carrier_name = link_carriers[link_name]
|
|
1261
|
+
|
|
1262
|
+
# Get build year and lifetime
|
|
1263
|
+
build_year = (
|
|
1264
|
+
network.links.loc[link_name, "build_year"]
|
|
1265
|
+
if "build_year" in network.links.columns
|
|
1266
|
+
else None
|
|
1267
|
+
)
|
|
1268
|
+
lifetime = (
|
|
1269
|
+
network.links.loc[link_name, "lifetime"]
|
|
1270
|
+
if "lifetime" in network.links.columns
|
|
1271
|
+
else None
|
|
1272
|
+
)
|
|
1273
|
+
|
|
1274
|
+
# Check if component is active in this year
|
|
1275
|
+
if is_component_active(build_year, lifetime, year):
|
|
1276
|
+
# Get power capacity and capital cost (per MW)
|
|
1277
|
+
if "p_nom_opt" in network.links.columns:
|
|
1278
|
+
capacity_mw = float(
|
|
1279
|
+
network.links.loc[link_name, "p_nom_opt"]
|
|
1280
|
+
)
|
|
1281
|
+
else:
|
|
1282
|
+
capacity_mw = (
|
|
1283
|
+
float(network.links.loc[link_name, "p_nom"])
|
|
1284
|
+
if "p_nom" in network.links.columns
|
|
1285
|
+
else 0.0
|
|
1286
|
+
)
|
|
1287
|
+
|
|
1288
|
+
capital_cost_per_mw = (
|
|
1289
|
+
float(network.links.loc[link_name, "capital_cost"])
|
|
1290
|
+
if "capital_cost" in network.links.columns
|
|
1291
|
+
else 0.0
|
|
1292
|
+
)
|
|
1293
|
+
|
|
1294
|
+
# Calculate annualized capital cost for this year
|
|
1295
|
+
annual_capital_cost = capacity_mw * capital_cost_per_mw
|
|
1296
|
+
|
|
1297
|
+
if carrier_name in carrier_stats["capital_cost_by_carrier"]:
|
|
1298
|
+
carrier_stats["capital_cost_by_carrier"][
|
|
1299
|
+
carrier_name
|
|
1300
|
+
] += annual_capital_cost
|
|
1301
|
+
|
|
1302
|
+
# Calculate operational costs by carrier for this specific year
|
|
1303
|
+
# Operational costs = dispatch (MWh) × marginal_cost (currency/MWh)
|
|
1304
|
+
# Only for components that are active in this year
|
|
1305
|
+
|
|
1306
|
+
# 1. GENERATORS - Operational costs (including UNMET_LOAD)
|
|
1307
|
+
if hasattr(network, "generators_t") and hasattr(network.generators_t, "p"):
|
|
1308
|
+
# Get generator info: carrier, marginal_cost, build_year, lifetime (include UNMET_LOAD)
|
|
1309
|
+
cursor = conn.execute(
|
|
1310
|
+
"""
|
|
1311
|
+
SELECT c.name as component_name,
|
|
1312
|
+
CASE
|
|
1313
|
+
WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
|
|
1314
|
+
ELSE carr.name
|
|
1315
|
+
END as carrier_name
|
|
1316
|
+
FROM components c
|
|
1317
|
+
JOIN carriers carr ON c.carrier_id = carr.id
|
|
1318
|
+
WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
|
|
1319
|
+
"""
|
|
1320
|
+
)
|
|
1321
|
+
generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
|
|
1322
|
+
|
|
1323
|
+
# Filter generation data for this specific year
|
|
1324
|
+
year_generation = self._filter_timeseries_by_year(
|
|
1325
|
+
network.generators_t.p, network.snapshots, year
|
|
1326
|
+
)
|
|
1327
|
+
if year_generation is not None and not year_generation.empty:
|
|
1328
|
+
for gen_name in year_generation.columns:
|
|
1329
|
+
if gen_name in generator_carriers:
|
|
1330
|
+
carrier_name = generator_carriers[gen_name]
|
|
1331
|
+
|
|
1332
|
+
# Get build year and lifetime
|
|
1333
|
+
build_year = (
|
|
1334
|
+
network.generators.loc[gen_name, "build_year"]
|
|
1335
|
+
if "build_year" in network.generators.columns
|
|
1336
|
+
else None
|
|
1337
|
+
)
|
|
1338
|
+
lifetime = (
|
|
1339
|
+
network.generators.loc[gen_name, "lifetime"]
|
|
1340
|
+
if "lifetime" in network.generators.columns
|
|
1341
|
+
else None
|
|
1342
|
+
)
|
|
1343
|
+
|
|
1344
|
+
# Check if component is active in this year
|
|
1345
|
+
if is_component_active(build_year, lifetime, year):
|
|
1346
|
+
# Calculate generation for this year (already calculated above, but need to recalculate for operational costs)
|
|
1347
|
+
year_weightings = self._get_year_weightings(
|
|
1348
|
+
network, year
|
|
1349
|
+
)
|
|
1350
|
+
if year_weightings is not None:
|
|
1351
|
+
generation_mwh = float(
|
|
1352
|
+
(
|
|
1353
|
+
year_generation[gen_name].values
|
|
1354
|
+
* year_weightings
|
|
1355
|
+
).sum()
|
|
1356
|
+
)
|
|
1357
|
+
else:
|
|
1358
|
+
generation_mwh = float(
|
|
1359
|
+
year_generation[gen_name].sum()
|
|
1360
|
+
)
|
|
1361
|
+
|
|
1362
|
+
# Get marginal cost
|
|
1363
|
+
marginal_cost = (
|
|
1364
|
+
float(
|
|
1365
|
+
network.generators.loc[
|
|
1366
|
+
gen_name, "marginal_cost"
|
|
1367
|
+
]
|
|
1368
|
+
)
|
|
1369
|
+
if "marginal_cost" in network.generators.columns
|
|
1370
|
+
else 0.0
|
|
1371
|
+
)
|
|
1372
|
+
|
|
1373
|
+
# Calculate operational cost for this year
|
|
1374
|
+
operational_cost = generation_mwh * marginal_cost
|
|
1375
|
+
|
|
1376
|
+
if (
|
|
1377
|
+
carrier_name
|
|
1378
|
+
in carrier_stats["operational_cost_by_carrier"]
|
|
1379
|
+
):
|
|
1380
|
+
carrier_stats["operational_cost_by_carrier"][
|
|
1381
|
+
carrier_name
|
|
1382
|
+
] += operational_cost
|
|
1383
|
+
|
|
1384
|
+
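            # Illustration (not part of this module): how the weighting step above
            # turns a dispatch series in MW into energy in MWh before costing.
            # A minimal sketch with hypothetical numbers:
            #
            #   import numpy as np
            #   import pandas as pd
            #
            #   dispatch_mw = pd.Series([100.0, 100.0, 80.0])  # per-snapshot output (MW)
            #   weightings = np.array([1.0, 2.0, 1.0])         # hours represented by each snapshot
            #   energy_mwh = float((dispatch_mw.values * weightings).sum())  # 100*1 + 100*2 + 80*1 = 380.0
            #   cost = energy_mwh * 25.0                       # at 25 currency/MWh -> 9500.0
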
            # 2. STORAGE_UNITS - Operational costs (discharge only)
            if hasattr(network, "storage_units_t") and hasattr(network.storage_units_t, "p"):
                # Map storage unit name -> carrier name
                cursor = conn.execute(
                    """
                    SELECT c.name AS component_name, carr.name AS carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORAGE_UNIT'
                    """
                )
                storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter storage unit data down to this specific year
                year_storage = self._filter_timeseries_by_year(
                    network.storage_units_t.p, network.snapshots, year
                )
                if year_storage is not None and not year_storage.empty:
                    for su_name in year_storage.columns:
                        if su_name in storage_unit_carriers:
                            carrier_name = storage_unit_carriers[su_name]

                            # Get build year and lifetime
                            build_year = (
                                network.storage_units.loc[su_name, "build_year"]
                                if "build_year" in network.storage_units.columns
                                else None
                            )
                            lifetime = (
                                network.storage_units.loc[su_name, "lifetime"]
                                if "lifetime" in network.storage_units.columns
                                else None
                            )

                            # Only cost components that are active in this year
                            if is_component_active(build_year, lifetime, year):
                                # Discharge for this year (positive values only), weighted to MWh
                                year_weightings = self._get_year_weightings(network, year)
                                if year_weightings is not None:
                                    discharge_mwh = float(
                                        (year_storage[su_name].clip(lower=0).values * year_weightings).sum()
                                    )
                                else:
                                    discharge_mwh = float(year_storage[su_name].clip(lower=0).sum())

                                # Get marginal cost (0.0 if the column is absent)
                                marginal_cost = (
                                    float(network.storage_units.loc[su_name, "marginal_cost"])
                                    if "marginal_cost" in network.storage_units.columns
                                    else 0.0
                                )

                                # Operational cost for this year
                                operational_cost = discharge_mwh * marginal_cost

                                if carrier_name in carrier_stats["operational_cost_by_carrier"]:
                                    carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost

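            # Illustration (not part of this module): storage dispatch is signed
            # (positive = discharge, negative = charge), so .clip(lower=0) keeps only
            # the discharged energy before applying the marginal cost. Sketch:
            #
            #   import pandas as pd
            #   p = pd.Series([5.0, -3.0, 2.0])         # MW; charging snapshots are negative
            #   discharge = p.clip(lower=0)             # [5.0, 0.0, 2.0]
            #   discharge_mwh = float(discharge.sum())  # 7.0 with unit weightings
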
            # 3. STORES - Operational costs (discharge only)
            if hasattr(network, "stores_t") and hasattr(network.stores_t, "p"):
                # Map store name -> carrier name
                cursor = conn.execute(
                    """
                    SELECT c.name AS component_name, carr.name AS carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORE'
                    """
                )
                store_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter store data down to this specific year
                year_stores = self._filter_timeseries_by_year(
                    network.stores_t.p, network.snapshots, year
                )
                if year_stores is not None and not year_stores.empty:
                    for store_name in year_stores.columns:
                        if store_name in store_carriers:
                            carrier_name = store_carriers[store_name]

                            # Get build year and lifetime
                            build_year = (
                                network.stores.loc[store_name, "build_year"]
                                if "build_year" in network.stores.columns
                                else None
                            )
                            lifetime = (
                                network.stores.loc[store_name, "lifetime"]
                                if "lifetime" in network.stores.columns
                                else None
                            )

                            # Only cost components that are active in this year
                            if is_component_active(build_year, lifetime, year):
                                # Discharge for this year (positive values only), weighted to MWh
                                year_weightings = self._get_year_weightings(network, year)
                                if year_weightings is not None:
                                    discharge_mwh = float(
                                        (year_stores[store_name].clip(lower=0).values * year_weightings).sum()
                                    )
                                else:
                                    discharge_mwh = float(year_stores[store_name].clip(lower=0).sum())

                                # Get marginal cost (0.0 if the column is absent)
                                marginal_cost = (
                                    float(network.stores.loc[store_name, "marginal_cost"])
                                    if "marginal_cost" in network.stores.columns
                                    else 0.0
                                )

                                # Operational cost for this year
                                operational_cost = discharge_mwh * marginal_cost

                                if carrier_name in carrier_stats["operational_cost_by_carrier"]:
                                    carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost

            # Calculate total system cost by carrier for this specific year:
            # total system cost = capital cost + operational cost
            for carrier_name in carrier_stats["capital_cost_by_carrier"]:
                capital_cost = carrier_stats["capital_cost_by_carrier"][carrier_name]
                operational_cost = carrier_stats["operational_cost_by_carrier"][carrier_name]
                total_system_cost = capital_cost + operational_cost

                if carrier_name in carrier_stats["total_system_cost_by_carrier"]:
                    carrier_stats["total_system_cost_by_carrier"][carrier_name] = total_system_cost

            # Calculate capacity by carrier for this specific year

            # 1. GENERATORS - Power capacity (MW), including UNMET_LOAD
            if hasattr(network, "generators") and not network.generators.empty:
                # Map generator name -> carrier name (include UNMET_LOAD)
                cursor = conn.execute(
                    """
                    SELECT c.name AS component_name,
                           CASE
                               WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
                               ELSE carr.name
                           END AS carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
                    """
                )
                generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for gen_name in network.generators.index:
                    if gen_name in generator_carriers:
                        carrier_name = generator_carriers[gen_name]

                        # Generator counts in this year only if build_year <= year
                        is_available = True
                        if "build_year" in network.generators.columns:
                            build_year = network.generators.loc[gen_name, "build_year"]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Use p_nom_opt if available, otherwise p_nom
                            if "p_nom_opt" in network.generators.columns:
                                capacity_mw = float(network.generators.loc[gen_name, "p_nom_opt"])
                            else:
                                capacity_mw = (
                                    float(network.generators.loc[gen_name, "p_nom"])
                                    if "p_nom" in network.generators.columns
                                    else 0.0
                                )

                            if carrier_name in carrier_stats["power_capacity_by_carrier"]:
                                carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw

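            # Illustration (not part of this module): the capacity lookups in this
            # and the following sections prefer the optimized capacity (p_nom_opt)
            # and fall back to the input capacity (p_nom), defaulting to 0.0 when
            # neither column exists. A minimal sketch with a hypothetical frame:
            #
            #   import pandas as pd
            #   gens = pd.DataFrame({"p_nom": [100.0], "p_nom_opt": [140.0]}, index=["gen1"])
            #   if "p_nom_opt" in gens.columns:
            #       capacity_mw = float(gens.loc["gen1", "p_nom_opt"])  # 140.0
            #   else:
            #       capacity_mw = float(gens.loc["gen1", "p_nom"]) if "p_nom" in gens.columns else 0.0
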
            # 2. STORAGE_UNITS - Power capacity (MW) + Energy capacity (MWh)
            if hasattr(network, "storage_units") and not network.storage_units.empty:
                # Map storage unit name -> carrier name
                cursor = conn.execute(
                    """
                    SELECT c.name AS component_name, carr.name AS carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORAGE_UNIT'
                    """
                )
                storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for su_name in network.storage_units.index:
                    if su_name in storage_unit_carriers:
                        carrier_name = storage_unit_carriers[su_name]

                        # Storage unit counts in this year only if build_year <= year
                        is_available = True
                        if "build_year" in network.storage_units.columns:
                            build_year = network.storage_units.loc[su_name, "build_year"]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Power capacity (MW): use p_nom_opt if available, otherwise p_nom
                            if "p_nom_opt" in network.storage_units.columns:
                                p_nom_opt = float(network.storage_units.loc[su_name, "p_nom_opt"])
                            else:
                                p_nom_opt = (
                                    float(network.storage_units.loc[su_name, "p_nom"])
                                    if "p_nom" in network.storage_units.columns
                                    else 0.0
                                )

                            if carrier_name in carrier_stats["power_capacity_by_carrier"]:
                                carrier_stats["power_capacity_by_carrier"][carrier_name] += p_nom_opt

                            # Energy capacity (MWh) derived from max_hours
                            max_hours = 1.0  # default duration when the column is absent
                            if "max_hours" in network.storage_units.columns:
                                max_hours = float(network.storage_units.loc[su_name, "max_hours"])
                            energy_capacity_mwh = p_nom_opt * max_hours

                            if carrier_name in carrier_stats["energy_capacity_by_carrier"]:
                                carrier_stats["energy_capacity_by_carrier"][carrier_name] += energy_capacity_mwh

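            # Illustration (not part of this module): a storage unit's energy capacity
            # is its power rating times its duration, e.g. a hypothetical 50 MW unit
            # with max_hours = 4 stores 50 * 4 = 200 MWh. The 1.0-hour default applies
            # only when the max_hours column is absent from the frame.
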
            # 3. STORES - Energy capacity (MWh) only
            if hasattr(network, "stores") and not network.stores.empty:
                # Map store name -> carrier name
                cursor = conn.execute(
                    """
                    SELECT c.name AS component_name, carr.name AS carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORE'
                    """
                )
                store_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for store_name in network.stores.index:
                    if store_name in store_carriers:
                        carrier_name = store_carriers[store_name]

                        # Store counts in this year only if build_year <= year
                        is_available = True
                        if "build_year" in network.stores.columns:
                            build_year = network.stores.loc[store_name, "build_year"]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Energy capacity (MWh): use e_nom_opt if available, otherwise e_nom
                            if "e_nom_opt" in network.stores.columns:
                                capacity_mwh = float(network.stores.loc[store_name, "e_nom_opt"])
                            else:
                                capacity_mwh = (
                                    float(network.stores.loc[store_name, "e_nom"])
                                    if "e_nom" in network.stores.columns
                                    else 0.0
                                )

                            if carrier_name in carrier_stats["energy_capacity_by_carrier"]:
                                carrier_stats["energy_capacity_by_carrier"][carrier_name] += capacity_mwh

            # 4. LINES - Apparent power capacity (MVA -> MW)
            if hasattr(network, "lines") and not network.lines.empty:
                # Map line name -> carrier name
                cursor = conn.execute(
                    """
                    SELECT c.name AS component_name, carr.name AS carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'LINE'
                    """
                )
                line_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for line_name in network.lines.index:
                    if line_name in line_carriers:
                        carrier_name = line_carriers[line_name]

                        # Line counts in this year only if build_year <= year
                        is_available = True
                        if "build_year" in network.lines.columns:
                            build_year = network.lines.loc[line_name, "build_year"]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Apparent power capacity: use s_nom_opt if available, otherwise s_nom
                            if "s_nom_opt" in network.lines.columns:
                                capacity_mva = float(network.lines.loc[line_name, "s_nom_opt"])
                            else:
                                capacity_mva = (
                                    float(network.lines.loc[line_name, "s_nom"])
                                    if "s_nom" in network.lines.columns
                                    else 0.0
                                )

                            # Treat MVA as MW, i.e. assume a power factor of 1
                            capacity_mw = capacity_mva

                            if carrier_name in carrier_stats["power_capacity_by_carrier"]:
                                carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw

            # 5. LINKS - Power capacity (MW)
            if hasattr(network, "links") and not network.links.empty:
                # Map link name -> carrier name
                cursor = conn.execute(
                    """
                    SELECT c.name AS component_name, carr.name AS carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'LINK'
                    """
                )
                link_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for link_name in network.links.index:
                    if link_name in link_carriers:
                        carrier_name = link_carriers[link_name]

                        # Link counts in this year only if build_year <= year
                        is_available = True
                        if "build_year" in network.links.columns:
                            build_year = network.links.loc[link_name, "build_year"]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Power capacity (MW): use p_nom_opt if available, otherwise p_nom
                            if "p_nom_opt" in network.links.columns:
                                capacity_mw = float(network.links.loc[link_name, "p_nom_opt"])
                            else:
                                capacity_mw = (
                                    float(network.links.loc[link_name, "p_nom"])
                                    if "p_nom" in network.links.columns
                                    else 0.0
                                )

                            if carrier_name in carrier_stats["power_capacity_by_carrier"]:
                                carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw

            return carrier_stats

        except Exception:
            # Return an empty structure on error
            return {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

    def _sum_year_based_carrier_statistics(self, conn) -> Dict[str, Any]:
        """
        Sum per-year carrier statistics into accurate multi-year totals
        (single network per database).

        Capacities take the LAST year's value (the final capacity state);
        dispatch, emissions, and costs are summed across all years.
        """
        try:
            import json

            # Initialize totals
            totals = {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

            # Get all carriers from the database
            cursor = conn.execute("SELECT DISTINCT name FROM carriers")
            all_carriers = [row[0] for row in cursor.fetchall()]

            # Initialize every carrier with zero values (plus the special "Unmet Load" carrier)
            all_carriers_with_unmet = all_carriers + ["Unmet Load"]
            for carrier in all_carriers_with_unmet:
                totals["dispatch_by_carrier"][carrier] = 0.0
                totals["power_capacity_by_carrier"][carrier] = 0.0
                totals["energy_capacity_by_carrier"][carrier] = 0.0
                totals["emissions_by_carrier"][carrier] = 0.0
                totals["capital_cost_by_carrier"][carrier] = 0.0
                totals["operational_cost_by_carrier"][carrier] = 0.0
                totals["total_system_cost_by_carrier"][carrier] = 0.0

            # Get all year-based results, ordered by year
            cursor = conn.execute(
                """
                SELECT year, results_json FROM network_solve_results_by_year
                ORDER BY year
                """
            )
            year_results = cursor.fetchall()

            if not year_results:
                return totals

            # Capacities: use the LAST year only (final capacity state)
            _last_year, last_results_json = year_results[-1]

            try:
                results = json.loads(last_results_json)
                network_stats = results.get("network_statistics", {})
                custom_stats = network_stats.get("custom_statistics", {})

                # Use the last year's capacity as the all-year capacity
                power_capacity_by_carrier = custom_stats.get("power_capacity_by_carrier", {})
                for carrier, value in power_capacity_by_carrier.items():
                    if carrier in totals["power_capacity_by_carrier"]:
                        totals["power_capacity_by_carrier"][carrier] = float(value or 0)

                energy_capacity_by_carrier = custom_stats.get("energy_capacity_by_carrier", {})
                for carrier, value in energy_capacity_by_carrier.items():
                    if carrier in totals["energy_capacity_by_carrier"]:
                        totals["energy_capacity_by_carrier"][carrier] = float(value or 0)

            except Exception:
                pass  # Failed to process the last year's results

            # Other stats (dispatch, emissions, costs): sum across all years
            for year, results_json in year_results:
                try:
                    results = json.loads(results_json)
                    network_stats = results.get("network_statistics", {})
                    custom_stats = network_stats.get("custom_statistics", {})

                    # Sum dispatch (energy values) across years
                    dispatch_by_carrier = custom_stats.get("dispatch_by_carrier", {})
                    for carrier, value in dispatch_by_carrier.items():
                        if carrier in totals["dispatch_by_carrier"]:
                            totals["dispatch_by_carrier"][carrier] += float(value or 0)

                    # Sum emissions (cumulative across years)
                    emissions_by_carrier = custom_stats.get("emissions_by_carrier", {})
                    for carrier, value in emissions_by_carrier.items():
                        if carrier in totals["emissions_by_carrier"]:
                            totals["emissions_by_carrier"][carrier] += float(value or 0)

                    # Sum capital costs (cumulative across years)
                    capital_cost_by_carrier = custom_stats.get("capital_cost_by_carrier", {})
                    for carrier, value in capital_cost_by_carrier.items():
                        if carrier in totals["capital_cost_by_carrier"]:
                            totals["capital_cost_by_carrier"][carrier] += float(value or 0)

                    # Sum operational costs (cumulative across years)
                    operational_cost_by_carrier = custom_stats.get("operational_cost_by_carrier", {})
                    for carrier, value in operational_cost_by_carrier.items():
                        if carrier in totals["operational_cost_by_carrier"]:
                            totals["operational_cost_by_carrier"][carrier] += float(value or 0)

                    # Sum total system costs (cumulative across years)
                    total_system_cost_by_carrier = custom_stats.get("total_system_cost_by_carrier", {})
                    for carrier, value in total_system_cost_by_carrier.items():
                        if carrier in totals["total_system_cost_by_carrier"]:
                            totals["total_system_cost_by_carrier"][carrier] += float(value or 0)

                except Exception:
                    continue

            return totals

        except Exception:
            # Return an empty structure on error
            return {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

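    # Illustration (not part of this module): the aggregation rule above applied
    # to two hypothetical stored year rows - flows are summed, capacities come
    # from the final year only. A minimal sketch:
    #
    #   import json
    #   rows = [
    #       (2030, json.dumps({"network_statistics": {"custom_statistics": {
    #           "dispatch_by_carrier": {"wind": 10.0},
    #           "power_capacity_by_carrier": {"wind": 5.0}}}})),
    #       (2040, json.dumps({"network_statistics": {"custom_statistics": {
    #           "dispatch_by_carrier": {"wind": 12.0},
    #           "power_capacity_by_carrier": {"wind": 8.0}}}})),
    #   ]
    #   # Expected totals: dispatch 10 + 12 = 22.0 (summed across years);
    #   # power capacity 8.0 (last year only, not 5 + 8).
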
    def _serialize_results_json(self, solve_result: Dict[str, Any]) -> str:
        """Serialize solve results to a JSON string."""
        import json

        try:
            # Create a clean results dictionary
            results = {
                "success": solve_result.get("success", False),
                "status": solve_result.get("status", "unknown"),
                "solve_time": solve_result.get("solve_time", 0.0),
                "objective_value": solve_result.get("objective_value"),
                "solver_name": solve_result.get("solver_name", "unknown"),
                "run_id": solve_result.get("run_id"),
                "network_statistics": solve_result.get("network_statistics", {}),
                "pypsa_result": solve_result.get("pypsa_result", {}),
            }
            return json.dumps(results, default=self._json_serializer)
        except Exception:
            return json.dumps({"error": "serialization_failed"})

    def _serialize_metadata_json(self, solve_result: Dict[str, Any]) -> str:
        """Serialize solve metadata to a JSON string."""
        import json

        try:
            metadata = {
                "solver_name": solve_result.get("solver_name", "unknown"),
                "run_id": solve_result.get("run_id"),
                "multi_period": solve_result.get("multi_period", False),
                "years": solve_result.get("years", []),
                "network_name": solve_result.get("network_name"),
                "num_snapshots": solve_result.get("num_snapshots", 0),
            }
            return json.dumps(metadata, default=self._json_serializer)
        except Exception:
            return json.dumps({"error": "serialization_failed"})

    def _filter_timeseries_by_year(
        self, timeseries_df: "pd.DataFrame", snapshots: "pd.Index", year: int
    ) -> Optional["pd.DataFrame"]:
        """Filter timeseries data by year; return None if the snapshots cannot be filtered."""
        try:
            # MultiIndex case (multi-period optimization): level 0 holds the period/year
            if hasattr(snapshots, "levels"):
                period_values = snapshots.get_level_values(0)
                year_mask = period_values == year
                if year_mask.any():
                    year_snapshots = snapshots[year_mask]
                    return timeseries_df.loc[year_snapshots]

            # DatetimeIndex case (regular time series): filter on the year accessor
            elif hasattr(snapshots, "year"):
                year_mask = snapshots.year == year
                if year_mask.any():
                    return timeseries_df.loc[year_mask]

            # Fallback - the snapshots cannot be filtered by year
            return None

        except Exception:
            return None

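    # Illustration (not part of this module): the two snapshot layouts the filter
    # above distinguishes. A minimal sketch with hypothetical snapshots:
    #
    #   import pandas as pd
    #   # Multi-period: (period, timestep) MultiIndex - filter on level 0
    #   snaps = pd.MultiIndex.from_tuples([(2030, 1), (2030, 2), (2040, 1)])
    #   mask = snaps.get_level_values(0) == 2030   # [True, True, False]
    #   # Single-period: DatetimeIndex - filter on the .year accessor
    #   snaps = pd.date_range("2030-01-01", periods=3, freq="h")
    #   mask = snaps.year == 2030                  # all True
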
    def _get_year_weightings(self, network: "pypsa.Network", year: int) -> Optional["np.ndarray"]:
        """Get snapshot weightings for a specific year; return None if unavailable."""
        try:
            # MultiIndex snapshots (multi-period optimization)
            if hasattr(network.snapshots, "levels"):
                period_values = network.snapshots.get_level_values(0)
                year_mask = period_values == year
                if year_mask.any():
                    year_snapshots = network.snapshots[year_mask]
                    year_weightings = network.snapshot_weightings.loc[year_snapshots]
                    if isinstance(year_weightings, pd.DataFrame):
                        # Prefer the "objective" weighting column, else the first column
                        if "objective" in year_weightings.columns:
                            return year_weightings["objective"].values
                        else:
                            return year_weightings.iloc[:, 0].values
                    else:
                        return year_weightings.values

            # DatetimeIndex snapshots (regular time series)
            elif hasattr(network.snapshots, "year"):
                year_mask = network.snapshots.year == year
                if year_mask.any():
                    year_weightings = network.snapshot_weightings.loc[year_mask]
                    if isinstance(year_weightings, pd.DataFrame):
                        if "objective" in year_weightings.columns:
                            return year_weightings["objective"].values
                        else:
                            return year_weightings.iloc[:, 0].values
                    else:
                        return year_weightings.values

            return None

        except Exception:
            return None

    def _calculate_total_demand(self, network: "pypsa.Network") -> float:
        """Calculate total demand (MWh) from the loads in the network."""
        try:
            total_demand = 0.0

            if hasattr(network, "loads_t") and hasattr(network.loads_t, "p"):
                # Apply snapshot weightings to convert MW to MWh
                weightings = network.snapshot_weightings
                if isinstance(weightings, pd.DataFrame):
                    if "objective" in weightings.columns:
                        weighting_values = weightings["objective"].values
                    else:
                        weighting_values = weightings.iloc[:, 0].values
                else:
                    weighting_values = weightings.values

                # Broadcast the per-snapshot weightings across all load columns
                total_demand = float(
                    (network.loads_t.p.values * weighting_values[:, None]).sum()
                )

            return total_demand

        except Exception:
            return 0.0

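    # Illustration (not part of this module): the [:, None] reshape above broadcasts
    # the per-snapshot weightings across every load column, scaling each MW value by
    # the hours its snapshot represents. A minimal sketch with hypothetical numbers:
    #
    #   import numpy as np
    #   loads = np.array([[10.0, 5.0], [10.0, 5.0]])  # snapshots x loads, in MW
    #   w = np.array([1.0, 3.0])                      # hours per snapshot
    #   total_mwh = (loads * w[:, None]).sum()        # (10+5)*1 + (10+5)*3 = 60.0
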
    def _json_serializer(self, obj):
        """Convert numpy/pandas types to JSON-serializable types."""
        # numpy and pandas are imported at module level as np / pd
        if isinstance(obj, (np.integer, np.int64, np.int32)):
            return int(obj)
        elif isinstance(obj, (np.floating, np.float64, np.float32)):
            return float(obj)
        elif isinstance(obj, np.ndarray):
            return obj.tolist()
        elif isinstance(obj, pd.Series):
            return obj.to_dict()
        elif isinstance(obj, pd.DataFrame):
            return obj.to_dict()
        elif hasattr(obj, "item"):  # numpy scalars
            return obj.item()
        else:
            raise TypeError(f"Object of type {type(obj)} is not JSON serializable")
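
    # Illustration (not part of this module): the serializer above is meant to be
    # passed as the default= hook of json.dumps, which calls it only for objects
    # the stock encoder cannot handle (e.g. numpy arrays). A minimal sketch:
    #
    #   import json
    #   import numpy as np
    #   payload = {"objective": np.float64(123.4), "dispatch": np.array([1, 2, 3])}
    #   json.dumps(payload, default=ResultStorage()._json_serializer)
    #   # -> '{"objective": 123.4, "dispatch": [1, 2, 3]}'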