pyconvexity 0.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pyconvexity might be problematic.
- pyconvexity/__init__.py +226 -0
- pyconvexity/_version.py +1 -0
- pyconvexity/core/__init__.py +60 -0
- pyconvexity/core/database.py +485 -0
- pyconvexity/core/errors.py +106 -0
- pyconvexity/core/types.py +400 -0
- pyconvexity/data/README.md +101 -0
- pyconvexity/data/__init__.py +17 -0
- pyconvexity/data/loaders/__init__.py +3 -0
- pyconvexity/data/loaders/cache.py +213 -0
- pyconvexity/data/schema/01_core_schema.sql +420 -0
- pyconvexity/data/schema/02_data_metadata.sql +120 -0
- pyconvexity/data/schema/03_validation_data.sql +506 -0
- pyconvexity/data/sources/__init__.py +5 -0
- pyconvexity/data/sources/gem.py +442 -0
- pyconvexity/io/__init__.py +26 -0
- pyconvexity/io/excel_exporter.py +1226 -0
- pyconvexity/io/excel_importer.py +1381 -0
- pyconvexity/io/netcdf_exporter.py +197 -0
- pyconvexity/io/netcdf_importer.py +1833 -0
- pyconvexity/models/__init__.py +195 -0
- pyconvexity/models/attributes.py +730 -0
- pyconvexity/models/carriers.py +159 -0
- pyconvexity/models/components.py +611 -0
- pyconvexity/models/network.py +503 -0
- pyconvexity/models/results.py +148 -0
- pyconvexity/models/scenarios.py +234 -0
- pyconvexity/solvers/__init__.py +29 -0
- pyconvexity/solvers/pypsa/__init__.py +24 -0
- pyconvexity/solvers/pypsa/api.py +460 -0
- pyconvexity/solvers/pypsa/batch_loader.py +307 -0
- pyconvexity/solvers/pypsa/builder.py +675 -0
- pyconvexity/solvers/pypsa/constraints.py +405 -0
- pyconvexity/solvers/pypsa/solver.py +1509 -0
- pyconvexity/solvers/pypsa/storage.py +2048 -0
- pyconvexity/timeseries.py +330 -0
- pyconvexity/validation/__init__.py +25 -0
- pyconvexity/validation/rules.py +312 -0
- pyconvexity-0.4.3.dist-info/METADATA +47 -0
- pyconvexity-0.4.3.dist-info/RECORD +42 -0
- pyconvexity-0.4.3.dist-info/WHEEL +5 -0
- pyconvexity-0.4.3.dist-info/top_level.txt +1 -0
@@ -0,0 +1,2048 @@
"""
Result storage functionality for PyPSA solver integration.

Handles storing solve results back to the database with proper validation and error handling.
"""

import logging
import uuid
import pandas as pd
import numpy as np
from typing import Dict, Any, Optional, Callable

from pyconvexity.core.types import StaticValue
from pyconvexity.models import (
    list_components_by_type,
    set_static_attribute,
    set_timeseries_attribute,
)
from pyconvexity.validation import get_validation_rule

logger = logging.getLogger(__name__)


class ResultStorage:
    """
    Handles storing PyPSA solve results back to the database.

    This class manages the complex process of extracting results from PyPSA networks
    and storing them back to the database with proper validation and error handling.
    """

    def __init__(self, verbose: bool = False):
        """
        Initialize ResultStorage.

        Args:
            verbose: Enable detailed logging output
        """
        self.verbose = verbose

    def store_results(
        self,
        conn,
        network: "pypsa.Network",
        solve_result: Dict[str, Any],
        scenario_id: Optional[int] = None,
    ) -> Dict[str, Any]:
        """
        Store complete solve results back to database (single network per database).

        Args:
            conn: Database connection
            network: Solved PyPSA Network object
            solve_result: Solve result metadata
            scenario_id: Optional scenario ID (NULL for base network)

        Returns:
            Dictionary with storage statistics
        """
        run_id = solve_result.get("run_id", str(uuid.uuid4()))

        try:
            # Store component results
            component_stats = self._store_component_results(conn, network, scenario_id)

            # Calculate network statistics first
            network_stats = self._calculate_network_statistics(
                conn, network, solve_result
            )

            # Store solve summary with network statistics
            self._store_solve_summary(conn, solve_result, scenario_id, network_stats)
            conn.commit()

            # Store year-based statistics if available
            year_stats_stored = 0
            if solve_result.get("year_statistics"):
                year_stats_stored = self._store_year_based_statistics(
                    conn, network, solve_result["year_statistics"], scenario_id
                )
                conn.commit()

            total_gen = network_stats.get("core_summary", {}).get("total_generation_mwh", 0)
            total_cost = network_stats.get("core_summary", {}).get("total_cost", 0)
            logger.info(
                f"Results stored: {total_gen:.0f} MWh generation, {total_cost:.0f} cost, {year_stats_stored} years"
            )

            return {
                "component_stats": component_stats,
                "network_stats": network_stats,
                "year_stats_stored": year_stats_stored,
                "run_id": run_id,
                "success": True,
            }

        except Exception as e:
            logger.error(f"Result storage failed: {e}")
            return {
                "component_stats": {},
                "network_stats": {},
                "run_id": run_id,
                "success": False,
                "error": str(e),
            }
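
    # Usage sketch (minimal, assuming a sqlite3-style connection and a
    # solved PyPSA network; the variable names are illustrative):
    #
    #   storage = ResultStorage(verbose=True)
    #   outcome = storage.store_results(conn, network, solve_result)
    #   if not outcome["success"]:
    #       logger.error("storage failed: %s", outcome["error"])
    #
    # Note that conn.commit() is only issued after the solve summary is
    # written, so component writes land in the same transaction as the
    # summary row.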

    def _store_component_results(
        self, conn, network: "pypsa.Network", scenario_id: Optional[int]
    ) -> Dict[str, int]:
        """Store results for all component types (single network per database)."""
        results_stats = {
            "stored_bus_results": 0,
            "stored_generator_results": 0,
            "stored_unmet_load_results": 0,
            "stored_load_results": 0,
            "stored_line_results": 0,
            "stored_link_results": 0,
            "stored_storage_unit_results": 0,
            "stored_store_results": 0,
            "skipped_attributes": 0,
            "errors": 0,
        }

        try:
            # Store bus results
            if hasattr(network, "buses_t") and network.buses_t:
                results_stats["stored_bus_results"] = (
                    self._store_component_type_results(
                        conn, "BUS", network.buses, network.buses_t, scenario_id
                    )
                )

            # Store generator results (includes regular generators)
            if hasattr(network, "generators_t") and network.generators_t:
                results_stats["stored_generator_results"] = (
                    self._store_component_type_results(
                        conn,
                        "GENERATOR",
                        network.generators,
                        network.generators_t,
                        scenario_id,
                    )
                )

            # Store UNMET_LOAD results (these are also stored as generators in PyPSA)
            results_stats["stored_unmet_load_results"] = (
                self._store_component_type_results(
                    conn,
                    "UNMET_LOAD",
                    network.generators,
                    network.generators_t,
                    scenario_id,
                )
            )

            # Store load results
            if hasattr(network, "loads_t") and network.loads_t:
                results_stats["stored_load_results"] = (
                    self._store_component_type_results(
                        conn, "LOAD", network.loads, network.loads_t, scenario_id
                    )
                )

            # Store line results
            if hasattr(network, "lines_t") and network.lines_t:
                results_stats["stored_line_results"] = (
                    self._store_component_type_results(
                        conn, "LINE", network.lines, network.lines_t, scenario_id
                    )
                )

            # Store link results
            if hasattr(network, "links_t") and network.links_t:
                results_stats["stored_link_results"] = (
                    self._store_component_type_results(
                        conn, "LINK", network.links, network.links_t, scenario_id
                    )
                )

            # Store storage unit results
            if hasattr(network, "storage_units_t") and network.storage_units_t:
                results_stats["stored_storage_unit_results"] = (
                    self._store_component_type_results(
                        conn,
                        "STORAGE_UNIT",
                        network.storage_units,
                        network.storage_units_t,
                        scenario_id,
                    )
                )

            # Store store results
            if hasattr(network, "stores_t") and network.stores_t:
                results_stats["stored_store_results"] = (
                    self._store_component_type_results(
                        conn, "STORE", network.stores, network.stores_t, scenario_id
                    )
                )

            return results_stats

        except Exception as e:
            logger.error(f"Error storing solve results: {e}", exc_info=True)
            results_stats["errors"] += 1
            return results_stats

    def _store_component_type_results(
        self,
        conn,
        component_type: str,
        static_df: pd.DataFrame,
        timeseries_dict: Dict[str, pd.DataFrame],
        scenario_id: Optional[int],
    ) -> int:
        """Store results for a specific component type - only store OUTPUT attributes (single network per database)."""
        stored_count = 0

        try:
            # Get component name to ID mapping
            components = list_components_by_type(conn, component_type)
            name_to_id = {comp.name: comp.id for comp in components}

            # Store timeseries results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
            for attr_name, timeseries_df in timeseries_dict.items():
                if timeseries_df.empty:
                    continue

                # Check if this attribute is an output attribute (not an input)
                try:
                    rule = get_validation_rule(conn, component_type, attr_name)
                    if rule.is_input:
                        # Skip input attributes to preserve original input data
                        continue
                except Exception:
                    # If no validation rule found, skip to be safe
                    continue

                for component_name in timeseries_df.columns:
                    if component_name not in name_to_id:
                        continue

                    component_id = name_to_id[component_name]
                    component_series = timeseries_df[component_name]

                    # Skip if all values are NaN
                    if component_series.isna().all():
                        continue

                    # Convert to efficient values array
                    values = []
                    for value in component_series.values:
                        if pd.isna(value):
                            values.append(0.0)  # Fill NaN with 0.0
                        else:
                            values.append(float(value))

                    if not values:
                        continue

                    # Store using efficient format
                    try:
                        set_timeseries_attribute(
                            conn, component_id, attr_name, values, scenario_id
                        )
                        stored_count += 1
                    except Exception as e:
                        # Handle validation errors gracefully
                        if (
                            "No validation rule found" in str(e)
                            or "does not allow" in str(e)
                            or "ValidationError" in str(type(e).__name__)
                        ):
                            continue
                        else:
                            logger.warning(
                                f"Error storing timeseries {attr_name} for {component_type} '{component_name}': {e}"
                            )
                            continue

            # Store static optimization results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
            if not static_df.empty:
                for attr_name in static_df.columns:
                    # Check if this attribute is an output attribute (not an input)
                    try:
                        rule = get_validation_rule(conn, component_type, attr_name)
                        if rule.is_input:
                            # Skip input attributes to preserve original input data
                            continue
                    except Exception:
                        # If no validation rule found, skip to be safe
                        continue

                    for component_name, value in static_df[attr_name].items():
                        if component_name not in name_to_id or pd.isna(value):
                            continue

                        component_id = name_to_id[component_name]

                        try:
                            # Convert value to StaticValue. The bool check must
                            # come before the int check: bool is an int subclass,
                            # so checking int first would silently store booleans
                            # as integers.
                            if isinstance(value, bool):
                                static_value = StaticValue(bool(value))
                            elif isinstance(value, (int, np.integer)):
                                static_value = StaticValue(int(value))
                            elif isinstance(value, (float, np.floating)):
                                if np.isfinite(value):
                                    static_value = StaticValue(float(value))
                                else:
                                    continue  # Skip infinite/NaN values
                            else:
                                static_value = StaticValue(str(value))

                            # Store using atomic utility
                            set_static_attribute(
                                conn, component_id, attr_name, static_value, scenario_id
                            )
                            stored_count += 1

                        except Exception as e:
                            # Handle validation errors gracefully
                            if (
                                "No validation rule found" in str(e)
                                or "does not allow" in str(e)
                                or "ValidationError" in str(type(e).__name__)
                            ):
                                continue
                            else:
                                logger.warning(
                                    f"Error storing static {attr_name} for {component_type} '{component_name}': {e}"
                                )
                                continue

            return stored_count

        except Exception as e:
            logger.error(
                f"Error storing results for {component_type}: {e}", exc_info=True
            )
            return stored_count
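
    # Illustrative sketch of the is_input gate used above (the attribute
    # name "p" is an assumption for illustration, not taken from this file):
    #
    #   rule = get_validation_rule(conn, "GENERATOR", "p")
    #   if not rule.is_input:
    #       ...  # "p" is a solver output, safe to overwrite in the database
    #
    # Input attributes are never overwritten, so re-solving a network cannot
    # clobber the original model inputs.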

    def _store_solve_summary(
        self,
        conn,
        solve_result: Dict[str, Any],
        scenario_id: Optional[int],
        network_stats: Optional[Dict[str, Any]] = None,
    ):
        """Store solve summary to network_solve_results table (single network per database)."""
        try:
            # Prepare solve summary data
            solver_name = solve_result.get("solver_name", "unknown")
            solve_status = solve_result.get("status", "unknown")
            objective_value = solve_result.get("objective_value")
            solve_time = solve_result.get("solve_time", 0.0)

            # Create enhanced solve result with network statistics for serialization
            enhanced_solve_result = {
                **solve_result,
                "network_statistics": network_stats or {},
            }

            # Delete existing result for this scenario first (handles NULL scenario_id correctly)
            if scenario_id is None:
                conn.execute(
                    "DELETE FROM network_solve_results WHERE scenario_id IS NULL"
                )
            else:
                conn.execute(
                    "DELETE FROM network_solve_results WHERE scenario_id = ?",
                    (scenario_id,),
                )

            results_json = self._serialize_results_json(enhanced_solve_result)
            metadata_json = self._serialize_metadata_json(enhanced_solve_result)

            # Insert new solve results summary
            conn.execute(
                """
                INSERT INTO network_solve_results (
                    scenario_id, solver_name, solve_type, solve_status,
                    objective_value, solve_time_seconds, results_json, metadata_json
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                """,
                (
                    scenario_id,
                    solver_name,
                    "pypsa_optimization",
                    solve_status,
                    objective_value,
                    solve_time,
                    results_json,
                    metadata_json,
                ),
            )

        except Exception as e:
            logger.error(f"Failed to store solve summary: {e}")
            raise  # Re-raise to trigger rollback
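
    # The split DELETE above exists because SQL equality never matches NULL:
    # "WHERE scenario_id = ?" with a None parameter matches no rows, so the
    # base-network row (scenario_id IS NULL) needs its own statement. The
    # DELETE-then-INSERT pair acts as an upsert keyed on scenario_id.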

    def _calculate_network_statistics(
        self, conn, network: "pypsa.Network", solve_result: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Calculate network statistics - focusing only on capacity for now (single network per database)."""
        try:
            # Calculate carrier-specific statistics
            carrier_stats = self._calculate_carrier_statistics(conn, network)

            # Calculate basic network statistics
            total_cost = solve_result.get("objective_value", 0.0)
            total_generation_mwh = sum(
                carrier_stats.get("dispatch_by_carrier", {}).values()
            )
            total_emissions_tonnes = sum(
                carrier_stats.get("emissions_by_carrier", {}).values()
            )
            total_capital_cost = sum(
                carrier_stats.get("capital_cost_by_carrier", {}).values()
            )
            total_operational_cost = sum(
                carrier_stats.get("operational_cost_by_carrier", {}).values()
            )
            total_system_cost = sum(
                carrier_stats.get("total_system_cost_by_carrier", {}).values()
            )

            # Calculate unmet load statistics
            unmet_load_mwh = carrier_stats.get("dispatch_by_carrier", {}).get(
                "Unmet Load", 0.0
            )
            total_demand_mwh = self._calculate_total_demand(network)
            unmet_load_percentage = (
                (unmet_load_mwh / (total_demand_mwh + 1e-6)) * 100
                if total_demand_mwh > 0
                else 0.0
            )

            # Create nested structure expected by frontend
            network_statistics = {
                "core_summary": {
                    "total_generation_mwh": total_generation_mwh,
                    "total_demand_mwh": total_demand_mwh,
                    "total_cost": total_cost,
                    "load_factor": (
                        (total_demand_mwh / (total_generation_mwh + 1e-6))
                        if total_generation_mwh > 0
                        else 0.0
                    ),
                    "unserved_energy_mwh": unmet_load_mwh,
                },
                "custom_statistics": {
                    # Include carrier-specific statistics (capacity, dispatch, emissions, costs)
                    **carrier_stats,
                    "total_capital_cost": total_capital_cost,
                    "total_operational_cost": total_operational_cost,
                    "total_currency_cost": total_system_cost,  # Use calculated system cost instead of PyPSA objective
                    "total_emissions_tons_co2": total_emissions_tonnes,
                    "average_price_per_mwh": (
                        (total_system_cost / (total_generation_mwh + 1e-6))
                        if total_generation_mwh > 0
                        else 0.0
                    ),
                    "unmet_load_percentage": unmet_load_percentage,
                    "max_unmet_load_hour_mw": 0.0,  # TODO: Calculate max hourly unmet load later
                },
                "runtime_info": {
                    "component_count": (
                        (
                            len(network.buses)
                            + len(network.generators)
                            + len(network.loads)
                            + len(network.lines)
                            + len(network.links)
                        )
                        if hasattr(network, "buses")
                        else 0
                    ),
                    "bus_count": len(network.buses) if hasattr(network, "buses") else 0,
                    "generator_count": (
                        len(network.generators) if hasattr(network, "generators") else 0
                    ),
                    "load_count": (
                        len(network.loads) if hasattr(network, "loads") else 0
                    ),
                    "snapshot_count": (
                        len(network.snapshots) if hasattr(network, "snapshots") else 0
                    ),
                },
            }

            logger.info(
                f"Calculated network statistics: core_summary={network_statistics['core_summary']}"
            )
            return network_statistics

        except Exception as e:
            logger.error(f"Failed to calculate network statistics: {e}", exc_info=True)
            # Return empty structure matching expected format
            return {
                "core_summary": {
                    "total_generation_mwh": 0.0,
                    "total_demand_mwh": 0.0,
                    "total_cost": solve_result.get("objective_value", 0.0),
                    "load_factor": 0.0,
                    "unserved_energy_mwh": 0.0,
                },
                "custom_statistics": {
                    "dispatch_by_carrier": {},
                    "power_capacity_by_carrier": {},
                    "energy_capacity_by_carrier": {},
                    "emissions_by_carrier": {},
                    "capital_cost_by_carrier": {},
                    "operational_cost_by_carrier": {},
                    "total_system_cost_by_carrier": {},
                    "total_capital_cost": 0.0,
                    "total_operational_cost": 0.0,
                    "total_currency_cost": 0.0,
                    "total_emissions_tons_co2": 0.0,
                    "average_price_per_mwh": 0.0,
                    "unmet_load_percentage": 0.0,
                    "max_unmet_load_hour_mw": 0.0,
                },
                "runtime_info": {
                    "component_count": 0,
                    "bus_count": 0,
                    "generator_count": 0,
                    "load_count": 0,
                    "snapshot_count": 0,
                },
                "error": str(e),
            }
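
    # Worked example of the guarded ratios above: with 100,000 MWh generated
    # and 95,000 MWh demanded, load_factor = 95000 / (100000 + 1e-6) ~= 0.95;
    # the +1e-6 term only guards against division by zero and is negligible
    # at realistic magnitudes.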

    def _calculate_carrier_statistics(
        self, conn, network: "pypsa.Network"
    ) -> Dict[str, Any]:
        """
        Calculate carrier-specific statistics directly from the network (single network per database).
        This is the primary calculation - per-year stats will be calculated separately.
        """
        try:
            # Calculate all-year statistics directly from the network
            # Extract years from network snapshots
            if hasattr(network.snapshots, "levels"):
                # Multi-period optimization - get years from period level
                period_values = network.snapshots.get_level_values(0)
                years = sorted(period_values.unique())
            elif hasattr(network.snapshots, "year"):
                years = sorted(network.snapshots.year.unique())
            elif hasattr(network, "_available_years"):
                years = network._available_years
            else:
                years = [2020]  # Fallback
                logger.warning(f"No year information found, using fallback: {years}")

            # Calculate per-year statistics first
            all_year_stats = {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

            # Initialize all carriers with zero values
            cursor = conn.execute(
                """
                SELECT DISTINCT name FROM carriers
                """
            )
            all_carriers = [row[0] for row in cursor.fetchall()]

            # Initialize all carriers with zero values (including special "Unmet Load" carrier)
            all_carriers_with_unmet = all_carriers + ["Unmet Load"]
            for carrier in all_carriers_with_unmet:
                all_year_stats["dispatch_by_carrier"][carrier] = 0.0
                all_year_stats["power_capacity_by_carrier"][carrier] = 0.0
                all_year_stats["energy_capacity_by_carrier"][carrier] = 0.0
                all_year_stats["emissions_by_carrier"][carrier] = 0.0
                all_year_stats["capital_cost_by_carrier"][carrier] = 0.0
                all_year_stats["operational_cost_by_carrier"][carrier] = 0.0
                all_year_stats["total_system_cost_by_carrier"][carrier] = 0.0

            # Calculate statistics for each year and sum them up
            for year in years:
                year_stats = self._calculate_year_carrier_statistics(
                    conn, network, year
                )

                # Sum up all the statistics (including "Unmet Load")
                for carrier in all_carriers_with_unmet:
                    # Sum dispatch, emissions, and costs across years
                    all_year_stats["dispatch_by_carrier"][carrier] += year_stats[
                        "dispatch_by_carrier"
                    ].get(carrier, 0.0)
                    all_year_stats["emissions_by_carrier"][carrier] += year_stats[
                        "emissions_by_carrier"
                    ].get(carrier, 0.0)
                    all_year_stats["capital_cost_by_carrier"][carrier] += year_stats[
                        "capital_cost_by_carrier"
                    ].get(carrier, 0.0)
                    all_year_stats["operational_cost_by_carrier"][
                        carrier
                    ] += year_stats["operational_cost_by_carrier"].get(carrier, 0.0)
                    all_year_stats["total_system_cost_by_carrier"][
                        carrier
                    ] += year_stats["total_system_cost_by_carrier"].get(carrier, 0.0)

                    # For capacity: use the last year (final capacity state)
                    if year == years[-1]:
                        all_year_stats["power_capacity_by_carrier"][carrier] = (
                            year_stats["power_capacity_by_carrier"].get(carrier, 0.0)
                        )
                        all_year_stats["energy_capacity_by_carrier"][carrier] = (
                            year_stats["energy_capacity_by_carrier"].get(carrier, 0.0)
                        )

            return all_year_stats

        except Exception as e:
            logger.error(f"Failed to calculate carrier statistics: {e}")
            return {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }
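
    # Aggregation rule used above: dispatch, emissions, and costs are flows,
    # so they are summed across years; capacity is a stock, so the final
    # year's value is reported instead of a sum.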

    def _store_year_based_statistics(
        self,
        conn,
        network: "pypsa.Network",
        year_statistics: Dict[int, Dict[str, Any]],
        scenario_id: Optional[int],
    ) -> int:
        """Store year-based statistics to database (single network per database)"""
        try:
            import json

            stored_count = 0

            # Check if network_solve_results_by_year table exists, create if not
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS network_solve_results_by_year (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    scenario_id INTEGER,
                    year INTEGER NOT NULL,
                    results_json TEXT,
                    metadata_json TEXT,
                    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (scenario_id) REFERENCES scenarios(id),
                    UNIQUE(scenario_id, year)
                )
                """
            )

            for year, stats in year_statistics.items():
                try:
                    # Calculate proper year-specific carrier statistics
                    year_carrier_stats = self._calculate_year_carrier_statistics(
                        conn, network, year
                    )

                    # Merge year-specific carrier stats into the statistics
                    if "custom_statistics" in stats:
                        stats["custom_statistics"].update(year_carrier_stats)
                    else:
                        stats["custom_statistics"] = year_carrier_stats

                    # Wrap the year statistics in the same structure as overall results for consistency
                    year_result_wrapper = {
                        "success": True,
                        "year": year,
                        "network_statistics": stats,
                    }

                    metadata = {"year": year, "scenario_id": scenario_id}

                    # Delete existing result for this scenario+year first (handles NULL scenario_id correctly)
                    if scenario_id is None:
                        conn.execute(
                            """
                            DELETE FROM network_solve_results_by_year
                            WHERE scenario_id IS NULL AND year = ?
                            """,
                            (year,),
                        )
                    else:
                        conn.execute(
                            """
                            DELETE FROM network_solve_results_by_year
                            WHERE scenario_id = ? AND year = ?
                            """,
                            (scenario_id, year),
                        )

                    # Insert new year-based results
                    conn.execute(
                        """
                        INSERT INTO network_solve_results_by_year
                        (scenario_id, year, results_json, metadata_json)
                        VALUES (?, ?, ?, ?)
                        """,
                        (
                            scenario_id,
                            year,
                            json.dumps(
                                year_result_wrapper, default=self._json_serializer
                            ),
                            json.dumps(metadata, default=self._json_serializer),
                        ),
                    )

                    stored_count += 1

                except Exception as e:
                    logger.error(f"Failed to store statistics for year {year}: {e}")
                    continue

            return stored_count

        except Exception as e:
            logger.error(f"Failed to store year-based statistics: {e}")
            return 0
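
    # Each stored row's results_json takes the shape (illustrative values):
    #
    #   {"success": true, "year": 2030,
    #    "network_statistics": {"custom_statistics": {...}, ...}}
    #
    # which mirrors the overall network_solve_results payload, so readers
    # can parse both tables with the same code path.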

    def _calculate_year_carrier_statistics(
        self, conn, network: "pypsa.Network", year: int
    ) -> Dict[str, Any]:
        """
        Calculate carrier-specific statistics for a specific year.
        For now, only calculate capacity statistics.
        """
        try:
            # Initialize carrier statistics
            carrier_stats = {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},  # MW - Generators + Storage Units (power) + Lines + Links
                "energy_capacity_by_carrier": {},  # MWh - Stores + Storage Units (energy)
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

            # Get all carriers from database
            cursor = conn.execute(
                """
                SELECT DISTINCT name FROM carriers
                """
            )
            all_carriers = [row[0] for row in cursor.fetchall()]

            # Initialize all carriers with zero values (including special "Unmet Load" carrier)
            all_carriers_with_unmet = all_carriers + ["Unmet Load"]
            for carrier in all_carriers_with_unmet:
                carrier_stats["dispatch_by_carrier"][carrier] = 0.0
                carrier_stats["power_capacity_by_carrier"][carrier] = 0.0
                carrier_stats["energy_capacity_by_carrier"][carrier] = 0.0
                carrier_stats["emissions_by_carrier"][carrier] = 0.0
                carrier_stats["capital_cost_by_carrier"][carrier] = 0.0
                carrier_stats["operational_cost_by_carrier"][carrier] = 0.0
                carrier_stats["total_system_cost_by_carrier"][carrier] = 0.0

            # Calculate dispatch (generation) by carrier for this specific year

            # 1. GENERATORS - Generation dispatch (including UNMET_LOAD)
            if hasattr(network, "generators_t") and hasattr(network.generators_t, "p"):
                # Get generator-carrier mapping (include both GENERATOR and UNMET_LOAD)
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name,
                           CASE
                               WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
                               ELSE carr.name
                           END as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
                    """
                )
                generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter generation data for this specific year
                year_generation = self._filter_timeseries_by_year(
                    network.generators_t.p, network.snapshots, year
                )
                if year_generation is not None and not year_generation.empty:
                    for gen_name in year_generation.columns:
                        if gen_name in generator_carriers:
                            carrier_name = generator_carriers[gen_name]

                            # Calculate generation for this year (ALWAYS apply snapshot weightings to convert MW to MWh)
                            year_weightings = self._get_year_weightings(network, year)
                            if year_weightings is not None:
                                generation_mwh = float(
                                    (
                                        year_generation[gen_name].values
                                        * year_weightings
                                    ).sum()
                                )
                            else:
                                # Fallback: simple sum (will be incorrect for non-1H models)
                                generation_mwh = float(year_generation[gen_name].sum())
                                logger.warning(
                                    f"Could not apply snapshot weightings for {gen_name} in year {year} - energy may be incorrect"
                                )

                            if carrier_name in carrier_stats["dispatch_by_carrier"]:
                                carrier_stats["dispatch_by_carrier"][
                                    carrier_name
                                ] += generation_mwh

            # 2. STORAGE_UNITS - Discharge only (positive values)
            if hasattr(network, "storage_units_t") and hasattr(
                network.storage_units_t, "p"
            ):
                # Get storage unit-carrier mapping
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORAGE_UNIT'
                    """
                )
                storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter storage unit data for this specific year
                year_storage = self._filter_timeseries_by_year(
                    network.storage_units_t.p, network.snapshots, year
                )
                if year_storage is not None and not year_storage.empty:
                    for su_name in year_storage.columns:
                        if su_name in storage_unit_carriers:
                            carrier_name = storage_unit_carriers[su_name]

                            # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
                            year_weightings = self._get_year_weightings(network, year)
                            if year_weightings is not None:
                                discharge_mwh = float(
                                    (
                                        year_storage[su_name].clip(lower=0).values
                                        * year_weightings
                                    ).sum()
                                )
                            else:
                                # Fallback: simple sum (will be incorrect for non-1H models)
                                discharge_mwh = float(
                                    year_storage[su_name].clip(lower=0).sum()
                                )
                                logger.warning(
                                    f"Could not apply snapshot weightings for storage unit {su_name} in year {year} - energy may be incorrect"
                                )

                            if carrier_name in carrier_stats["dispatch_by_carrier"]:
                                carrier_stats["dispatch_by_carrier"][
                                    carrier_name
                                ] += discharge_mwh

            # 3. STORES - Discharge only (positive values)
            if hasattr(network, "stores_t") and hasattr(network.stores_t, "p"):
                # Get store-carrier mapping
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORE'
                    """
                )
                store_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter store data for this specific year
                year_stores = self._filter_timeseries_by_year(
                    network.stores_t.p, network.snapshots, year
                )
                if year_stores is not None and not year_stores.empty:
                    for store_name in year_stores.columns:
                        if store_name in store_carriers:
                            carrier_name = store_carriers[store_name]

                            # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
                            year_weightings = self._get_year_weightings(network, year)
                            if year_weightings is not None:
                                discharge_mwh = float(
                                    (
                                        year_stores[store_name].clip(lower=0).values
                                        * year_weightings
                                    ).sum()
                                )
                            else:
                                # Fallback: simple sum (will be incorrect for non-1H models)
                                discharge_mwh = float(
                                    year_stores[store_name].clip(lower=0).sum()
                                )
                                logger.warning(
                                    f"Could not apply snapshot weightings for store {store_name} in year {year} - energy may be incorrect"
                                )

                            if carrier_name in carrier_stats["dispatch_by_carrier"]:
                                carrier_stats["dispatch_by_carrier"][
                                    carrier_name
                                ] += discharge_mwh
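
            # Worked example of the MW -> MWh conversion used above: with
            # 3-hourly snapshots (weighting w_t = 3.0), a constant 10 MW
            # dispatch over 8 snapshots gives sum(p_t * w_t) = 10 * 3 * 8
            # = 240 MWh, whereas the unweighted fallback would report 80.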

            # Calculate emissions by carrier for this specific year
            # Get emission factors for all carriers
            cursor = conn.execute(
                """
                SELECT name, co2_emissions FROM carriers
                """
            )
            emission_factors = {row[0]: row[1] for row in cursor.fetchall()}

            # Calculate emissions: dispatch (MWh) × emission factor (tonnes CO2/MWh) = tonnes CO2
            for carrier_name, dispatch_mwh in carrier_stats[
                "dispatch_by_carrier"
            ].items():
                # Handle None values safely
                if dispatch_mwh is None:
                    dispatch_mwh = 0.0

                emission_factor = emission_factors.get(
                    carrier_name, 0.0
                )  # Default to 0 if no emission factor
                if emission_factor is None:
                    emission_factor = 0.0

                emissions_tonnes = dispatch_mwh * emission_factor

                if carrier_name in carrier_stats["emissions_by_carrier"]:
                    carrier_stats["emissions_by_carrier"][
                        carrier_name
                    ] += emissions_tonnes
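
            # Example: 100,000 MWh of a carrier with co2_emissions = 0.4
            # tonnes/MWh contributes 40,000 tonnes; carriers without an
            # emission factor fall back to 0.0 and contribute nothing.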

            # Calculate capital costs by carrier for this specific year
            # Capital costs are annualized and counted every year the component is active

            # Helper function to check if component is active in this year
            def is_component_active(build_year, lifetime, current_year):
                """Check if component is active in the current year based on build_year and lifetime"""
                if pd.isna(build_year):
                    return True  # No build year constraint

                build_year = int(build_year)
                if build_year > current_year:
                    return False  # Not built yet

                if pd.isna(lifetime) or lifetime == float("inf"):
                    return True  # Infinite lifetime

                lifetime = int(lifetime)
                end_year = build_year + lifetime - 1
                return current_year <= end_year
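
            # Example: build_year=2030, lifetime=25 -> active for years
            # 2030..2054 inclusive (end_year = 2030 + 25 - 1); a missing
            # (NaN) build_year means "always active".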

            # 1. GENERATORS - Capital costs (including UNMET_LOAD)
            if hasattr(network, "generators") and not network.generators.empty:
                # Get generator info: carrier, capital_cost, build_year, lifetime (include UNMET_LOAD)
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name,
                           CASE
                               WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
                               ELSE carr.name
                           END as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
                    """
                )
                generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for gen_name in network.generators.index:
                    if gen_name in generator_carriers:
                        carrier_name = generator_carriers[gen_name]

                        # Get build year and lifetime
                        build_year = (
                            network.generators.loc[gen_name, "build_year"]
                            if "build_year" in network.generators.columns
                            else None
                        )
                        lifetime = (
                            network.generators.loc[gen_name, "lifetime"]
                            if "lifetime" in network.generators.columns
                            else None
                        )

                        # Check if component is active in this year
                        if is_component_active(build_year, lifetime, year):
                            # Get capacity and capital cost
                            if "p_nom_opt" in network.generators.columns:
                                capacity_mw = float(
                                    network.generators.loc[gen_name, "p_nom_opt"]
                                )
                            else:
                                capacity_mw = (
                                    float(network.generators.loc[gen_name, "p_nom"])
                                    if "p_nom" in network.generators.columns
                                    else 0.0
                                )

                            capital_cost_per_mw = (
                                float(network.generators.loc[gen_name, "capital_cost"])
                                if "capital_cost" in network.generators.columns
                                else 0.0
                            )

                            # Calculate annualized capital cost for this year
                            annual_capital_cost = capacity_mw * capital_cost_per_mw

                            if carrier_name in carrier_stats["capital_cost_by_carrier"]:
                                carrier_stats["capital_cost_by_carrier"][
                                    carrier_name
                                ] += annual_capital_cost

            # 2. STORAGE_UNITS - Capital costs
            if hasattr(network, "storage_units") and not network.storage_units.empty:
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORAGE_UNIT'
                    """
                )
                storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for su_name in network.storage_units.index:
                    if su_name in storage_unit_carriers:
                        carrier_name = storage_unit_carriers[su_name]

                        # Get build year and lifetime
                        build_year = (
                            network.storage_units.loc[su_name, "build_year"]
                            if "build_year" in network.storage_units.columns
                            else None
                        )
                        lifetime = (
                            network.storage_units.loc[su_name, "lifetime"]
                            if "lifetime" in network.storage_units.columns
                            else None
                        )

                        # Check if component is active in this year
                        if is_component_active(build_year, lifetime, year):
                            # Get power capacity and capital cost (per MW)
                            if "p_nom_opt" in network.storage_units.columns:
                                capacity_mw = float(
                                    network.storage_units.loc[su_name, "p_nom_opt"]
                                )
                            else:
                                capacity_mw = (
                                    float(network.storage_units.loc[su_name, "p_nom"])
                                    if "p_nom" in network.storage_units.columns
                                    else 0.0
                                )

                            capital_cost_per_mw = (
                                float(
                                    network.storage_units.loc[su_name, "capital_cost"]
                                )
                                if "capital_cost" in network.storage_units.columns
                                else 0.0
                            )

                            # Calculate annualized capital cost for this year
                            annual_capital_cost = capacity_mw * capital_cost_per_mw

                            if carrier_name in carrier_stats["capital_cost_by_carrier"]:
                                carrier_stats["capital_cost_by_carrier"][
                                    carrier_name
                                ] += annual_capital_cost

            # 3. STORES - Capital costs (per MWh)
            if hasattr(network, "stores") and not network.stores.empty:
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORE'
                    """
                )
                store_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for store_name in network.stores.index:
                    if store_name in store_carriers:
                        carrier_name = store_carriers[store_name]

                        # Get build year and lifetime
                        build_year = (
                            network.stores.loc[store_name, "build_year"]
                            if "build_year" in network.stores.columns
                            else None
                        )
                        lifetime = (
                            network.stores.loc[store_name, "lifetime"]
                            if "lifetime" in network.stores.columns
                            else None
                        )

                        # Check if component is active in this year
                        if is_component_active(build_year, lifetime, year):
                            # Get energy capacity and capital cost (per MWh)
                            if "e_nom_opt" in network.stores.columns:
                                capacity_mwh = float(
                                    network.stores.loc[store_name, "e_nom_opt"]
                                )
                            else:
                                capacity_mwh = (
                                    float(network.stores.loc[store_name, "e_nom"])
                                    if "e_nom" in network.stores.columns
                                    else 0.0
                                )

                            capital_cost_per_mwh = (
                                float(network.stores.loc[store_name, "capital_cost"])
                                if "capital_cost" in network.stores.columns
                                else 0.0
                            )

                            # Calculate annualized capital cost for this year
                            annual_capital_cost = capacity_mwh * capital_cost_per_mwh

                            if carrier_name in carrier_stats["capital_cost_by_carrier"]:
                                carrier_stats["capital_cost_by_carrier"][
                                    carrier_name
                                ] += annual_capital_cost

            # 4. LINES - Capital costs (per MVA)
            if hasattr(network, "lines") and not network.lines.empty:
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'LINE'
                    """
                )
                line_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for line_name in network.lines.index:
                    if line_name in line_carriers:
                        carrier_name = line_carriers[line_name]

                        # Get build year and lifetime
                        build_year = (
                            network.lines.loc[line_name, "build_year"]
                            if "build_year" in network.lines.columns
                            else None
                        )
                        lifetime = (
                            network.lines.loc[line_name, "lifetime"]
                            if "lifetime" in network.lines.columns
                            else None
                        )

                        # Check if component is active in this year
                        if is_component_active(build_year, lifetime, year):
                            # Get apparent power capacity and capital cost (per MVA)
                            if "s_nom_opt" in network.lines.columns:
                                capacity_mva = float(
                                    network.lines.loc[line_name, "s_nom_opt"]
                                )
                            else:
                                capacity_mva = (
                                    float(network.lines.loc[line_name, "s_nom"])
                                    if "s_nom" in network.lines.columns
                                    else 0.0
                                )

                            capital_cost_per_mva = (
                                float(network.lines.loc[line_name, "capital_cost"])
                                if "capital_cost" in network.lines.columns
                                else 0.0
                            )

                            # Calculate annualized capital cost for this year
                            annual_capital_cost = capacity_mva * capital_cost_per_mva

                            if carrier_name in carrier_stats["capital_cost_by_carrier"]:
                                carrier_stats["capital_cost_by_carrier"][
                                    carrier_name
                                ] += annual_capital_cost

            # 5. LINKS - Capital costs (per MW)
            if hasattr(network, "links") and not network.links.empty:
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'LINK'
                    """
                )
                link_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for link_name in network.links.index:
                    if link_name in link_carriers:
                        carrier_name = link_carriers[link_name]

                        # Get build year and lifetime
                        build_year = (
                            network.links.loc[link_name, "build_year"]
                            if "build_year" in network.links.columns
                            else None
                        )
                        lifetime = (
                            network.links.loc[link_name, "lifetime"]
                            if "lifetime" in network.links.columns
                            else None
                        )

                        # Check if component is active in this year
                        if is_component_active(build_year, lifetime, year):
                            # Get power capacity and capital cost (per MW)
                            if "p_nom_opt" in network.links.columns:
                                capacity_mw = float(
                                    network.links.loc[link_name, "p_nom_opt"]
                                )
                            else:
                                capacity_mw = (
                                    float(network.links.loc[link_name, "p_nom"])
                                    if "p_nom" in network.links.columns
                                    else 0.0
                                )

                            capital_cost_per_mw = (
                                float(network.links.loc[link_name, "capital_cost"])
                                if "capital_cost" in network.links.columns
                                else 0.0
                            )

                            # Calculate annualized capital cost for this year
                            annual_capital_cost = capacity_mw * capital_cost_per_mw

                            if carrier_name in carrier_stats["capital_cost_by_carrier"]:
                                carrier_stats["capital_cost_by_carrier"][
                                    carrier_name
                                ] += annual_capital_cost
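
            # Capital-cost convention used above: capital_cost is treated as
            # an annualized figure, so each active year adds capacity *
            # capital_cost once. Units differ by component: generators,
            # storage units, and links are rated in MW (p_nom), lines in MVA
            # (s_nom), and stores in MWh (e_nom).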

            # Calculate operational costs by carrier for this specific year
            # Operational costs = dispatch (MWh) × marginal_cost (currency/MWh)
            # Only for components that are active in this year

            # 1. GENERATORS - Operational costs (including UNMET_LOAD)
            if hasattr(network, "generators_t") and hasattr(network.generators_t, "p"):
                # Get generator info: carrier, marginal_cost, build_year, lifetime (include UNMET_LOAD)
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name,
                           CASE
                               WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
                               ELSE carr.name
                           END as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
                    """
                )
                generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter generation data for this specific year
                year_generation = self._filter_timeseries_by_year(
                    network.generators_t.p, network.snapshots, year
                )
                if year_generation is not None and not year_generation.empty:
                    for gen_name in year_generation.columns:
                        if gen_name in generator_carriers:
                            carrier_name = generator_carriers[gen_name]

                            # Get build year and lifetime
                            build_year = (
                                network.generators.loc[gen_name, "build_year"]
                                if "build_year" in network.generators.columns
                                else None
                            )
                            lifetime = (
                                network.generators.loc[gen_name, "lifetime"]
                                if "lifetime" in network.generators.columns
                                else None
                            )

                            # Check if component is active in this year
                            if is_component_active(build_year, lifetime, year):
                                # Calculate generation for this year (already calculated above, but need to recalculate for operational costs)
                                year_weightings = self._get_year_weightings(
                                    network, year
                                )
                                if year_weightings is not None:
                                    generation_mwh = float(
                                        (
                                            year_generation[gen_name].values
                                            * year_weightings
                                        ).sum()
                                    )
                                else:
                                    generation_mwh = float(
                                        year_generation[gen_name].sum()
                                    )

                                # Get marginal cost
                                marginal_cost = (
                                    float(
                                        network.generators.loc[
                                            gen_name, "marginal_cost"
                                        ]
                                    )
                                    if "marginal_cost" in network.generators.columns
                                    else 0.0
                                )

                                # Calculate operational cost for this year
                                operational_cost = generation_mwh * marginal_cost

                                if (
                                    carrier_name
                                    in carrier_stats["operational_cost_by_carrier"]
                                ):
                                    carrier_stats["operational_cost_by_carrier"][
                                        carrier_name
                                    ] += operational_cost
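
                                # Note: marginal_cost is read from the static
                                # generators table here, so time-varying
                                # marginal costs (e.g. in generators_t) would
                                # not be reflected in this estimate.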
|
|
1327
|
+
|
|
1328
|
+
            # 2. STORAGE_UNITS - Operational costs (discharge only)
            if hasattr(network, "storage_units_t") and hasattr(
                network.storage_units_t, "p"
            ):
                # Get storage unit info: carrier, marginal_cost, build_year, lifetime
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORAGE_UNIT'
                    """
                )
                storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter storage unit data for this specific year
                year_storage = self._filter_timeseries_by_year(
                    network.storage_units_t.p, network.snapshots, year
                )
                if year_storage is not None and not year_storage.empty:
                    for su_name in year_storage.columns:
                        if su_name in storage_unit_carriers:
                            carrier_name = storage_unit_carriers[su_name]

                            # Get build year and lifetime
                            build_year = (
                                network.storage_units.loc[su_name, "build_year"]
                                if "build_year" in network.storage_units.columns
                                else None
                            )
                            lifetime = (
                                network.storage_units.loc[su_name, "lifetime"]
                                if "lifetime" in network.storage_units.columns
                                else None
                            )

                            # Check if component is active in this year
                            if is_component_active(build_year, lifetime, year):
                                # Calculate discharge for this year (positive values only)
                                year_weightings = self._get_year_weightings(
                                    network, year
                                )
                                if year_weightings is not None:
                                    discharge_mwh = float(
                                        (
                                            year_storage[su_name].clip(lower=0).values
                                            * year_weightings
                                        ).sum()
                                    )
                                else:
                                    discharge_mwh = float(
                                        year_storage[su_name].clip(lower=0).sum()
                                    )

                                # Get marginal cost
                                marginal_cost = (
                                    float(
                                        network.storage_units.loc[
                                            su_name, "marginal_cost"
                                        ]
                                    )
                                    if "marginal_cost" in network.storage_units.columns
                                    else 0.0
                                )

                                # Calculate operational cost for this year
                                operational_cost = discharge_mwh * marginal_cost

                                if (
                                    carrier_name
                                    in carrier_stats["operational_cost_by_carrier"]
                                ):
                                    carrier_stats["operational_cost_by_carrier"][
                                        carrier_name
                                    ] += operational_cost

            # 3. STORES - Operational costs (discharge only)
            if hasattr(network, "stores_t") and hasattr(network.stores_t, "p"):
                # Get store info: carrier, marginal_cost, build_year, lifetime
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORE'
                    """
                )
                store_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                # Filter store data for this specific year
                year_stores = self._filter_timeseries_by_year(
                    network.stores_t.p, network.snapshots, year
                )
                if year_stores is not None and not year_stores.empty:
                    for store_name in year_stores.columns:
                        if store_name in store_carriers:
                            carrier_name = store_carriers[store_name]

                            # Get build year and lifetime
                            build_year = (
                                network.stores.loc[store_name, "build_year"]
                                if "build_year" in network.stores.columns
                                else None
                            )
                            lifetime = (
                                network.stores.loc[store_name, "lifetime"]
                                if "lifetime" in network.stores.columns
                                else None
                            )

                            # Check if component is active in this year
                            if is_component_active(build_year, lifetime, year):
                                # Calculate discharge for this year (positive values only)
                                year_weightings = self._get_year_weightings(
                                    network, year
                                )
                                if year_weightings is not None:
                                    discharge_mwh = float(
                                        (
                                            year_stores[store_name].clip(lower=0).values
                                            * year_weightings
                                        ).sum()
                                    )
                                else:
                                    discharge_mwh = float(
                                        year_stores[store_name].clip(lower=0).sum()
                                    )

                                # Get marginal cost
                                marginal_cost = (
                                    float(
                                        network.stores.loc[store_name, "marginal_cost"]
                                    )
                                    if "marginal_cost" in network.stores.columns
                                    else 0.0
                                )

                                # Calculate operational cost for this year
                                operational_cost = discharge_mwh * marginal_cost

                                if (
                                    carrier_name
                                    in carrier_stats["operational_cost_by_carrier"]
                                ):
                                    carrier_stats["operational_cost_by_carrier"][
                                        carrier_name
                                    ] += operational_cost

            # Calculate total system costs by carrier for this specific year
            # Total system cost = capital cost + operational cost
            for carrier_name in carrier_stats["capital_cost_by_carrier"]:
                capital_cost = carrier_stats["capital_cost_by_carrier"][carrier_name]
                operational_cost = carrier_stats["operational_cost_by_carrier"][
                    carrier_name
                ]
                total_system_cost = capital_cost + operational_cost

                if carrier_name in carrier_stats["total_system_cost_by_carrier"]:
                    carrier_stats["total_system_cost_by_carrier"][
                        carrier_name
                    ] = total_system_cost

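            # Illustrative example (hypothetical figures): if a carrier accrued
            # capital_cost = 1.0e6 and operational_cost = 2.5e5 in this year,
            # total_system_cost_by_carrier for that carrier is set to 1.25e6.
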
            # Calculate capacity by carrier for this specific year

            # 1. GENERATORS - Power capacity (MW) (including UNMET_LOAD)
            if hasattr(network, "generators") and not network.generators.empty:
                # Get generator-carrier mapping (include UNMET_LOAD)
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name,
                           CASE
                               WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
                               ELSE carr.name
                           END as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type IN ('GENERATOR', 'UNMET_LOAD')
                    """
                )
                generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for gen_name in network.generators.index:
                    if gen_name in generator_carriers:
                        carrier_name = generator_carriers[gen_name]

                        # Check if this generator is available in this year (build_year <= year)
                        is_available = True
                        if "build_year" in network.generators.columns:
                            build_year = network.generators.loc[gen_name, "build_year"]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Use p_nom_opt if available, otherwise p_nom
                            if "p_nom_opt" in network.generators.columns:
                                capacity_mw = float(
                                    network.generators.loc[gen_name, "p_nom_opt"]
                                )
                            else:
                                capacity_mw = (
                                    float(network.generators.loc[gen_name, "p_nom"])
                                    if "p_nom" in network.generators.columns
                                    else 0.0
                                )

                            if (
                                carrier_name
                                in carrier_stats["power_capacity_by_carrier"]
                            ):
                                carrier_stats["power_capacity_by_carrier"][
                                    carrier_name
                                ] += capacity_mw

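            # Note on the fallback above: after a successful optimization PyPSA
            # exposes the optimized capacity as p_nom_opt, while p_nom is the
            # input (pre-solve) capacity, so p_nom is only used here when no
            # optimized value is present.
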
            # 2. STORAGE_UNITS - Power capacity (MW) + Energy capacity (MWh)
            if hasattr(network, "storage_units") and not network.storage_units.empty:
                # Get storage unit-carrier mapping
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORAGE_UNIT'
                    """
                )
                storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for su_name in network.storage_units.index:
                    if su_name in storage_unit_carriers:
                        carrier_name = storage_unit_carriers[su_name]

                        # Check if this storage unit is available in this year
                        is_available = True
                        if "build_year" in network.storage_units.columns:
                            build_year = network.storage_units.loc[
                                su_name, "build_year"
                            ]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Power capacity (MW)
                            if "p_nom_opt" in network.storage_units.columns:
                                p_nom_opt = float(
                                    network.storage_units.loc[su_name, "p_nom_opt"]
                                )
                            else:
                                p_nom_opt = (
                                    float(network.storage_units.loc[su_name, "p_nom"])
                                    if "p_nom" in network.storage_units.columns
                                    else 0.0
                                )

                            if (
                                carrier_name
                                in carrier_stats["power_capacity_by_carrier"]
                            ):
                                carrier_stats["power_capacity_by_carrier"][
                                    carrier_name
                                ] += p_nom_opt

                            # Energy capacity (MWh) using max_hours
                            max_hours = 1.0  # Default
                            if "max_hours" in network.storage_units.columns:
                                max_hours = float(
                                    network.storage_units.loc[su_name, "max_hours"]
                                )
                            energy_capacity_mwh = p_nom_opt * max_hours

                            if (
                                carrier_name
                                in carrier_stats["energy_capacity_by_carrier"]
                            ):
                                carrier_stats["energy_capacity_by_carrier"][
                                    carrier_name
                                ] += energy_capacity_mwh

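            # Illustrative example (hypothetical numbers): a storage unit with
            # p_nom_opt = 100 MW and max_hours = 4 contributes 100 MW of power
            # capacity and 100 * 4 = 400 MWh of energy capacity to its carrier.
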
            # 3. STORES - Energy capacity (MWh) only
            if hasattr(network, "stores") and not network.stores.empty:
                # Get store-carrier mapping
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'STORE'
                    """
                )
                store_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for store_name in network.stores.index:
                    if store_name in store_carriers:
                        carrier_name = store_carriers[store_name]

                        # Check if this store is available in this year
                        is_available = True
                        if "build_year" in network.stores.columns:
                            build_year = network.stores.loc[store_name, "build_year"]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Energy capacity (MWh)
                            if "e_nom_opt" in network.stores.columns:
                                capacity_mwh = float(
                                    network.stores.loc[store_name, "e_nom_opt"]
                                )
                            else:
                                capacity_mwh = (
                                    float(network.stores.loc[store_name, "e_nom"])
                                    if "e_nom" in network.stores.columns
                                    else 0.0
                                )

                            if (
                                carrier_name
                                in carrier_stats["energy_capacity_by_carrier"]
                            ):
                                carrier_stats["energy_capacity_by_carrier"][
                                    carrier_name
                                ] += capacity_mwh

            # 4. LINES - Apparent power capacity (MVA -> MW)
            if hasattr(network, "lines") and not network.lines.empty:
                # Get line-carrier mapping
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'LINE'
                    """
                )
                line_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for line_name in network.lines.index:
                    if line_name in line_carriers:
                        carrier_name = line_carriers[line_name]

                        # Check if this line is available in this year
                        is_available = True
                        if "build_year" in network.lines.columns:
                            build_year = network.lines.loc[line_name, "build_year"]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Apparent power capacity (MVA -> MW, assume power factor = 1)
                            if "s_nom_opt" in network.lines.columns:
                                capacity_mva = float(
                                    network.lines.loc[line_name, "s_nom_opt"]
                                )
                            else:
                                capacity_mva = (
                                    float(network.lines.loc[line_name, "s_nom"])
                                    if "s_nom" in network.lines.columns
                                    else 0.0
                                )

                            capacity_mw = capacity_mva  # Convert MVA to MW

                            if (
                                carrier_name
                                in carrier_stats["power_capacity_by_carrier"]
                            ):
                                carrier_stats["power_capacity_by_carrier"][
                                    carrier_name
                                ] += capacity_mw

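            # Note: treating MVA as MW above assumes a power factor of 1, so a
            # line with s_nom_opt = 500 MVA is reported as 500 MW of capacity.
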
            # 5. LINKS - Power capacity (MW)
            if hasattr(network, "links") and not network.links.empty:
                # Get link-carrier mapping
                cursor = conn.execute(
                    """
                    SELECT c.name as component_name, carr.name as carrier_name
                    FROM components c
                    JOIN carriers carr ON c.carrier_id = carr.id
                    WHERE c.component_type = 'LINK'
                    """
                )
                link_carriers = {row[0]: row[1] for row in cursor.fetchall()}

                for link_name in network.links.index:
                    if link_name in link_carriers:
                        carrier_name = link_carriers[link_name]

                        # Check if this link is available in this year
                        is_available = True
                        if "build_year" in network.links.columns:
                            build_year = network.links.loc[link_name, "build_year"]
                            if pd.notna(build_year) and int(build_year) > year:
                                is_available = False

                        if is_available:
                            # Power capacity (MW)
                            if "p_nom_opt" in network.links.columns:
                                capacity_mw = float(
                                    network.links.loc[link_name, "p_nom_opt"]
                                )
                            else:
                                capacity_mw = (
                                    float(network.links.loc[link_name, "p_nom"])
                                    if "p_nom" in network.links.columns
                                    else 0.0
                                )

                            if (
                                carrier_name
                                in carrier_stats["power_capacity_by_carrier"]
                            ):
                                carrier_stats["power_capacity_by_carrier"][
                                    carrier_name
                                ] += capacity_mw

            return carrier_stats

        except Exception as e:
            logger.error(f"Failed to calculate year {year} carrier statistics: {e}")
            return {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

    def _sum_year_based_carrier_statistics(self, conn) -> Dict[str, Any]:
        """
        Sum up per-year carrier statistics for accurate multi-year totals (single network per database).
        For capacity: take the LAST YEAR (final capacity) instead of maximum.
        """
        try:
            import json

            # Initialize totals
            totals = {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

            # Get all carriers from database
            cursor = conn.execute(
                """
                SELECT DISTINCT name FROM carriers
                """
            )
            all_carriers = [row[0] for row in cursor.fetchall()]

            # Initialize all carriers with zero values (including special "Unmet Load" carrier)
            all_carriers_with_unmet = all_carriers + ["Unmet Load"]
            for carrier in all_carriers_with_unmet:
                totals["dispatch_by_carrier"][carrier] = 0.0
                totals["power_capacity_by_carrier"][carrier] = 0.0
                totals["energy_capacity_by_carrier"][carrier] = 0.0
                totals["emissions_by_carrier"][carrier] = 0.0
                totals["capital_cost_by_carrier"][carrier] = 0.0
                totals["operational_cost_by_carrier"][carrier] = 0.0
                totals["total_system_cost_by_carrier"][carrier] = 0.0

            # Get all year-based results, ordered by year
            cursor = conn.execute(
                """
                SELECT year, results_json FROM network_solve_results_by_year
                ORDER BY year
                """
            )

            year_results = cursor.fetchall()

            if not year_results:
                return totals

            # For capacity: use the LAST YEAR only (final capacity state)
            last_year, last_results_json = year_results[-1]

            try:
                results = json.loads(last_results_json)
                network_stats = results.get("network_statistics", {})
                custom_stats = network_stats.get("custom_statistics", {})

                # Use last year's capacity as the all-year capacity
                power_capacity_by_carrier = custom_stats.get(
                    "power_capacity_by_carrier", {}
                )
                for carrier, value in power_capacity_by_carrier.items():
                    if carrier in totals["power_capacity_by_carrier"]:
                        totals["power_capacity_by_carrier"][carrier] = float(value or 0)

                energy_capacity_by_carrier = custom_stats.get(
                    "energy_capacity_by_carrier", {}
                )
                for carrier, value in energy_capacity_by_carrier.items():
                    if carrier in totals["energy_capacity_by_carrier"]:
                        totals["energy_capacity_by_carrier"][carrier] = float(
                            value or 0
                        )

            except Exception as e:
                logger.error(f"Failed to process last year ({last_year}) results: {e}")

            # For other stats (dispatch, emissions, costs): sum across all years
            for year, results_json in year_results:
                try:
                    results = json.loads(results_json)
                    network_stats = results.get("network_statistics", {})
                    custom_stats = network_stats.get("custom_statistics", {})

                    # Sum dispatch (energy values - sum across years)
                    dispatch_by_carrier = custom_stats.get("dispatch_by_carrier", {})
                    for carrier, value in dispatch_by_carrier.items():
                        if carrier in totals["dispatch_by_carrier"]:
                            totals["dispatch_by_carrier"][carrier] += float(value or 0)

                    # Sum emissions (cumulative across years)
                    emissions_by_carrier = custom_stats.get("emissions_by_carrier", {})
                    for carrier, value in emissions_by_carrier.items():
                        if carrier in totals["emissions_by_carrier"]:
                            totals["emissions_by_carrier"][carrier] += float(value or 0)

                    # Sum capital costs (cumulative across years)
                    capital_cost_by_carrier = custom_stats.get(
                        "capital_cost_by_carrier", {}
                    )
                    for carrier, value in capital_cost_by_carrier.items():
                        if carrier in totals["capital_cost_by_carrier"]:
                            totals["capital_cost_by_carrier"][carrier] += float(
                                value or 0
                            )

                    # Sum operational costs (cumulative across years)
                    operational_cost_by_carrier = custom_stats.get(
                        "operational_cost_by_carrier", {}
                    )
                    for carrier, value in operational_cost_by_carrier.items():
                        if carrier in totals["operational_cost_by_carrier"]:
                            totals["operational_cost_by_carrier"][carrier] += float(
                                value or 0
                            )

                    # Sum total system costs (cumulative across years)
                    total_system_cost_by_carrier = custom_stats.get(
                        "total_system_cost_by_carrier", {}
                    )
                    for carrier, value in total_system_cost_by_carrier.items():
                        if carrier in totals["total_system_cost_by_carrier"]:
                            totals["total_system_cost_by_carrier"][carrier] += float(
                                value or 0
                            )

                except Exception as e:
                    logger.error(f"Failed to process year {year} results: {e}")
                    continue

            return totals

        except Exception as e:
            logger.error(f"Failed to sum year-based carrier statistics: {e}")
            # Return empty structure on error
            return {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {},
            }

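    # Minimal usage sketch for _sum_year_based_carrier_statistics above (hedged:
    # assumes an open SQLite connection whose schema contains the carriers and
    # network_solve_results_by_year tables populated by this module; the path is
    # hypothetical):
    #
    #   import sqlite3
    #   conn = sqlite3.connect("model.db")
    #   totals = ResultStorage()._sum_year_based_carrier_statistics(conn)
    #   totals["dispatch_by_carrier"]        # summed across all years
    #   totals["power_capacity_by_carrier"]  # taken from the final year only
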
    def _serialize_results_json(self, solve_result: Dict[str, Any]) -> str:
        """Serialize solve results to JSON string."""
        import json

        try:
            # Create a clean results dictionary
            results = {
                "success": solve_result.get("success", False),
                "status": solve_result.get("status", "unknown"),
                "solve_time": solve_result.get("solve_time", 0.0),
                "objective_value": solve_result.get("objective_value"),
                "solver_name": solve_result.get("solver_name", "unknown"),
                "run_id": solve_result.get("run_id"),
                "network_statistics": solve_result.get("network_statistics", {}),
                "pypsa_result": solve_result.get("pypsa_result", {}),
            }
            return json.dumps(results, default=self._json_serializer)
        except Exception as e:
            logger.warning(f"Failed to serialize results JSON: {e}")
            return json.dumps({"error": "serialization_failed"})

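    # Shape of the payload produced by _serialize_results_json above
    # (illustrative values only, not real output):
    #   '{"success": true, "status": "optimal", "solve_time": 12.3,
    #     "objective_value": 1000000.0, "solver_name": "highs", ...}'
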
    def _serialize_metadata_json(self, solve_result: Dict[str, Any]) -> str:
        """Serialize solve metadata to JSON string."""
        import json

        try:
            metadata = {
                "solver_name": solve_result.get("solver_name", "unknown"),
                "run_id": solve_result.get("run_id"),
                "multi_period": solve_result.get("multi_period", False),
                "years": solve_result.get("years", []),
                "network_name": solve_result.get("network_name"),
                "num_snapshots": solve_result.get("num_snapshots", 0),
            }
            return json.dumps(metadata, default=self._json_serializer)
        except Exception as e:
            logger.warning(f"Failed to serialize metadata JSON: {e}")
            return json.dumps({"error": "serialization_failed"})

    def _filter_timeseries_by_year(
        self, timeseries_df: "pd.DataFrame", snapshots: "pd.Index", year: int
    ) -> Optional["pd.DataFrame"]:
        """Filter timeseries data by year."""
        try:
            # Handle MultiIndex case (multi-period optimization)
            if hasattr(snapshots, "levels"):
                period_values = snapshots.get_level_values(0)
                year_mask = period_values == year
                if year_mask.any():
                    year_snapshots = snapshots[year_mask]
                    return timeseries_df.loc[year_snapshots]

            # Handle DatetimeIndex case (regular time series)
            elif hasattr(snapshots, "year"):
                year_mask = snapshots.year == year
                if year_mask.any():
                    return timeseries_df.loc[year_mask]

            # Fallback - return None if we can't filter
            return None

        except Exception as e:
            logger.error(f"Failed to filter timeseries by year {year}: {e}")
            return None

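    # Hedged sketch of the year filter above (hypothetical frame): for a
    # multi-period network whose snapshots form a (period, timestamp) MultiIndex,
    #
    #   idx = pd.MultiIndex.from_product(
    #       [[2030, 2040], pd.date_range("2030-01-01", periods=2, freq="h")]
    #   )
    #   df = pd.DataFrame({"gen": range(4)}, index=idx)
    #   self._filter_timeseries_by_year(df, idx, 2030)  # -> the first two rows
    #
    # while a plain DatetimeIndex is masked via snapshots.year == year.
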
    def _get_year_weightings(
        self, network: "pypsa.Network", year: int
    ) -> Optional["np.ndarray"]:
        """Get snapshot weightings for a specific year."""
        try:
            # Filter snapshot weightings by year
            if hasattr(network.snapshots, "levels"):
                period_values = network.snapshots.get_level_values(0)
                year_mask = period_values == year
                if year_mask.any():
                    year_snapshots = network.snapshots[year_mask]
                    year_weightings = network.snapshot_weightings.loc[year_snapshots]
                    if isinstance(year_weightings, pd.DataFrame):
                        if "objective" in year_weightings.columns:
                            return year_weightings["objective"].values
                        else:
                            return year_weightings.iloc[:, 0].values
                    else:
                        return year_weightings.values

            elif hasattr(network.snapshots, "year"):
                year_mask = network.snapshots.year == year
                if year_mask.any():
                    year_weightings = network.snapshot_weightings.loc[year_mask]
                    if isinstance(year_weightings, pd.DataFrame):
                        if "objective" in year_weightings.columns:
                            return year_weightings["objective"].values
                        else:
                            return year_weightings.iloc[:, 0].values
                    else:
                        return year_weightings.values

            return None

        except Exception as e:
            logger.error(f"Failed to get year weightings for year {year}: {e}")
            return None

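    # Illustrative note on the weightings returned above (hypothetical numbers):
    # at 3-hourly resolution each snapshot typically carries a weighting of 3.0,
    # so multiplying a dispatch series in MW by these values scales it to MWh,
    # e.g. 100 MW * 3.0 = 300 MWh per snapshot.
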
    def _calculate_total_demand(self, network: "pypsa.Network") -> float:
        """Calculate total demand from loads in the network."""
        try:
            total_demand = 0.0

            # Calculate demand from loads
            if hasattr(network, "loads_t") and hasattr(network.loads_t, "p"):
                # Apply snapshot weightings to convert MW to MWh
                weightings = network.snapshot_weightings
                if isinstance(weightings, pd.DataFrame):
                    if "objective" in weightings.columns:
                        weighting_values = weightings["objective"].values
                    else:
                        weighting_values = weightings.iloc[:, 0].values
                else:
                    weighting_values = weightings.values

                total_demand = float(
                    (network.loads_t.p.values * weighting_values[:, None]).sum()
                )

            return total_demand

        except Exception as e:
            logger.error(f"Failed to calculate total demand: {e}")
            return 0.0

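    # Quick check of the demand formula above (illustrative numbers only): two
    # loads drawing a constant 10 MW and 20 MW over 8760 hourly snapshots each
    # weighted 1.0 give total_demand = (10 + 20) * 8760 = 262800 MWh.
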
    def _json_serializer(self, obj):
        """Convert numpy/pandas types to JSON serializable types."""
        if isinstance(obj, (np.integer, np.int64, np.int32)):
            return int(obj)
        elif isinstance(obj, (np.floating, np.float64, np.float32)):
            return float(obj)
        elif isinstance(obj, np.ndarray):
            return obj.tolist()
        elif isinstance(obj, pd.Series):
            return obj.to_dict()
        elif isinstance(obj, pd.DataFrame):
            return obj.to_dict()
        elif hasattr(obj, "item"):  # Handle numpy scalars
            return obj.item()
        else:
            raise TypeError(f"Object of type {type(obj)} is not JSON serializable")
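
    # Minimal usage sketch for _json_serializer above (hedged: a standalone
    # illustration, not part of the original module):
    #
    #   import json
    #   import numpy as np
    #   rs = ResultStorage()
    #   json.dumps({"p_nom": np.float64(42.0)}, default=rs._json_serializer)
    #   # -> '{"p_nom": 42.0}'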