pyconvexity 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyconvexity might be problematic. Click here for more details.

Files changed (43) hide show
  1. pyconvexity/__init__.py +57 -8
  2. pyconvexity/_version.py +1 -2
  3. pyconvexity/core/__init__.py +0 -2
  4. pyconvexity/core/database.py +158 -0
  5. pyconvexity/core/types.py +105 -18
  6. pyconvexity/data/README.md +101 -0
  7. pyconvexity/data/__init__.py +18 -0
  8. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  9. pyconvexity/data/loaders/__init__.py +3 -0
  10. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  11. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  12. pyconvexity/data/loaders/cache.py +212 -0
  13. pyconvexity/data/schema/01_core_schema.sql +12 -12
  14. pyconvexity/data/schema/02_data_metadata.sql +17 -321
  15. pyconvexity/data/sources/__init__.py +5 -0
  16. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  17. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  18. pyconvexity/data/sources/gem.py +412 -0
  19. pyconvexity/io/__init__.py +32 -0
  20. pyconvexity/io/excel_exporter.py +1012 -0
  21. pyconvexity/io/excel_importer.py +1109 -0
  22. pyconvexity/io/netcdf_exporter.py +192 -0
  23. pyconvexity/io/netcdf_importer.py +1602 -0
  24. pyconvexity/models/__init__.py +7 -0
  25. pyconvexity/models/attributes.py +209 -72
  26. pyconvexity/models/components.py +3 -0
  27. pyconvexity/models/network.py +17 -15
  28. pyconvexity/models/scenarios.py +177 -0
  29. pyconvexity/solvers/__init__.py +29 -0
  30. pyconvexity/solvers/pypsa/__init__.py +24 -0
  31. pyconvexity/solvers/pypsa/api.py +421 -0
  32. pyconvexity/solvers/pypsa/batch_loader.py +304 -0
  33. pyconvexity/solvers/pypsa/builder.py +566 -0
  34. pyconvexity/solvers/pypsa/constraints.py +321 -0
  35. pyconvexity/solvers/pypsa/solver.py +1106 -0
  36. pyconvexity/solvers/pypsa/storage.py +1574 -0
  37. pyconvexity/timeseries.py +327 -0
  38. pyconvexity/validation/rules.py +2 -2
  39. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/METADATA +5 -2
  40. pyconvexity-0.1.4.dist-info/RECORD +46 -0
  41. pyconvexity-0.1.2.dist-info/RECORD +0 -20
  42. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/WHEEL +0 -0
  43. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,1574 @@
1
+ """
2
+ Result storage functionality for PyPSA solver integration.
3
+
4
+ Handles storing solve results back to the database with proper validation and error handling.
5
+ """
6
+
7
+ import logging
8
+ import uuid
9
+ import pandas as pd
10
+ import numpy as np
11
+ from typing import Dict, Any, Optional, Callable
12
+
13
+ from pyconvexity.core.types import StaticValue
14
+ from pyconvexity.models import (
15
+ list_components_by_type, set_static_attribute, set_timeseries_attribute
16
+ )
17
+ from pyconvexity.validation import get_validation_rule
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
+ class ResultStorage:
23
+ """
24
+ Handles storing PyPSA solve results back to the database.
25
+
26
+ This class manages the complex process of extracting results from PyPSA networks
27
+ and storing them back to the database with proper validation and error handling.
28
+ """
29
+
30
+ def store_results(
31
+ self,
32
+ conn,
33
+ network_id: int,
34
+ network: 'pypsa.Network',
35
+ solve_result: Dict[str, Any],
36
+ scenario_id: Optional[int] = None
37
+ ) -> Dict[str, Any]:
38
+ """
39
+ Store complete solve results back to database.
40
+
41
+ Args:
42
+ conn: Database connection
43
+ network_id: ID of the network
44
+ network: Solved PyPSA Network object
45
+ solve_result: Solve result metadata
46
+ scenario_id: Optional scenario ID
47
+
48
+ Returns:
49
+ Dictionary with storage statistics
50
+ """
51
+ run_id = solve_result.get('run_id', str(uuid.uuid4()))
52
+
53
+ try:
54
+ # Store component results
55
+ component_stats = self._store_component_results(
56
+ conn, network_id, network, scenario_id
57
+ )
58
+
59
+ # Calculate network statistics first
60
+ network_stats = self._calculate_network_statistics(
61
+ conn, network_id, network, solve_result
62
+ )
63
+
64
+ # Store solve summary with network statistics
65
+ self._store_solve_summary(
66
+ conn, network_id, solve_result, scenario_id, network_stats
67
+ )
68
+
69
+ # Store year-based statistics if available
70
+ year_stats_stored = 0
71
+ if solve_result.get('year_statistics'):
72
+ year_stats_stored = self._store_year_based_statistics(
73
+ conn, network_id, network, solve_result['year_statistics'], scenario_id
74
+ )
75
+
76
+ return {
77
+ "component_stats": component_stats,
78
+ "network_stats": network_stats,
79
+ "year_stats_stored": year_stats_stored,
80
+ "run_id": run_id,
81
+ "success": True
82
+ }
83
+
84
+ except Exception as e:
85
+ logger.error(f"Failed to store solve results: {e}", exc_info=True)
86
+ return {
87
+ "component_stats": {},
88
+ "network_stats": {},
89
+ "run_id": run_id,
90
+ "success": False,
91
+ "error": str(e)
92
+ }
93
+
94
+ def _store_component_results(
95
+ self,
96
+ conn,
97
+ network_id: int,
98
+ network: 'pypsa.Network',
99
+ scenario_id: Optional[int]
100
+ ) -> Dict[str, int]:
101
+ """Store results for all component types."""
102
+ results_stats = {
103
+ "stored_bus_results": 0,
104
+ "stored_generator_results": 0,
105
+ "stored_unmet_load_results": 0,
106
+ "stored_load_results": 0,
107
+ "stored_line_results": 0,
108
+ "stored_link_results": 0,
109
+ "stored_storage_unit_results": 0,
110
+ "stored_store_results": 0,
111
+ "skipped_attributes": 0,
112
+ "errors": 0
113
+ }
114
+
115
+ try:
116
+ # Store bus results
117
+ if hasattr(network, 'buses_t') and network.buses_t:
118
+ results_stats["stored_bus_results"] = self._store_component_type_results(
119
+ conn, network_id, 'BUS', network.buses, network.buses_t, scenario_id
120
+ )
121
+
122
+ # Store generator results (includes regular generators)
123
+ if hasattr(network, 'generators_t') and network.generators_t:
124
+ results_stats["stored_generator_results"] = self._store_component_type_results(
125
+ conn, network_id, 'GENERATOR', network.generators, network.generators_t, scenario_id
126
+ )
127
+
128
+ # Store UNMET_LOAD results (these are also stored as generators in PyPSA)
129
+ results_stats["stored_unmet_load_results"] = self._store_component_type_results(
130
+ conn, network_id, 'UNMET_LOAD', network.generators, network.generators_t, scenario_id
131
+ )
132
+
133
+ # Store load results
134
+ if hasattr(network, 'loads_t') and network.loads_t:
135
+ results_stats["stored_load_results"] = self._store_component_type_results(
136
+ conn, network_id, 'LOAD', network.loads, network.loads_t, scenario_id
137
+ )
138
+
139
+ # Store line results
140
+ if hasattr(network, 'lines_t') and network.lines_t:
141
+ results_stats["stored_line_results"] = self._store_component_type_results(
142
+ conn, network_id, 'LINE', network.lines, network.lines_t, scenario_id
143
+ )
144
+
145
+ # Store link results
146
+ if hasattr(network, 'links_t') and network.links_t:
147
+ results_stats["stored_link_results"] = self._store_component_type_results(
148
+ conn, network_id, 'LINK', network.links, network.links_t, scenario_id
149
+ )
150
+
151
+ # Store storage unit results
152
+ if hasattr(network, 'storage_units_t') and network.storage_units_t:
153
+ results_stats["stored_storage_unit_results"] = self._store_component_type_results(
154
+ conn, network_id, 'STORAGE_UNIT', network.storage_units, network.storage_units_t, scenario_id
155
+ )
156
+
157
+ # Store store results
158
+ if hasattr(network, 'stores_t') and network.stores_t:
159
+ results_stats["stored_store_results"] = self._store_component_type_results(
160
+ conn, network_id, 'STORE', network.stores, network.stores_t, scenario_id
161
+ )
162
+
163
+ return results_stats
164
+
165
+ except Exception as e:
166
+ logger.error(f"Error storing solve results: {e}", exc_info=True)
167
+ results_stats["errors"] += 1
168
+ return results_stats
169
+
170
+ def _store_component_type_results(
171
+ self,
172
+ conn,
173
+ network_id: int,
174
+ component_type: str,
175
+ static_df: pd.DataFrame,
176
+ timeseries_dict: Dict[str, pd.DataFrame],
177
+ scenario_id: Optional[int]
178
+ ) -> int:
179
+ """Store results for a specific component type - only store OUTPUT attributes."""
180
+ stored_count = 0
181
+
182
+ try:
183
+ # Get component name to ID mapping
184
+ components = list_components_by_type(conn, network_id, component_type)
185
+ name_to_id = {comp.name: comp.id for comp in components}
186
+
187
+ # Store timeseries results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
188
+ for attr_name, timeseries_df in timeseries_dict.items():
189
+ if timeseries_df.empty:
190
+ continue
191
+
192
+ # Check if this attribute is an output attribute (not an input)
193
+ try:
194
+ rule = get_validation_rule(conn, component_type, attr_name)
195
+ if rule.is_input:
196
+ # Skip input attributes to preserve original input data
197
+ continue
198
+ except Exception:
199
+ # If no validation rule found, skip to be safe
200
+ continue
201
+
202
+ for component_name in timeseries_df.columns:
203
+ if component_name not in name_to_id:
204
+ continue
205
+
206
+ component_id = name_to_id[component_name]
207
+ component_series = timeseries_df[component_name]
208
+
209
+ # Skip if all values are NaN
210
+ if component_series.isna().all():
211
+ continue
212
+
213
+ # Convert to efficient values array
214
+ values = []
215
+ for value in component_series.values:
216
+ if pd.isna(value):
217
+ values.append(0.0) # Fill NaN with 0.0
218
+ else:
219
+ values.append(float(value))
220
+
221
+ if not values:
222
+ continue
223
+
224
+ # Store using efficient format
225
+ try:
226
+ set_timeseries_attribute(conn, component_id, attr_name, values, scenario_id)
227
+ stored_count += 1
228
+ except Exception as e:
229
+ # Handle validation errors gracefully
230
+ if ("No validation rule found" in str(e) or
231
+ "does not allow" in str(e) or
232
+ "ValidationError" in str(type(e).__name__)):
233
+ continue
234
+ else:
235
+ logger.warning(f"Error storing timeseries {attr_name} for {component_type} '{component_name}': {e}")
236
+ continue
237
+
238
+ # Store static optimization results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
239
+ if not static_df.empty:
240
+ for attr_name in static_df.columns:
241
+ # Check if this attribute is an output attribute (not an input)
242
+ try:
243
+ rule = get_validation_rule(conn, component_type, attr_name)
244
+ if rule.is_input:
245
+ # Skip input attributes to preserve original input data
246
+ continue
247
+ except Exception:
248
+ # If no validation rule found, skip to be safe
249
+ continue
250
+
251
+ for component_name, value in static_df[attr_name].items():
252
+ if component_name not in name_to_id or pd.isna(value):
253
+ continue
254
+
255
+ component_id = name_to_id[component_name]
256
+
257
+ try:
258
+ # Convert value to StaticValue
259
+ if isinstance(value, (int, np.integer)):
260
+ static_value = StaticValue(int(value))
261
+ elif isinstance(value, (float, np.floating)):
262
+ if np.isfinite(value):
263
+ static_value = StaticValue(float(value))
264
+ else:
265
+ continue # Skip infinite/NaN values
266
+ elif isinstance(value, bool):
267
+ static_value = StaticValue(bool(value))
268
+ else:
269
+ static_value = StaticValue(str(value))
270
+
271
+ # Store using atomic utility
272
+ set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
273
+ stored_count += 1
274
+
275
+ except Exception as e:
276
+ # Handle validation errors gracefully
277
+ if ("No validation rule found" in str(e) or
278
+ "does not allow" in str(e) or
279
+ "ValidationError" in str(type(e).__name__)):
280
+ continue
281
+ else:
282
+ logger.warning(f"Error storing static {attr_name} for {component_type} '{component_name}': {e}")
283
+ continue
284
+
285
+ return stored_count
286
+
287
+ except Exception as e:
288
+ logger.error(f"Error storing results for {component_type}: {e}", exc_info=True)
289
+ return stored_count
290
+
291
+ def _store_solve_summary(
292
+ self,
293
+ conn,
294
+ network_id: int,
295
+ solve_result: Dict[str, Any],
296
+ scenario_id: Optional[int],
297
+ network_stats: Optional[Dict[str, Any]] = None
298
+ ):
299
+ """Store solve summary to network_solve_results table."""
300
+ try:
301
+ # Prepare solve summary data
302
+ solver_name = solve_result.get('solver_name', 'unknown')
303
+ solve_status = solve_result.get('status', 'unknown')
304
+ objective_value = solve_result.get('objective_value')
305
+ solve_time = solve_result.get('solve_time', 0.0)
306
+
307
+ # Use master scenario if no scenario specified
308
+ if scenario_id is None:
309
+ from pyconvexity.models import get_master_scenario_id
310
+ scenario_id = get_master_scenario_id(conn, network_id)
311
+
312
+ # Create enhanced solve result with network statistics for serialization
313
+ enhanced_solve_result = {
314
+ **solve_result,
315
+ "network_statistics": network_stats or {}
316
+ }
317
+
318
+ # Store solve results summary
319
+ conn.execute("""
320
+ INSERT OR REPLACE INTO network_solve_results (
321
+ network_id, scenario_id, solver_name, solve_type, solve_status,
322
+ objective_value, solve_time_seconds, results_json, metadata_json
323
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
324
+ """, (
325
+ network_id,
326
+ scenario_id,
327
+ solver_name,
328
+ 'pypsa_optimization',
329
+ solve_status,
330
+ objective_value,
331
+ solve_time,
332
+ self._serialize_results_json(enhanced_solve_result),
333
+ self._serialize_metadata_json(enhanced_solve_result)
334
+ ))
335
+
336
+ logger.info(f"Stored solve summary for network {network_id}, scenario {scenario_id}")
337
+
338
+ except Exception as e:
339
+ logger.error(f"Failed to store solve summary: {e}", exc_info=True)
340
+
341
    def _calculate_network_statistics(
        self,
        conn,
        network_id: int,
        network: 'pypsa.Network',
        solve_result: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Calculate network statistics - focusing only on capacity for now.

        Builds the nested ``core_summary`` / ``custom_statistics`` /
        ``runtime_info`` structure consumed by the frontend. On any failure a
        zero-filled structure of the same shape is returned with an ``error``
        key, so consumers never see a missing section.
        """
        try:
            # Calculate carrier-specific statistics
            carrier_stats = self._calculate_carrier_statistics(conn, network_id, network)

            # Calculate basic network statistics by summing the per-carrier maps.
            total_cost = solve_result.get('objective_value', 0.0)
            total_generation_mwh = sum(carrier_stats.get("dispatch_by_carrier", {}).values())
            total_emissions_tonnes = sum(carrier_stats.get("emissions_by_carrier", {}).values())
            total_capital_cost = sum(carrier_stats.get("capital_cost_by_carrier", {}).values())
            total_operational_cost = sum(carrier_stats.get("operational_cost_by_carrier", {}).values())
            total_system_cost = sum(carrier_stats.get("total_system_cost_by_carrier", {}).values())

            # Calculate unmet load statistics.  "Unmet Load" is the synthetic
            # carrier assigned to UNMET_LOAD components elsewhere in this class.
            unmet_load_mwh = carrier_stats.get("dispatch_by_carrier", {}).get("Unmet Load", 0.0)
            total_demand_mwh = self._calculate_total_demand(network)
            # +1e-6 guards against division by zero despite the > 0 check.
            unmet_load_percentage = (unmet_load_mwh / (total_demand_mwh + 1e-6)) * 100 if total_demand_mwh > 0 else 0.0

            # Create nested structure expected by frontend
            network_statistics = {
                "core_summary": {
                    "total_generation_mwh": total_generation_mwh,
                    "total_demand_mwh": total_demand_mwh,
                    "total_cost": total_cost,
                    "load_factor": (total_demand_mwh / (total_generation_mwh + 1e-6)) if total_generation_mwh > 0 else 0.0,
                    "unserved_energy_mwh": unmet_load_mwh
                },
                "custom_statistics": {
                    # Include carrier-specific statistics (capacity, dispatch, emissions, costs)
                    **carrier_stats,
                    "total_capital_cost": total_capital_cost,
                    "total_operational_cost": total_operational_cost,
                    "total_currency_cost": total_system_cost,  # Use calculated system cost instead of PyPSA objective
                    "total_emissions_tons_co2": total_emissions_tonnes,
                    "average_price_per_mwh": (total_system_cost / (total_generation_mwh + 1e-6)) if total_generation_mwh > 0 else 0.0,
                    "unmet_load_percentage": unmet_load_percentage,
                    "max_unmet_load_hour_mw": 0.0  # TODO: Calculate max hourly unmet load later
                },
                "runtime_info": {
                    # NOTE(review): component_count omits storage units and
                    # stores - confirm whether that is intentional.
                    "component_count": (
                        len(network.buses) + len(network.generators) + len(network.loads) +
                        len(network.lines) + len(network.links)
                    ) if hasattr(network, 'buses') else 0,
                    "bus_count": len(network.buses) if hasattr(network, 'buses') else 0,
                    "generator_count": len(network.generators) if hasattr(network, 'generators') else 0,
                    "load_count": len(network.loads) if hasattr(network, 'loads') else 0,
                    "snapshot_count": len(network.snapshots) if hasattr(network, 'snapshots') else 0
                }
            }

            logger.info(f"Calculated network statistics: core_summary={network_statistics['core_summary']}")
            return network_statistics

        except Exception as e:
            logger.error(f"Failed to calculate network statistics: {e}", exc_info=True)
            # Return empty structure matching expected format
            return {
                "core_summary": {
                    "total_generation_mwh": 0.0,
                    "total_demand_mwh": 0.0,
                    "total_cost": solve_result.get('objective_value', 0.0),
                    "load_factor": 0.0,
                    "unserved_energy_mwh": 0.0
                },
                "custom_statistics": {
                    "dispatch_by_carrier": {},
                    "power_capacity_by_carrier": {},
                    "energy_capacity_by_carrier": {},
                    "emissions_by_carrier": {},
                    "capital_cost_by_carrier": {},
                    "operational_cost_by_carrier": {},
                    "total_system_cost_by_carrier": {},
                    "total_capital_cost": 0.0,
                    "total_operational_cost": 0.0,
                    "total_currency_cost": 0.0,
                    "total_emissions_tons_co2": 0.0,
                    "average_price_per_mwh": 0.0,
                    "unmet_load_percentage": 0.0,
                    "max_unmet_load_hour_mw": 0.0
                },
                "runtime_info": {
                    "component_count": 0,
                    "bus_count": 0,
                    "generator_count": 0,
                    "load_count": 0,
                    "snapshot_count": 0
                },
                "error": str(e)
            }
438
    def _calculate_carrier_statistics(self, conn, network_id: int, network: 'pypsa.Network') -> Dict[str, Any]:
        """
        Calculate carrier-specific statistics directly from the network.
        This is the primary calculation - per-year stats will be calculated separately.

        Dispatch, emissions and costs are summed across all years; power and
        energy capacity are taken from the final year only (final capacity
        state). On failure, returns the same seven keys with empty dicts.
        """
        try:
            # Calculate all-year statistics directly from the network
            # Extract years from network snapshots
            if hasattr(network.snapshots, 'levels'):
                # Multi-period optimization - get years from period level
                period_values = network.snapshots.get_level_values(0)
                years = sorted(period_values.unique())
            elif hasattr(network.snapshots, 'year'):
                # Plain DatetimeIndex exposes a .year accessor.
                years = sorted(network.snapshots.year.unique())
            elif hasattr(network, '_available_years'):
                # Custom attribute set elsewhere in this package.
                years = network._available_years
            else:
                years = [2020]  # Fallback

            logger.info(f"Calculating all-year carrier statistics for years: {years}")

            # Calculate per-year statistics first
            all_year_stats = {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {}
            }

            # Initialize all carriers with zero values
            cursor = conn.execute("""
                SELECT DISTINCT name FROM carriers WHERE network_id = ?
            """, (network_id,))
            all_carriers = [row[0] for row in cursor.fetchall()]

            # Initialize all carriers with zero values (including special "Unmet Load" carrier)
            all_carriers_with_unmet = all_carriers + ['Unmet Load']
            for carrier in all_carriers_with_unmet:
                all_year_stats["dispatch_by_carrier"][carrier] = 0.0
                all_year_stats["power_capacity_by_carrier"][carrier] = 0.0
                all_year_stats["energy_capacity_by_carrier"][carrier] = 0.0
                all_year_stats["emissions_by_carrier"][carrier] = 0.0
                all_year_stats["capital_cost_by_carrier"][carrier] = 0.0
                all_year_stats["operational_cost_by_carrier"][carrier] = 0.0
                all_year_stats["total_system_cost_by_carrier"][carrier] = 0.0

            # Calculate statistics for each year and sum them up
            for year in years:
                year_stats = self._calculate_year_carrier_statistics(conn, network_id, network, year)

                # Sum up all the statistics (including "Unmet Load")
                for carrier in all_carriers_with_unmet:
                    # Sum dispatch, emissions, and costs across years
                    all_year_stats["dispatch_by_carrier"][carrier] += year_stats["dispatch_by_carrier"].get(carrier, 0.0)
                    all_year_stats["emissions_by_carrier"][carrier] += year_stats["emissions_by_carrier"].get(carrier, 0.0)
                    all_year_stats["capital_cost_by_carrier"][carrier] += year_stats["capital_cost_by_carrier"].get(carrier, 0.0)
                    all_year_stats["operational_cost_by_carrier"][carrier] += year_stats["operational_cost_by_carrier"].get(carrier, 0.0)
                    all_year_stats["total_system_cost_by_carrier"][carrier] += year_stats["total_system_cost_by_carrier"].get(carrier, 0.0)

                    # For capacity: use the last year (final capacity state)
                    if year == years[-1]:
                        all_year_stats["power_capacity_by_carrier"][carrier] = year_stats["power_capacity_by_carrier"].get(carrier, 0.0)
                        all_year_stats["energy_capacity_by_carrier"][carrier] = year_stats["energy_capacity_by_carrier"].get(carrier, 0.0)

            logger.info(f"Calculated all-year carrier statistics:")
            logger.info(f"  Total dispatch: {sum(all_year_stats['dispatch_by_carrier'].values()):.2f} MWh")
            logger.info(f"  Total emissions: {sum(all_year_stats['emissions_by_carrier'].values()):.2f} tonnes CO2")
            logger.info(f"  Total capital cost: {sum(all_year_stats['capital_cost_by_carrier'].values()):.2f} USD")
            logger.info(f"  Total operational cost: {sum(all_year_stats['operational_cost_by_carrier'].values()):.2f} USD")
            logger.info(f"  Final power capacity: {sum(all_year_stats['power_capacity_by_carrier'].values()):.2f} MW")

            return all_year_stats

        except Exception as e:
            logger.error(f"Failed to calculate carrier statistics: {e}", exc_info=True)
            return {
                "dispatch_by_carrier": {},
                "power_capacity_by_carrier": {},
                "energy_capacity_by_carrier": {},
                "emissions_by_carrier": {},
                "capital_cost_by_carrier": {},
                "operational_cost_by_carrier": {},
                "total_system_cost_by_carrier": {}
            }
526
    def _store_year_based_statistics(
        self,
        conn,
        network_id: int,
        network: 'pypsa.Network',
        year_statistics: Dict[int, Dict[str, Any]],
        scenario_id: Optional[int]
    ) -> int:
        """Store year-based statistics to database.

        Creates the ``network_solve_results_by_year`` table on first use and
        upserts one row per year. Note that each ``stats`` dict in
        ``year_statistics`` is mutated in place (its ``custom_statistics`` key
        is merged with freshly calculated per-year carrier stats).

        Returns:
            Number of years successfully stored (0 on total failure).
        """
        try:
            import json
            stored_count = 0

            # Use master scenario if no scenario specified
            if scenario_id is None:
                from pyconvexity.models import get_master_scenario_id
                scenario_id = get_master_scenario_id(conn, network_id)

            # Check if network_solve_results_by_year table exists, create if not
            conn.execute("""
                CREATE TABLE IF NOT EXISTS network_solve_results_by_year (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    network_id INTEGER NOT NULL,
                    scenario_id INTEGER NOT NULL,
                    year INTEGER NOT NULL,
                    results_json TEXT,
                    metadata_json TEXT,
                    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (network_id) REFERENCES networks(id),
                    FOREIGN KEY (scenario_id) REFERENCES scenarios(id),
                    UNIQUE(network_id, scenario_id, year)
                )
            """)

            for year, stats in year_statistics.items():
                try:
                    # Calculate proper year-specific carrier statistics
                    year_carrier_stats = self._calculate_year_carrier_statistics(conn, network_id, network, year)

                    # Merge year-specific carrier stats into the statistics
                    if "custom_statistics" in stats:
                        stats["custom_statistics"].update(year_carrier_stats)
                    else:
                        stats["custom_statistics"] = year_carrier_stats

                    # Wrap the year statistics in the same structure as overall results for consistency
                    year_result_wrapper = {
                        "success": True,
                        "year": year,
                        "network_statistics": stats
                    }

                    metadata = {
                        "year": year,
                        "network_id": network_id,
                        "scenario_id": scenario_id
                    }

                    # UNIQUE(network_id, scenario_id, year) makes this an upsert.
                    conn.execute("""
                        INSERT OR REPLACE INTO network_solve_results_by_year
                        (network_id, scenario_id, year, results_json, metadata_json)
                        VALUES (?, ?, ?, ?, ?)
                    """, (
                        network_id,
                        scenario_id,
                        year,
                        json.dumps(year_result_wrapper, default=self._json_serializer),
                        json.dumps(metadata, default=self._json_serializer)
                    ))

                    stored_count += 1
                    logger.info(f"Stored year-based statistics for year {year}")

                except Exception as e:
                    # One bad year does not abort the remaining years.
                    logger.error(f"Failed to store statistics for year {year}: {e}")
                    continue

            logger.info(f"Successfully stored year-based statistics for {stored_count} years")
            return stored_count

        except Exception as e:
            logger.error(f"Failed to store year-based statistics: {e}", exc_info=True)
            return 0
610
+ def _calculate_year_carrier_statistics(self, conn, network_id: int, network: 'pypsa.Network', year: int) -> Dict[str, Any]:
611
+ """
612
+ Calculate carrier-specific statistics for a specific year.
613
+ For now, only calculate capacity statistics.
614
+ """
615
+ try:
616
+ # Initialize carrier statistics
617
+ carrier_stats = {
618
+ "dispatch_by_carrier": {},
619
+ "power_capacity_by_carrier": {}, # MW - Generators + Storage Units (power) + Lines + Links
620
+ "energy_capacity_by_carrier": {}, # MWh - Stores + Storage Units (energy)
621
+ "emissions_by_carrier": {},
622
+ "capital_cost_by_carrier": {},
623
+ "operational_cost_by_carrier": {},
624
+ "total_system_cost_by_carrier": {}
625
+ }
626
+
627
+ # Get all carriers from database
628
+ cursor = conn.execute("""
629
+ SELECT DISTINCT name FROM carriers WHERE network_id = ?
630
+ """, (network_id,))
631
+ all_carriers = [row[0] for row in cursor.fetchall()]
632
+
633
+ # Initialize all carriers with zero values (including special "Unmet Load" carrier)
634
+ all_carriers_with_unmet = all_carriers + ['Unmet Load']
635
+ for carrier in all_carriers_with_unmet:
636
+ carrier_stats["dispatch_by_carrier"][carrier] = 0.0
637
+ carrier_stats["power_capacity_by_carrier"][carrier] = 0.0
638
+ carrier_stats["energy_capacity_by_carrier"][carrier] = 0.0
639
+ carrier_stats["emissions_by_carrier"][carrier] = 0.0
640
+ carrier_stats["capital_cost_by_carrier"][carrier] = 0.0
641
+ carrier_stats["operational_cost_by_carrier"][carrier] = 0.0
642
+ carrier_stats["total_system_cost_by_carrier"][carrier] = 0.0
643
+
644
+ # Calculate dispatch (generation) by carrier for this specific year
645
+
646
+ # 1. GENERATORS - Generation dispatch (including UNMET_LOAD)
647
+ if hasattr(network, 'generators_t') and hasattr(network.generators_t, 'p'):
648
+ # Get generator-carrier mapping (include both GENERATOR and UNMET_LOAD)
649
+ cursor = conn.execute("""
650
+ SELECT c.name as component_name,
651
+ CASE
652
+ WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
653
+ ELSE carr.name
654
+ END as carrier_name
655
+ FROM components c
656
+ JOIN carriers carr ON c.carrier_id = carr.id
657
+ WHERE c.network_id = ? AND c.component_type IN ('GENERATOR', 'UNMET_LOAD')
658
+ """, (network_id,))
659
+ generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
660
+
661
+ # Filter generation data for this specific year
662
+ year_generation = self._filter_timeseries_by_year(network.generators_t.p, network.snapshots, year)
663
+ if year_generation is not None and not year_generation.empty:
664
+ for gen_name in year_generation.columns:
665
+ if gen_name in generator_carriers:
666
+ carrier_name = generator_carriers[gen_name]
667
+
668
+ # Calculate generation for this year (ALWAYS apply snapshot weightings to convert MW to MWh)
669
+ year_weightings = self._get_year_weightings(network, year)
670
+ if year_weightings is not None:
671
+ generation_mwh = float((year_generation[gen_name].values * year_weightings).sum())
672
+ else:
673
+ # Fallback: simple sum (will be incorrect for non-1H models)
674
+ generation_mwh = float(year_generation[gen_name].sum())
675
+ logger.warning(f"Could not apply snapshot weightings for {gen_name} in year {year} - energy may be incorrect")
676
+
677
+ if carrier_name in carrier_stats["dispatch_by_carrier"]:
678
+ carrier_stats["dispatch_by_carrier"][carrier_name] += generation_mwh
679
+
680
+ # 2. STORAGE_UNITS - Discharge only (positive values)
681
+ if hasattr(network, 'storage_units_t') and hasattr(network.storage_units_t, 'p'):
682
+ # Get storage unit-carrier mapping
683
+ cursor = conn.execute("""
684
+ SELECT c.name as component_name, carr.name as carrier_name
685
+ FROM components c
686
+ JOIN carriers carr ON c.carrier_id = carr.id
687
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
688
+ """, (network_id,))
689
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
690
+
691
+ # Filter storage unit data for this specific year
692
+ year_storage = self._filter_timeseries_by_year(network.storage_units_t.p, network.snapshots, year)
693
+ if year_storage is not None and not year_storage.empty:
694
+ for su_name in year_storage.columns:
695
+ if su_name in storage_unit_carriers:
696
+ carrier_name = storage_unit_carriers[su_name]
697
+
698
+ # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
699
+ year_weightings = self._get_year_weightings(network, year)
700
+ if year_weightings is not None:
701
+ discharge_mwh = float((year_storage[su_name].clip(lower=0).values * year_weightings).sum())
702
+ else:
703
+ # Fallback: simple sum (will be incorrect for non-1H models)
704
+ discharge_mwh = float(year_storage[su_name].clip(lower=0).sum())
705
+ logger.warning(f"Could not apply snapshot weightings for storage unit {su_name} in year {year} - energy may be incorrect")
706
+
707
+ if carrier_name in carrier_stats["dispatch_by_carrier"]:
708
+ carrier_stats["dispatch_by_carrier"][carrier_name] += discharge_mwh
709
+
710
+ # 3. STORES - Discharge only (positive values)
711
+ if hasattr(network, 'stores_t') and hasattr(network.stores_t, 'p'):
712
+ # Get store-carrier mapping
713
+ cursor = conn.execute("""
714
+ SELECT c.name as component_name, carr.name as carrier_name
715
+ FROM components c
716
+ JOIN carriers carr ON c.carrier_id = carr.id
717
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
718
+ """, (network_id,))
719
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
720
+
721
+ # Filter store data for this specific year
722
+ year_stores = self._filter_timeseries_by_year(network.stores_t.p, network.snapshots, year)
723
+ if year_stores is not None and not year_stores.empty:
724
+ for store_name in year_stores.columns:
725
+ if store_name in store_carriers:
726
+ carrier_name = store_carriers[store_name]
727
+
728
+ # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
729
+ year_weightings = self._get_year_weightings(network, year)
730
+ if year_weightings is not None:
731
+ discharge_mwh = float((year_stores[store_name].clip(lower=0).values * year_weightings).sum())
732
+ else:
733
+ # Fallback: simple sum (will be incorrect for non-1H models)
734
+ discharge_mwh = float(year_stores[store_name].clip(lower=0).sum())
735
+ logger.warning(f"Could not apply snapshot weightings for store {store_name} in year {year} - energy may be incorrect")
736
+
737
+ if carrier_name in carrier_stats["dispatch_by_carrier"]:
738
+ carrier_stats["dispatch_by_carrier"][carrier_name] += discharge_mwh
739
+
740
+ # Calculate emissions by carrier for this specific year
741
+ # Get emission factors for all carriers
742
+ cursor = conn.execute("""
743
+ SELECT name, co2_emissions FROM carriers WHERE network_id = ?
744
+ """, (network_id,))
745
+ emission_factors = {row[0]: row[1] for row in cursor.fetchall()}
746
+
747
+ # Calculate emissions: dispatch (MWh) × emission factor (tonnes CO2/MWh) = tonnes CO2
748
+ for carrier_name, dispatch_mwh in carrier_stats["dispatch_by_carrier"].items():
749
+ # Handle None values safely
750
+ if dispatch_mwh is None:
751
+ dispatch_mwh = 0.0
752
+
753
+ emission_factor = emission_factors.get(carrier_name, 0.0) # Default to 0 if no emission factor
754
+ if emission_factor is None:
755
+ emission_factor = 0.0
756
+
757
+ emissions_tonnes = dispatch_mwh * emission_factor
758
+
759
+ if carrier_name in carrier_stats["emissions_by_carrier"]:
760
+ carrier_stats["emissions_by_carrier"][carrier_name] += emissions_tonnes
761
+
762
+ # Calculate capital costs by carrier for this specific year
763
+ # Capital costs are annualized and counted every year the component is active
764
+
765
+ # Helper function to check if component is active in this year
766
def is_component_active(build_year, lifetime, current_year):
    """Return True if a component is in service during *current_year*.

    A component with no recorded build year is always considered active.
    Otherwise it is active from its (integer) build year through
    ``build_year + lifetime - 1`` inclusive; a missing or infinite
    lifetime means it never retires once built.
    """
    if pd.isna(build_year):
        # No build-year constraint recorded for this component.
        return True

    start = int(build_year)
    if current_year < start:
        # Commissioned after the year being evaluated.
        return False

    if pd.isna(lifetime) or lifetime == float('inf'):
        # No retirement: active forever once built.
        return True

    # Last year of service; the build year itself counts toward lifetime.
    retirement_year = start + int(lifetime) - 1
    return current_year <= retirement_year
781
+
782
+ # 1. GENERATORS - Capital costs (including UNMET_LOAD)
783
+ if hasattr(network, 'generators') and not network.generators.empty:
784
+ # Get generator info: carrier, capital_cost, build_year, lifetime (include UNMET_LOAD)
785
+ cursor = conn.execute("""
786
+ SELECT c.name as component_name,
787
+ CASE
788
+ WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
789
+ ELSE carr.name
790
+ END as carrier_name
791
+ FROM components c
792
+ JOIN carriers carr ON c.carrier_id = carr.id
793
+ WHERE c.network_id = ? AND c.component_type IN ('GENERATOR', 'UNMET_LOAD')
794
+ """, (network_id,))
795
+ generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
796
+
797
+ for gen_name in network.generators.index:
798
+ if gen_name in generator_carriers:
799
+ carrier_name = generator_carriers[gen_name]
800
+
801
+ # Get build year and lifetime
802
+ build_year = network.generators.loc[gen_name, 'build_year'] if 'build_year' in network.generators.columns else None
803
+ lifetime = network.generators.loc[gen_name, 'lifetime'] if 'lifetime' in network.generators.columns else None
804
+
805
+ # Check if component is active in this year
806
+ if is_component_active(build_year, lifetime, year):
807
+ # Get capacity and capital cost
808
+ if 'p_nom_opt' in network.generators.columns:
809
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom_opt'])
810
+ else:
811
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom']) if 'p_nom' in network.generators.columns else 0.0
812
+
813
+ capital_cost_per_mw = float(network.generators.loc[gen_name, 'capital_cost']) if 'capital_cost' in network.generators.columns else 0.0
814
+
815
+ # Calculate annualized capital cost for this year
816
+ annual_capital_cost = capacity_mw * capital_cost_per_mw
817
+
818
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
819
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
820
+
821
+ # 2. STORAGE_UNITS - Capital costs
822
+ if hasattr(network, 'storage_units') and not network.storage_units.empty:
823
+ cursor = conn.execute("""
824
+ SELECT c.name as component_name, carr.name as carrier_name
825
+ FROM components c
826
+ JOIN carriers carr ON c.carrier_id = carr.id
827
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
828
+ """, (network_id,))
829
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
830
+
831
+ for su_name in network.storage_units.index:
832
+ if su_name in storage_unit_carriers:
833
+ carrier_name = storage_unit_carriers[su_name]
834
+
835
+ # Get build year and lifetime
836
+ build_year = network.storage_units.loc[su_name, 'build_year'] if 'build_year' in network.storage_units.columns else None
837
+ lifetime = network.storage_units.loc[su_name, 'lifetime'] if 'lifetime' in network.storage_units.columns else None
838
+
839
+ # Check if component is active in this year
840
+ if is_component_active(build_year, lifetime, year):
841
+ # Get power capacity and capital cost (per MW)
842
+ if 'p_nom_opt' in network.storage_units.columns:
843
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom_opt'])
844
+ else:
845
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom']) if 'p_nom' in network.storage_units.columns else 0.0
846
+
847
+ capital_cost_per_mw = float(network.storage_units.loc[su_name, 'capital_cost']) if 'capital_cost' in network.storage_units.columns else 0.0
848
+
849
+ # Calculate annualized capital cost for this year
850
+ annual_capital_cost = capacity_mw * capital_cost_per_mw
851
+
852
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
853
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
854
+
855
+ # 3. STORES - Capital costs (per MWh)
856
+ if hasattr(network, 'stores') and not network.stores.empty:
857
+ cursor = conn.execute("""
858
+ SELECT c.name as component_name, carr.name as carrier_name
859
+ FROM components c
860
+ JOIN carriers carr ON c.carrier_id = carr.id
861
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
862
+ """, (network_id,))
863
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
864
+
865
+ for store_name in network.stores.index:
866
+ if store_name in store_carriers:
867
+ carrier_name = store_carriers[store_name]
868
+
869
+ # Get build year and lifetime
870
+ build_year = network.stores.loc[store_name, 'build_year'] if 'build_year' in network.stores.columns else None
871
+ lifetime = network.stores.loc[store_name, 'lifetime'] if 'lifetime' in network.stores.columns else None
872
+
873
+ # Check if component is active in this year
874
+ if is_component_active(build_year, lifetime, year):
875
+ # Get energy capacity and capital cost (per MWh)
876
+ if 'e_nom_opt' in network.stores.columns:
877
+ capacity_mwh = float(network.stores.loc[store_name, 'e_nom_opt'])
878
+ else:
879
+ capacity_mwh = float(network.stores.loc[store_name, 'e_nom']) if 'e_nom' in network.stores.columns else 0.0
880
+
881
+ capital_cost_per_mwh = float(network.stores.loc[store_name, 'capital_cost']) if 'capital_cost' in network.stores.columns else 0.0
882
+
883
+ # Calculate annualized capital cost for this year
884
+ annual_capital_cost = capacity_mwh * capital_cost_per_mwh
885
+
886
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
887
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
888
+
889
+ # 4. LINES - Capital costs (per MVA)
890
+ if hasattr(network, 'lines') and not network.lines.empty:
891
+ cursor = conn.execute("""
892
+ SELECT c.name as component_name, carr.name as carrier_name
893
+ FROM components c
894
+ JOIN carriers carr ON c.carrier_id = carr.id
895
+ WHERE c.network_id = ? AND c.component_type = 'LINE'
896
+ """, (network_id,))
897
+ line_carriers = {row[0]: row[1] for row in cursor.fetchall()}
898
+
899
+ for line_name in network.lines.index:
900
+ if line_name in line_carriers:
901
+ carrier_name = line_carriers[line_name]
902
+
903
+ # Get build year and lifetime
904
+ build_year = network.lines.loc[line_name, 'build_year'] if 'build_year' in network.lines.columns else None
905
+ lifetime = network.lines.loc[line_name, 'lifetime'] if 'lifetime' in network.lines.columns else None
906
+
907
+ # Check if component is active in this year
908
+ if is_component_active(build_year, lifetime, year):
909
+ # Get apparent power capacity and capital cost (per MVA)
910
+ if 's_nom_opt' in network.lines.columns:
911
+ capacity_mva = float(network.lines.loc[line_name, 's_nom_opt'])
912
+ else:
913
+ capacity_mva = float(network.lines.loc[line_name, 's_nom']) if 's_nom' in network.lines.columns else 0.0
914
+
915
+ capital_cost_per_mva = float(network.lines.loc[line_name, 'capital_cost']) if 'capital_cost' in network.lines.columns else 0.0
916
+
917
+ # Calculate annualized capital cost for this year
918
+ annual_capital_cost = capacity_mva * capital_cost_per_mva
919
+
920
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
921
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
922
+
923
+ # 5. LINKS - Capital costs (per MW)
924
+ if hasattr(network, 'links') and not network.links.empty:
925
+ cursor = conn.execute("""
926
+ SELECT c.name as component_name, carr.name as carrier_name
927
+ FROM components c
928
+ JOIN carriers carr ON c.carrier_id = carr.id
929
+ WHERE c.network_id = ? AND c.component_type = 'LINK'
930
+ """, (network_id,))
931
+ link_carriers = {row[0]: row[1] for row in cursor.fetchall()}
932
+
933
+ for link_name in network.links.index:
934
+ if link_name in link_carriers:
935
+ carrier_name = link_carriers[link_name]
936
+
937
+ # Get build year and lifetime
938
+ build_year = network.links.loc[link_name, 'build_year'] if 'build_year' in network.links.columns else None
939
+ lifetime = network.links.loc[link_name, 'lifetime'] if 'lifetime' in network.links.columns else None
940
+
941
+ # Check if component is active in this year
942
+ if is_component_active(build_year, lifetime, year):
943
+ # Get power capacity and capital cost (per MW)
944
+ if 'p_nom_opt' in network.links.columns:
945
+ capacity_mw = float(network.links.loc[link_name, 'p_nom_opt'])
946
+ else:
947
+ capacity_mw = float(network.links.loc[link_name, 'p_nom']) if 'p_nom' in network.links.columns else 0.0
948
+
949
+ capital_cost_per_mw = float(network.links.loc[link_name, 'capital_cost']) if 'capital_cost' in network.links.columns else 0.0
950
+
951
+ # Calculate annualized capital cost for this year
952
+ annual_capital_cost = capacity_mw * capital_cost_per_mw
953
+
954
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
955
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
956
+
957
+ # Calculate operational costs by carrier for this specific year
958
+ # Operational costs = dispatch (MWh) × marginal_cost (currency/MWh)
959
+ # Only for components that are active in this year
960
+
961
+ # 1. GENERATORS - Operational costs (including UNMET_LOAD)
962
+ if hasattr(network, 'generators_t') and hasattr(network.generators_t, 'p'):
963
+ # Get generator info: carrier, marginal_cost, build_year, lifetime (include UNMET_LOAD)
964
+ cursor = conn.execute("""
965
+ SELECT c.name as component_name,
966
+ CASE
967
+ WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
968
+ ELSE carr.name
969
+ END as carrier_name
970
+ FROM components c
971
+ JOIN carriers carr ON c.carrier_id = carr.id
972
+ WHERE c.network_id = ? AND c.component_type IN ('GENERATOR', 'UNMET_LOAD')
973
+ """, (network_id,))
974
+ generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
975
+
976
+ # Filter generation data for this specific year
977
+ year_generation = self._filter_timeseries_by_year(network.generators_t.p, network.snapshots, year)
978
+ if year_generation is not None and not year_generation.empty:
979
+ for gen_name in year_generation.columns:
980
+ if gen_name in generator_carriers:
981
+ carrier_name = generator_carriers[gen_name]
982
+
983
+ # Get build year and lifetime
984
+ build_year = network.generators.loc[gen_name, 'build_year'] if 'build_year' in network.generators.columns else None
985
+ lifetime = network.generators.loc[gen_name, 'lifetime'] if 'lifetime' in network.generators.columns else None
986
+
987
+ # Check if component is active in this year
988
+ if is_component_active(build_year, lifetime, year):
989
+ # Calculate generation for this year (already calculated above, but need to recalculate for operational costs)
990
+ year_weightings = self._get_year_weightings(network, year)
991
+ if year_weightings is not None:
992
+ generation_mwh = float((year_generation[gen_name].values * year_weightings).sum())
993
+ else:
994
+ generation_mwh = float(year_generation[gen_name].sum())
995
+
996
+ # Get marginal cost
997
+ marginal_cost = float(network.generators.loc[gen_name, 'marginal_cost']) if 'marginal_cost' in network.generators.columns else 0.0
998
+
999
+ # Calculate operational cost for this year
1000
+ operational_cost = generation_mwh * marginal_cost
1001
+
1002
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
1003
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
1004
+
1005
+ # 2. STORAGE_UNITS - Operational costs (discharge only)
1006
+ if hasattr(network, 'storage_units_t') and hasattr(network.storage_units_t, 'p'):
1007
+ # Get storage unit info: carrier, marginal_cost, build_year, lifetime
1008
+ cursor = conn.execute("""
1009
+ SELECT c.name as component_name, carr.name as carrier_name
1010
+ FROM components c
1011
+ JOIN carriers carr ON c.carrier_id = carr.id
1012
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
1013
+ """, (network_id,))
1014
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
1015
+
1016
+ # Filter storage unit data for this specific year
1017
+ year_storage = self._filter_timeseries_by_year(network.storage_units_t.p, network.snapshots, year)
1018
+ if year_storage is not None and not year_storage.empty:
1019
+ for su_name in year_storage.columns:
1020
+ if su_name in storage_unit_carriers:
1021
+ carrier_name = storage_unit_carriers[su_name]
1022
+
1023
+ # Get build year and lifetime
1024
+ build_year = network.storage_units.loc[su_name, 'build_year'] if 'build_year' in network.storage_units.columns else None
1025
+ lifetime = network.storage_units.loc[su_name, 'lifetime'] if 'lifetime' in network.storage_units.columns else None
1026
+
1027
+ # Check if component is active in this year
1028
+ if is_component_active(build_year, lifetime, year):
1029
+ # Calculate discharge for this year (positive values only)
1030
+ year_weightings = self._get_year_weightings(network, year)
1031
+ if year_weightings is not None:
1032
+ discharge_mwh = float((year_storage[su_name].clip(lower=0).values * year_weightings).sum())
1033
+ else:
1034
+ discharge_mwh = float(year_storage[su_name].clip(lower=0).sum())
1035
+
1036
+ # Get marginal cost
1037
+ marginal_cost = float(network.storage_units.loc[su_name, 'marginal_cost']) if 'marginal_cost' in network.storage_units.columns else 0.0
1038
+
1039
+ # Calculate operational cost for this year
1040
+ operational_cost = discharge_mwh * marginal_cost
1041
+
1042
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
1043
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
1044
+
1045
+ # 3. STORES - Operational costs (discharge only)
1046
+ if hasattr(network, 'stores_t') and hasattr(network.stores_t, 'p'):
1047
+ # Get store info: carrier, marginal_cost, build_year, lifetime
1048
+ cursor = conn.execute("""
1049
+ SELECT c.name as component_name, carr.name as carrier_name
1050
+ FROM components c
1051
+ JOIN carriers carr ON c.carrier_id = carr.id
1052
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
1053
+ """, (network_id,))
1054
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
1055
+
1056
+ # Filter store data for this specific year
1057
+ year_stores = self._filter_timeseries_by_year(network.stores_t.p, network.snapshots, year)
1058
+ if year_stores is not None and not year_stores.empty:
1059
+ for store_name in year_stores.columns:
1060
+ if store_name in store_carriers:
1061
+ carrier_name = store_carriers[store_name]
1062
+
1063
+ # Get build year and lifetime
1064
+ build_year = network.stores.loc[store_name, 'build_year'] if 'build_year' in network.stores.columns else None
1065
+ lifetime = network.stores.loc[store_name, 'lifetime'] if 'lifetime' in network.stores.columns else None
1066
+
1067
+ # Check if component is active in this year
1068
+ if is_component_active(build_year, lifetime, year):
1069
+ # Calculate discharge for this year (positive values only)
1070
+ year_weightings = self._get_year_weightings(network, year)
1071
+ if year_weightings is not None:
1072
+ discharge_mwh = float((year_stores[store_name].clip(lower=0).values * year_weightings).sum())
1073
+ else:
1074
+ discharge_mwh = float(year_stores[store_name].clip(lower=0).sum())
1075
+
1076
+ # Get marginal cost
1077
+ marginal_cost = float(network.stores.loc[store_name, 'marginal_cost']) if 'marginal_cost' in network.stores.columns else 0.0
1078
+
1079
+ # Calculate operational cost for this year
1080
+ operational_cost = discharge_mwh * marginal_cost
1081
+
1082
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
1083
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
1084
+
1085
+ # Calculate total system costs by carrier for this specific year
1086
+ # Total system cost = capital cost + operational cost
1087
+ for carrier_name in carrier_stats["capital_cost_by_carrier"]:
1088
+ capital_cost = carrier_stats["capital_cost_by_carrier"][carrier_name]
1089
+ operational_cost = carrier_stats["operational_cost_by_carrier"][carrier_name]
1090
+ total_system_cost = capital_cost + operational_cost
1091
+
1092
+ if carrier_name in carrier_stats["total_system_cost_by_carrier"]:
1093
+ carrier_stats["total_system_cost_by_carrier"][carrier_name] = total_system_cost
1094
+
1095
+ # Calculate capacity by carrier for this specific year
1096
+
1097
+ # 4. GENERATORS - Power capacity (MW) (including UNMET_LOAD)
1098
+ if hasattr(network, 'generators') and not network.generators.empty:
1099
+ # Get generator-carrier mapping (include UNMET_LOAD)
1100
+ cursor = conn.execute("""
1101
+ SELECT c.name as component_name,
1102
+ CASE
1103
+ WHEN c.component_type = 'UNMET_LOAD' THEN 'Unmet Load'
1104
+ ELSE carr.name
1105
+ END as carrier_name
1106
+ FROM components c
1107
+ JOIN carriers carr ON c.carrier_id = carr.id
1108
+ WHERE c.network_id = ? AND c.component_type IN ('GENERATOR', 'UNMET_LOAD')
1109
+ """, (network_id,))
1110
+ generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
1111
+
1112
+ for gen_name in network.generators.index:
1113
+ if gen_name in generator_carriers:
1114
+ carrier_name = generator_carriers[gen_name]
1115
+
1116
+ # Check if this generator is available in this year (build_year <= year)
1117
+ is_available = True
1118
+ if 'build_year' in network.generators.columns:
1119
+ build_year = network.generators.loc[gen_name, 'build_year']
1120
+ if pd.notna(build_year) and int(build_year) > year:
1121
+ is_available = False
1122
+
1123
+ if is_available:
1124
+ # Use p_nom_opt if available, otherwise p_nom
1125
+ if 'p_nom_opt' in network.generators.columns:
1126
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom_opt'])
1127
+ else:
1128
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom']) if 'p_nom' in network.generators.columns else 0.0
1129
+
1130
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
1131
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
1132
+
1133
+ # 2. STORAGE_UNITS - Power capacity (MW) + Energy capacity (MWh)
1134
+ if hasattr(network, 'storage_units') and not network.storage_units.empty:
1135
+ # Get storage unit-carrier mapping
1136
+ cursor = conn.execute("""
1137
+ SELECT c.name as component_name, carr.name as carrier_name
1138
+ FROM components c
1139
+ JOIN carriers carr ON c.carrier_id = carr.id
1140
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
1141
+ """, (network_id,))
1142
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
1143
+
1144
+ for su_name in network.storage_units.index:
1145
+ if su_name in storage_unit_carriers:
1146
+ carrier_name = storage_unit_carriers[su_name]
1147
+
1148
+ # Check if this storage unit is available in this year
1149
+ is_available = True
1150
+ if 'build_year' in network.storage_units.columns:
1151
+ build_year = network.storage_units.loc[su_name, 'build_year']
1152
+ if pd.notna(build_year) and int(build_year) > year:
1153
+ is_available = False
1154
+
1155
+ if is_available:
1156
+ # Power capacity (MW)
1157
+ if 'p_nom_opt' in network.storage_units.columns:
1158
+ p_nom_opt = float(network.storage_units.loc[su_name, 'p_nom_opt'])
1159
+ else:
1160
+ p_nom_opt = float(network.storage_units.loc[su_name, 'p_nom']) if 'p_nom' in network.storage_units.columns else 0.0
1161
+
1162
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
1163
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += p_nom_opt
1164
+
1165
+ # Energy capacity (MWh) using max_hours
1166
+ max_hours = 1.0 # Default
1167
+ if 'max_hours' in network.storage_units.columns:
1168
+ max_hours = float(network.storage_units.loc[su_name, 'max_hours'])
1169
+ energy_capacity_mwh = p_nom_opt * max_hours
1170
+
1171
+ if carrier_name in carrier_stats["energy_capacity_by_carrier"]:
1172
+ carrier_stats["energy_capacity_by_carrier"][carrier_name] += energy_capacity_mwh
1173
+
1174
+ # 3. STORES - Energy capacity (MWh) only
1175
+ if hasattr(network, 'stores') and not network.stores.empty:
1176
+ # Get store-carrier mapping
1177
+ cursor = conn.execute("""
1178
+ SELECT c.name as component_name, carr.name as carrier_name
1179
+ FROM components c
1180
+ JOIN carriers carr ON c.carrier_id = carr.id
1181
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
1182
+ """, (network_id,))
1183
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
1184
+
1185
+ for store_name in network.stores.index:
1186
+ if store_name in store_carriers:
1187
+ carrier_name = store_carriers[store_name]
1188
+
1189
+ # Check if this store is available in this year
1190
+ is_available = True
1191
+ if 'build_year' in network.stores.columns:
1192
+ build_year = network.stores.loc[store_name, 'build_year']
1193
+ if pd.notna(build_year) and int(build_year) > year:
1194
+ is_available = False
1195
+
1196
+ if is_available:
1197
+ # Energy capacity (MWh)
1198
+ if 'e_nom_opt' in network.stores.columns:
1199
+ capacity_mwh = float(network.stores.loc[store_name, 'e_nom_opt'])
1200
+ else:
1201
+ capacity_mwh = float(network.stores.loc[store_name, 'e_nom']) if 'e_nom' in network.stores.columns else 0.0
1202
+
1203
+ if carrier_name in carrier_stats["energy_capacity_by_carrier"]:
1204
+ carrier_stats["energy_capacity_by_carrier"][carrier_name] += capacity_mwh
1205
+
1206
+ # 4. LINES - Apparent power capacity (MVA -> MW)
1207
+ if hasattr(network, 'lines') and not network.lines.empty:
1208
+ # Get line-carrier mapping
1209
+ cursor = conn.execute("""
1210
+ SELECT c.name as component_name, carr.name as carrier_name
1211
+ FROM components c
1212
+ JOIN carriers carr ON c.carrier_id = carr.id
1213
+ WHERE c.network_id = ? AND c.component_type = 'LINE'
1214
+ """, (network_id,))
1215
+ line_carriers = {row[0]: row[1] for row in cursor.fetchall()}
1216
+
1217
+ for line_name in network.lines.index:
1218
+ if line_name in line_carriers:
1219
+ carrier_name = line_carriers[line_name]
1220
+
1221
+ # Check if this line is available in this year
1222
+ is_available = True
1223
+ if 'build_year' in network.lines.columns:
1224
+ build_year = network.lines.loc[line_name, 'build_year']
1225
+ if pd.notna(build_year) and int(build_year) > year:
1226
+ is_available = False
1227
+
1228
+ if is_available:
1229
+ # Apparent power capacity (MVA -> MW, assume power factor = 1)
1230
+ if 's_nom_opt' in network.lines.columns:
1231
+ capacity_mva = float(network.lines.loc[line_name, 's_nom_opt'])
1232
+ else:
1233
+ capacity_mva = float(network.lines.loc[line_name, 's_nom']) if 's_nom' in network.lines.columns else 0.0
1234
+
1235
+ capacity_mw = capacity_mva # Convert MVA to MW
1236
+
1237
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
1238
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
1239
+
1240
+ # 5. LINKS - Power capacity (MW)
1241
+ if hasattr(network, 'links') and not network.links.empty:
1242
+ # Get link-carrier mapping
1243
+ cursor = conn.execute("""
1244
+ SELECT c.name as component_name, carr.name as carrier_name
1245
+ FROM components c
1246
+ JOIN carriers carr ON c.carrier_id = carr.id
1247
+ WHERE c.network_id = ? AND c.component_type = 'LINK'
1248
+ """, (network_id,))
1249
+ link_carriers = {row[0]: row[1] for row in cursor.fetchall()}
1250
+
1251
+ for link_name in network.links.index:
1252
+ if link_name in link_carriers:
1253
+ carrier_name = link_carriers[link_name]
1254
+
1255
+ # Check if this link is available in this year
1256
+ is_available = True
1257
+ if 'build_year' in network.links.columns:
1258
+ build_year = network.links.loc[link_name, 'build_year']
1259
+ if pd.notna(build_year) and int(build_year) > year:
1260
+ is_available = False
1261
+
1262
+ if is_available:
1263
+ # Power capacity (MW)
1264
+ if 'p_nom_opt' in network.links.columns:
1265
+ capacity_mw = float(network.links.loc[link_name, 'p_nom_opt'])
1266
+ else:
1267
+ capacity_mw = float(network.links.loc[link_name, 'p_nom']) if 'p_nom' in network.links.columns else 0.0
1268
+
1269
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
1270
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
1271
+
1272
+ logger.info(f"Calculated year {year} carrier statistics:")
1273
+ logger.info(f" Dispatch: {sum(carrier_stats['dispatch_by_carrier'].values()):.2f} MWh")
1274
+ logger.info(f" Emissions: {sum(carrier_stats['emissions_by_carrier'].values()):.2f} tonnes CO2")
1275
+ logger.info(f" Capital cost: {sum(carrier_stats['capital_cost_by_carrier'].values()):.2f} USD")
1276
+ logger.info(f" Operational cost: {sum(carrier_stats['operational_cost_by_carrier'].values()):.2f} USD")
1277
+ logger.info(f" Total system cost: {sum(carrier_stats['total_system_cost_by_carrier'].values()):.2f} USD")
1278
+ logger.info(f" Power capacity: {sum(carrier_stats['power_capacity_by_carrier'].values()):.2f} MW")
1279
+ logger.info(f" Energy capacity: {sum(carrier_stats['energy_capacity_by_carrier'].values()):.2f} MWh")
1280
+
1281
+ return carrier_stats
1282
+
1283
+ except Exception as e:
1284
+ logger.error(f"Failed to calculate year {year} carrier statistics: {e}", exc_info=True)
1285
+ return {
1286
+ "dispatch_by_carrier": {},
1287
+ "power_capacity_by_carrier": {},
1288
+ "energy_capacity_by_carrier": {},
1289
+ "emissions_by_carrier": {},
1290
+ "capital_cost_by_carrier": {},
1291
+ "operational_cost_by_carrier": {},
1292
+ "total_system_cost_by_carrier": {}
1293
+ }
1294
+
1295
def _sum_year_based_carrier_statistics(self, conn, network_id: int) -> Dict[str, Any]:
    """
    Sum up per-year carrier statistics for accurate multi-year totals.

    Capacity statistics (power/energy) reflect the LAST solved year —
    the final installed-capacity state — while all flow-like statistics
    (dispatch, emissions, capital/operational/total costs) are summed
    across every solved year.

    Args:
        conn: Open SQLite connection holding the ``carriers`` and
            ``network_solve_results_by_year`` tables.
        network_id: Id of the network whose per-year results are aggregated.

    Returns:
        Dict mapping each statistic name (e.g. ``dispatch_by_carrier``) to a
        ``{carrier_name: value}`` dict. On any unexpected failure the same
        structure with empty inner dicts is returned instead of raising.
    """
    # Single source of truth for the statistic categories; keeps the
    # zero-seeding, the accumulation loops, and the error-path return
    # structurally identical (previously triplicated by hand).
    stat_keys = (
        "dispatch_by_carrier",
        "power_capacity_by_carrier",
        "energy_capacity_by_carrier",
        "emissions_by_carrier",
        "capital_cost_by_carrier",
        "operational_cost_by_carrier",
        "total_system_cost_by_carrier",
    )
    # Capacity is a *state* (take final year); everything else is a *flow* (sum).
    capacity_keys = ("power_capacity_by_carrier", "energy_capacity_by_carrier")
    flow_keys = tuple(k for k in stat_keys if k not in capacity_keys)

    try:
        import json

        # Seed every known carrier — plus the synthetic "Unmet Load"
        # carrier used for slack generation — with zero in every stat, so
        # carriers absent from some years still appear in the totals.
        cursor = conn.execute("""
            SELECT DISTINCT name FROM carriers WHERE network_id = ?
        """, (network_id,))
        carriers = [row[0] for row in cursor.fetchall()] + ['Unmet Load']
        totals: Dict[str, Dict[str, float]] = {
            key: {carrier: 0.0 for carrier in carriers} for key in stat_keys
        }

        # Get all year-based results, ordered by year (last row = final year).
        cursor = conn.execute("""
            SELECT year, results_json FROM network_solve_results_by_year
            WHERE network_id = ?
            ORDER BY year
        """, (network_id,))
        year_results = cursor.fetchall()
        logger.info(f"Found {len(year_results)} year-based results to sum for network {network_id}")

        if not year_results:
            logger.warning(f"No year-based results found for network {network_id}")
            return totals

        def _custom_stats(results_json: str) -> Dict[str, Any]:
            """Extract the custom_statistics dict from one year's JSON payload."""
            results = json.loads(results_json)
            return results.get('network_statistics', {}).get('custom_statistics', {})

        # Capacity: overwrite with the LAST year only (final capacity state).
        # Unknown carriers (not seeded above) are deliberately ignored.
        last_year, last_results_json = year_results[-1]
        try:
            custom_stats = _custom_stats(last_results_json)
            for key in capacity_keys:
                for carrier, value in custom_stats.get(key, {}).items():
                    if carrier in totals[key]:
                        totals[key][carrier] = float(value or 0)
            logger.info(f"Used last year ({last_year}) capacity as all-year capacity")
        except Exception as e:
            # Best-effort: a corrupt final year leaves capacities at zero.
            logger.error(f"Failed to process last year ({last_year}) results: {e}")

        # Flows (dispatch, emissions, costs): accumulate across all years.
        for year, results_json in year_results:
            try:
                custom_stats = _custom_stats(results_json)
                for key in flow_keys:
                    for carrier, value in custom_stats.get(key, {}).items():
                        if carrier in totals[key]:
                            totals[key][carrier] += float(value or 0)
            except Exception as e:
                # Skip a corrupt year rather than aborting the whole total.
                logger.error(f"Failed to process year {year} results: {e}")
                continue

        logger.info(f"Summed carrier statistics across {len(year_results)} years:")
        logger.info(f"  Final power capacity: {sum(totals['power_capacity_by_carrier'].values()):.2f} MW")
        logger.info(f"  Final energy capacity: {sum(totals['energy_capacity_by_carrier'].values()):.2f} MWh")
        logger.info(f"  Total dispatch: {sum(totals['dispatch_by_carrier'].values()):.2f} MWh")
        logger.info(f"  Total emissions: {sum(totals['emissions_by_carrier'].values()):.2f} tonnes CO2")
        logger.info(f"  Total capital cost: {sum(totals['capital_cost_by_carrier'].values()):.2f} USD")
        logger.info(f"  Total operational cost: {sum(totals['operational_cost_by_carrier'].values()):.2f} USD")
        logger.info(f"  Total system cost: {sum(totals['total_system_cost_by_carrier'].values()):.2f} USD")

        return totals

    except Exception as e:
        logger.error(f"Failed to sum year-based carrier statistics: {e}", exc_info=True)
        # Return empty structure on error — callers expect all keys present.
        return {key: {} for key in stat_keys}
1433
+
1434
+ def _serialize_results_json(self, solve_result: Dict[str, Any]) -> str:
1435
+ """Serialize solve results to JSON string."""
1436
+ import json
1437
+ try:
1438
+ # Create a clean results dictionary
1439
+ results = {
1440
+ "success": solve_result.get("success", False),
1441
+ "status": solve_result.get("status", "unknown"),
1442
+ "solve_time": solve_result.get("solve_time", 0.0),
1443
+ "objective_value": solve_result.get("objective_value"),
1444
+ "solver_name": solve_result.get("solver_name", "unknown"),
1445
+ "run_id": solve_result.get("run_id"),
1446
+ "network_statistics": solve_result.get("network_statistics", {}),
1447
+ "pypsa_result": solve_result.get("pypsa_result", {})
1448
+ }
1449
+ return json.dumps(results, default=self._json_serializer)
1450
+ except Exception as e:
1451
+ logger.warning(f"Failed to serialize results JSON: {e}")
1452
+ return json.dumps({"error": "serialization_failed"})
1453
+
1454
+ def _serialize_metadata_json(self, solve_result: Dict[str, Any]) -> str:
1455
+ """Serialize solve metadata to JSON string."""
1456
+ import json
1457
+ try:
1458
+ metadata = {
1459
+ "solver_name": solve_result.get("solver_name", "unknown"),
1460
+ "run_id": solve_result.get("run_id"),
1461
+ "multi_period": solve_result.get("multi_period", False),
1462
+ "years": solve_result.get("years", []),
1463
+ "network_name": solve_result.get("network_name"),
1464
+ "num_snapshots": solve_result.get("num_snapshots", 0)
1465
+ }
1466
+ return json.dumps(metadata, default=self._json_serializer)
1467
+ except Exception as e:
1468
+ logger.warning(f"Failed to serialize metadata JSON: {e}")
1469
+ return json.dumps({"error": "serialization_failed"})
1470
+
1471
+ def _filter_timeseries_by_year(self, timeseries_df: 'pd.DataFrame', snapshots: 'pd.Index', year: int) -> 'pd.DataFrame':
1472
+ """Filter timeseries data by year"""
1473
+ try:
1474
+ # Handle MultiIndex case (multi-period optimization)
1475
+ if hasattr(snapshots, 'levels'):
1476
+ period_values = snapshots.get_level_values(0)
1477
+ year_mask = period_values == year
1478
+ if year_mask.any():
1479
+ year_snapshots = snapshots[year_mask]
1480
+ return timeseries_df.loc[year_snapshots]
1481
+
1482
+ # Handle DatetimeIndex case (regular time series)
1483
+ elif hasattr(snapshots, 'year'):
1484
+ year_mask = snapshots.year == year
1485
+ if year_mask.any():
1486
+ return timeseries_df.loc[year_mask]
1487
+
1488
+ # Fallback - return None if can't filter
1489
+ return None
1490
+
1491
+ except Exception as e:
1492
+ logger.error(f"Failed to filter timeseries by year {year}: {e}")
1493
+ return None
1494
+
1495
+ def _get_year_weightings(self, network: 'pypsa.Network', year: int) -> 'np.ndarray':
1496
+ """Get snapshot weightings for a specific year"""
1497
+ try:
1498
+ # Filter snapshot weightings by year
1499
+ if hasattr(network.snapshots, 'levels'):
1500
+ period_values = network.snapshots.get_level_values(0)
1501
+ year_mask = period_values == year
1502
+ if year_mask.any():
1503
+ year_snapshots = network.snapshots[year_mask]
1504
+ year_weightings = network.snapshot_weightings.loc[year_snapshots]
1505
+ if isinstance(year_weightings, pd.DataFrame):
1506
+ if 'objective' in year_weightings.columns:
1507
+ return year_weightings['objective'].values
1508
+ else:
1509
+ return year_weightings.iloc[:, 0].values
1510
+ else:
1511
+ return year_weightings.values
1512
+
1513
+ elif hasattr(network.snapshots, 'year'):
1514
+ year_mask = network.snapshots.year == year
1515
+ if year_mask.any():
1516
+ year_weightings = network.snapshot_weightings.loc[year_mask]
1517
+ if isinstance(year_weightings, pd.DataFrame):
1518
+ if 'objective' in year_weightings.columns:
1519
+ return year_weightings['objective'].values
1520
+ else:
1521
+ return year_weightings.iloc[:, 0].values
1522
+ else:
1523
+ return year_weightings.values
1524
+
1525
+ return None
1526
+
1527
+ except Exception as e:
1528
+ logger.error(f"Failed to get year weightings for year {year}: {e}")
1529
+ return None
1530
+
1531
+ def _calculate_total_demand(self, network: 'pypsa.Network') -> float:
1532
+ """Calculate total demand from loads in the network"""
1533
+ try:
1534
+ total_demand = 0.0
1535
+
1536
+ # Calculate demand from loads
1537
+ if hasattr(network, 'loads_t') and hasattr(network.loads_t, 'p'):
1538
+ # Apply snapshot weightings to convert MW to MWh
1539
+ weightings = network.snapshot_weightings
1540
+ if isinstance(weightings, pd.DataFrame):
1541
+ if 'objective' in weightings.columns:
1542
+ weighting_values = weightings['objective'].values
1543
+ else:
1544
+ weighting_values = weightings.iloc[:, 0].values
1545
+ else:
1546
+ weighting_values = weightings.values
1547
+
1548
+ total_demand = float((network.loads_t.p.values * weighting_values[:, None]).sum())
1549
+
1550
+ return total_demand
1551
+
1552
+ except Exception as e:
1553
+ logger.error(f"Failed to calculate total demand: {e}")
1554
+ return 0.0
1555
+
1556
+ def _json_serializer(self, obj):
1557
+ """Convert numpy/pandas types to JSON serializable types"""
1558
+ import numpy as np
1559
+ import pandas as pd
1560
+
1561
+ if isinstance(obj, (np.integer, np.int64, np.int32)):
1562
+ return int(obj)
1563
+ elif isinstance(obj, (np.floating, np.float64, np.float32)):
1564
+ return float(obj)
1565
+ elif isinstance(obj, np.ndarray):
1566
+ return obj.tolist()
1567
+ elif isinstance(obj, pd.Series):
1568
+ return obj.to_dict()
1569
+ elif isinstance(obj, pd.DataFrame):
1570
+ return obj.to_dict()
1571
+ elif hasattr(obj, 'item'): # Handle numpy scalars
1572
+ return obj.item()
1573
+ else:
1574
+ raise TypeError(f"Object of type {type(obj)} is not JSON serializable")