pyconvexity 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in the registry.

Potentially problematic release. This version of pyconvexity might be problematic.

Files changed (35)
  1. pyconvexity/__init__.py +30 -6
  2. pyconvexity/_version.py +1 -1
  3. pyconvexity/data/README.md +101 -0
  4. pyconvexity/data/__init__.py +18 -0
  5. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  6. pyconvexity/data/loaders/__init__.py +3 -0
  7. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  8. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  9. pyconvexity/data/loaders/cache.py +212 -0
  10. pyconvexity/data/sources/__init__.py +5 -0
  11. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  12. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  13. pyconvexity/data/sources/gem.py +412 -0
  14. pyconvexity/io/__init__.py +32 -0
  15. pyconvexity/io/excel_exporter.py +991 -0
  16. pyconvexity/io/excel_importer.py +1112 -0
  17. pyconvexity/io/netcdf_exporter.py +192 -0
  18. pyconvexity/io/netcdf_importer.py +599 -0
  19. pyconvexity/models/__init__.py +7 -0
  20. pyconvexity/models/attributes.py +3 -1
  21. pyconvexity/models/components.py +3 -0
  22. pyconvexity/models/scenarios.py +177 -0
  23. pyconvexity/solvers/__init__.py +29 -0
  24. pyconvexity/solvers/pypsa/__init__.py +24 -0
  25. pyconvexity/solvers/pypsa/api.py +398 -0
  26. pyconvexity/solvers/pypsa/batch_loader.py +311 -0
  27. pyconvexity/solvers/pypsa/builder.py +656 -0
  28. pyconvexity/solvers/pypsa/constraints.py +321 -0
  29. pyconvexity/solvers/pypsa/solver.py +1255 -0
  30. pyconvexity/solvers/pypsa/storage.py +2207 -0
  31. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/METADATA +5 -2
  32. pyconvexity-0.1.3.dist-info/RECORD +45 -0
  33. pyconvexity-0.1.1.dist-info/RECORD +0 -20
  34. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/WHEEL +0 -0
  35. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/top_level.txt +0 -0
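
The hunk below is the full content of the largest new file, pyconvexity/solvers/pypsa/storage.py (entry 30 above, +2207 lines).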
@@ -0,0 +1,2207 @@
+ """
+ Result storage functionality for PyPSA solver integration.
+
+ Handles storing solve results back to the database with proper validation and error handling.
+ """
+
+ import logging
+ import uuid
+ import pandas as pd
+ import numpy as np
+ from typing import Dict, Any, Optional, Callable
+
+ from pyconvexity.core.types import StaticValue, TimeseriesPoint
+ from pyconvexity.models import (
+     list_components_by_type, set_static_attribute, set_timeseries_attribute
+ )
+ from pyconvexity.validation import get_validation_rule
+
+ logger = logging.getLogger(__name__)
+
+
+ class ResultStorage:
+     """
+     Handles storing PyPSA solve results back to the database.
+
+     This class manages the complex process of extracting results from PyPSA networks
+     and storing them back to the database with proper validation and error handling.
+     """
+
+     def store_results(
+         self,
+         conn,
+         network_id: int,
+         network: 'pypsa.Network',
+         solve_result: Dict[str, Any],
+         scenario_id: Optional[int] = None
+     ) -> Dict[str, Any]:
+         """
+         Store complete solve results back to database.
+
+         Args:
+             conn: Database connection
+             network_id: ID of the network
+             network: Solved PyPSA Network object
+             solve_result: Solve result metadata
+             scenario_id: Optional scenario ID
+
+         Returns:
+             Dictionary with storage statistics
+         """
+         run_id = solve_result.get('run_id', str(uuid.uuid4()))
+
+         try:
+             # Store component results
+             component_stats = self._store_component_results(
+                 conn, network_id, network, scenario_id
+             )
+
+             # Calculate network statistics first
+             network_stats = self._calculate_network_statistics(
+                 conn, network_id, network, solve_result
+             )
+
+             # Store solve summary with network statistics
+             self._store_solve_summary(
+                 conn, network_id, solve_result, scenario_id, network_stats
+             )
+
+             # Store year-based statistics if available
+             year_stats_stored = 0
+             if solve_result.get('year_statistics'):
+                 year_stats_stored = self._store_year_based_statistics(
+                     conn, network_id, network, solve_result['year_statistics'], scenario_id
+                 )
+
+             return {
+                 "component_stats": component_stats,
+                 "network_stats": network_stats,
+                 "year_stats_stored": year_stats_stored,
+                 "run_id": run_id,
+                 "success": True
+             }
+
+         except Exception as e:
+             logger.error(f"Failed to store solve results: {e}", exc_info=True)
+             return {
+                 "component_stats": {},
+                 "network_stats": {},
+                 "run_id": run_id,
+                 "success": False,
+                 "error": str(e)
+             }
+
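As a usage sketch (not shipped with the package), store_results is the single entry point: it takes a database connection, the network's ID, the solved pypsa.Network, and the solve metadata dictionary whose keys are read throughout this file. The file paths, IDs, and values below are assumptions for illustration:

    import sqlite3

    import pypsa

    from pyconvexity.solvers.pypsa.storage import ResultStorage

    # Assumed setup: a SQLite-backed pyconvexity database and an already-solved network.
    conn = sqlite3.connect("model.db")
    network = pypsa.Network("solved_model.nc")

    solve_result = {
        "status": "ok",                # read by _store_solve_summary
        "objective_value": 1.23e9,
        "solve_time": 42.0,
        "solver_name": "highs",
        "run_id": "example-run-0001",  # optional; a uuid4 is generated when absent
    }

    storage = ResultStorage()
    stats = storage.store_results(conn, network_id=1, network=network, solve_result=solve_result)
    print(stats["success"], stats["component_stats"])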
+     def _store_component_results(
+         self,
+         conn,
+         network_id: int,
+         network: 'pypsa.Network',
+         scenario_id: Optional[int]
+     ) -> Dict[str, int]:
+         """Store results for all component types."""
+         results_stats = {
+             "stored_bus_results": 0,
+             "stored_generator_results": 0,
+             "stored_unmet_load_results": 0,
+             "stored_load_results": 0,
+             "stored_line_results": 0,
+             "stored_link_results": 0,
+             "stored_storage_unit_results": 0,
+             "stored_store_results": 0,
+             "skipped_attributes": 0,
+             "errors": 0
+         }
+
+         try:
+             # Store bus results
+             if hasattr(network, 'buses_t') and network.buses_t:
+                 results_stats["stored_bus_results"] = self._store_component_type_results(
+                     conn, network_id, 'BUS', network.buses, network.buses_t, scenario_id
+                 )
+
+             # Store generator results (includes regular generators)
+             if hasattr(network, 'generators_t') and network.generators_t:
+                 results_stats["stored_generator_results"] = self._store_component_type_results(
+                     conn, network_id, 'GENERATOR', network.generators, network.generators_t, scenario_id
+                 )
+
+             # Store UNMET_LOAD results (these are also stored as generators in PyPSA)
+             results_stats["stored_unmet_load_results"] = self._store_component_type_results(
+                 conn, network_id, 'UNMET_LOAD', network.generators, network.generators_t, scenario_id
+             )
+
+             # Store load results
+             if hasattr(network, 'loads_t') and network.loads_t:
+                 results_stats["stored_load_results"] = self._store_component_type_results(
+                     conn, network_id, 'LOAD', network.loads, network.loads_t, scenario_id
+                 )
+
+             # Store line results
+             if hasattr(network, 'lines_t') and network.lines_t:
+                 results_stats["stored_line_results"] = self._store_component_type_results(
+                     conn, network_id, 'LINE', network.lines, network.lines_t, scenario_id
+                 )
+
+             # Store link results
+             if hasattr(network, 'links_t') and network.links_t:
+                 results_stats["stored_link_results"] = self._store_component_type_results(
+                     conn, network_id, 'LINK', network.links, network.links_t, scenario_id
+                 )
+
+             # Store storage unit results
+             if hasattr(network, 'storage_units_t') and network.storage_units_t:
+                 results_stats["stored_storage_unit_results"] = self._store_component_type_results(
+                     conn, network_id, 'STORAGE_UNIT', network.storage_units, network.storage_units_t, scenario_id
+                 )
+
+             # Store store results
+             if hasattr(network, 'stores_t') and network.stores_t:
+                 results_stats["stored_store_results"] = self._store_component_type_results(
+                     conn, network_id, 'STORE', network.stores, network.stores_t, scenario_id
+                 )
+
+             return results_stats
+
+         except Exception as e:
+             logger.error(f"Error storing solve results: {e}", exc_info=True)
+             results_stats["errors"] += 1
+             return results_stats
+
+     def _store_component_type_results(
+         self,
+         conn,
+         network_id: int,
+         component_type: str,
+         static_df: pd.DataFrame,
+         timeseries_dict: Dict[str, pd.DataFrame],
+         scenario_id: Optional[int]
+     ) -> int:
+         """Store results for a specific component type - only store OUTPUT attributes."""
+         stored_count = 0
+
+         try:
+             # Get component name to ID mapping
+             components = list_components_by_type(conn, network_id, component_type)
+             name_to_id = {comp.name: comp.id for comp in components}
+
+             # Store timeseries results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
+             for attr_name, timeseries_df in timeseries_dict.items():
+                 if timeseries_df.empty:
+                     continue
+
+                 # Check if this attribute is an output attribute (not an input)
+                 try:
+                     rule = get_validation_rule(conn, component_type, attr_name)
+                     if rule.is_input:
+                         # Skip input attributes to preserve original input data
+                         continue
+                 except Exception:
+                     # If no validation rule found, skip to be safe
+                     continue
+
+                 for component_name in timeseries_df.columns:
+                     if component_name not in name_to_id:
+                         continue
+
+                     component_id = name_to_id[component_name]
+                     component_series = timeseries_df[component_name]
+
+                     # Skip if all values are NaN
+                     if component_series.isna().all():
+                         continue
+
+                     # Convert to TimeseriesPoint list
+                     timeseries_points = []
+                     for period_index, (timestamp_idx, value) in enumerate(component_series.items()):
+                         if pd.isna(value):
+                             continue
+
+                         timestamp = int(timestamp_idx.timestamp()) if hasattr(timestamp_idx, 'timestamp') else period_index
+                         timeseries_points.append(TimeseriesPoint(
+                             timestamp=timestamp,
+                             value=float(value),
+                             period_index=period_index
+                         ))
+
+                     if not timeseries_points:
+                         continue
+
+                     # Store using atomic utility
+                     try:
+                         set_timeseries_attribute(conn, component_id, attr_name, timeseries_points, scenario_id)
+                         stored_count += 1
+                     except Exception as e:
+                         # Handle validation errors gracefully
+                         if ("No validation rule found" in str(e) or
+                             "does not allow" in str(e) or
+                             "ValidationError" in str(type(e).__name__)):
+                             continue
+                         else:
+                             logger.warning(f"Error storing timeseries {attr_name} for {component_type} '{component_name}': {e}")
+                             continue
+
+             # Store static optimization results - ONLY OUTPUT ATTRIBUTES (is_input=FALSE)
+             if not static_df.empty:
+                 for attr_name in static_df.columns:
+                     # Check if this attribute is an output attribute (not an input)
+                     try:
+                         rule = get_validation_rule(conn, component_type, attr_name)
+                         if rule.is_input:
+                             # Skip input attributes to preserve original input data
+                             continue
+                     except Exception:
+                         # If no validation rule found, skip to be safe
+                         continue
+
+                     for component_name, value in static_df[attr_name].items():
+                         if component_name not in name_to_id or pd.isna(value):
+                             continue
+
+                         component_id = name_to_id[component_name]
+
+                         try:
+                             # Convert value to StaticValue
+                             if isinstance(value, (int, np.integer)):
+                                 static_value = StaticValue(int(value))
+                             elif isinstance(value, (float, np.floating)):
+                                 if np.isfinite(value):
+                                     static_value = StaticValue(float(value))
+                                 else:
+                                     continue  # Skip infinite/NaN values
+                             elif isinstance(value, bool):
+                                 static_value = StaticValue(bool(value))
+                             else:
+                                 static_value = StaticValue(str(value))
+
+                             # Store using atomic utility
+                             set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+                             stored_count += 1
+
+                         except Exception as e:
+                             # Handle validation errors gracefully
+                             if ("No validation rule found" in str(e) or
+                                 "does not allow" in str(e) or
+                                 "ValidationError" in str(type(e).__name__)):
+                                 continue
+                             else:
+                                 logger.warning(f"Error storing static {attr_name} for {component_type} '{component_name}': {e}")
+                                 continue
+
+             return stored_count
+
+         except Exception as e:
+             logger.error(f"Error storing results for {component_type}: {e}", exc_info=True)
+             return stored_count
+
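The heart of the timeseries path above is the conversion of one result column into TimeseriesPoint records: datetime-like snapshot indexes contribute their epoch seconds, anything else falls back to the positional index, and NaN values are dropped. A self-contained sketch of just that step, using only the TimeseriesPoint fields visible above:

    import pandas as pd

    from pyconvexity.core.types import TimeseriesPoint

    def series_to_points(series: pd.Series) -> list:
        """Mirror of the conversion loop above, for a single component column (sketch)."""
        points = []
        for period_index, (idx, value) in enumerate(series.items()):
            if pd.isna(value):
                continue  # NaN values are skipped rather than stored
            # DatetimeIndex entries have .timestamp(); a plain RangeIndex falls back to position.
            timestamp = int(idx.timestamp()) if hasattr(idx, "timestamp") else period_index
            points.append(TimeseriesPoint(timestamp=timestamp, value=float(value), period_index=period_index))
        return points

    # Example: an hourly series with one missing value yields two points.
    s = pd.Series([1.0, None, 3.0], index=pd.date_range("2030-01-01", periods=3, freq="h"))
    assert len(series_to_points(s)) == 2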
+     def _store_solve_summary(
+         self,
+         conn,
+         network_id: int,
+         solve_result: Dict[str, Any],
+         scenario_id: Optional[int],
+         network_stats: Optional[Dict[str, Any]] = None
+     ):
+         """Store solve summary to network_solve_results table."""
+         try:
+             # Prepare solve summary data
+             solver_name = solve_result.get('solver_name', 'unknown')
+             solve_status = solve_result.get('status', 'unknown')
+             objective_value = solve_result.get('objective_value')
+             solve_time = solve_result.get('solve_time', 0.0)
+
+             # Use master scenario if no scenario specified
+             if scenario_id is None:
+                 from pyconvexity.models import get_master_scenario_id
+                 scenario_id = get_master_scenario_id(conn, network_id)
+
+             # Create enhanced solve result with network statistics for serialization
+             enhanced_solve_result = {
+                 **solve_result,
+                 "network_statistics": network_stats or {}
+             }
+
+             # Store solve results summary
+             conn.execute("""
+                 INSERT OR REPLACE INTO network_solve_results (
+                     network_id, scenario_id, solver_name, solve_type, solve_status,
+                     objective_value, solve_time_seconds, results_json, metadata_json
+                 ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+             """, (
+                 network_id,
+                 scenario_id,
+                 solver_name,
+                 'pypsa_optimization',
+                 solve_status,
+                 objective_value,
+                 solve_time,
+                 self._serialize_results_json(enhanced_solve_result),
+                 self._serialize_metadata_json(enhanced_solve_result)
+             ))
+
+             logger.info(f"Stored solve summary for network {network_id}, scenario {scenario_id}")
+
+         except Exception as e:
+             logger.error(f"Failed to store solve summary: {e}", exc_info=True)
+
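Because the summary is written with INSERT OR REPLACE, the table presumably keeps one row per (network_id, scenario_id) pair, so reading a solve back is a single lookup. A hedged sketch, assuming only the column names that appear in the INSERT above and a uniqueness constraint on that pair:

    import json

    def load_solve_summary(conn, network_id: int, scenario_id: int):
        """Fetch a stored solve summary and decode its JSON payloads (sketch)."""
        row = conn.execute(
            """
            SELECT solver_name, solve_status, objective_value,
                   solve_time_seconds, results_json, metadata_json
            FROM network_solve_results
            WHERE network_id = ? AND scenario_id = ?
            """,
            (network_id, scenario_id),
        ).fetchone()
        if row is None:
            return None
        solver_name, status, objective, solve_time, results_json, metadata_json = row
        return {
            "solver_name": solver_name,
            "solve_status": status,
            "objective_value": objective,
            "solve_time_seconds": solve_time,
            "results": json.loads(results_json),
            "metadata": json.loads(metadata_json),
        }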
+     def _calculate_network_statistics(
+         self,
+         conn,
+         network_id: int,
+         network: 'pypsa.Network',
+         solve_result: Dict[str, Any]
+     ) -> Dict[str, Any]:
+         """Calculate network statistics in the format expected by the frontend."""
+         try:
+             # Calculate basic statistics
+             total_generation_mwh = 0.0
+             total_load_mwh = 0.0
+             unmet_load_mwh = 0.0
+
+             # Calculate generation statistics (simple sum of all positive generator output)
+             if hasattr(network, 'generators_t') and hasattr(network.generators_t, 'p'):
+                 gen_data = network.generators_t.p
+                 if not gen_data.empty:
+                     # Debug: Log what's in the generators DataFrame
+                     logger.info(f"Generators DataFrame columns: {list(gen_data.columns)}")
+                     logger.info(f"Generators DataFrame shape: {gen_data.shape}")
+
+                     # Total generation - only count positive generation (ignore negative values like storage charging)
+                     # CRITICAL: Apply snapshot weightings to convert MW to MWh
+                     weightings = network.snapshot_weightings
+                     if isinstance(weightings, pd.DataFrame):
+                         if 'objective' in weightings.columns:
+                             weighting_values = weightings['objective'].values
+                         else:
+                             weighting_values = weightings.iloc[:, 0].values
+                     else:
+                         weighting_values = weightings.values
+
+                     # Apply weightings and clip to positive values
+                     total_generation_mwh = float((gen_data.clip(lower=0).values * weighting_values[:, None]).sum())
+
+                     # Debug logging
+                     raw_sum = gen_data.sum().sum()
+                     clipped_sum = gen_data.clip(lower=0).sum().sum()
+
+                     logger.info(f"Generation calculation: raw_sum={raw_sum}, clipped_sum={clipped_sum}")
+
+                     # Check for negative generator values
+                     negative_gen_columns = []
+                     for col in gen_data.columns:
+                         if (gen_data[col] < 0).any():
+                             negative_gen_columns.append(col)
+                             min_val = gen_data[col].min()
+                             logger.warning(f"Generator column '{col}' has negative values (min: {min_val})")
+
+                     if negative_gen_columns:
+                         logger.info(f"Found {len(negative_gen_columns)} generator columns with negative values: {negative_gen_columns}")
+
+                     # Calculate unmet load if component type mapping available
+                     if hasattr(network, '_component_type_map'):
+                         unmet_load_total = 0.0
+                         for gen_name, gen_type in network._component_type_map.items():
+                             if gen_type == 'UNMET_LOAD' and gen_name in gen_data.columns:
+                                 # Unmet load should be positive (it's generation to meet unserved demand)
+                                 unmet_load_total += max(0, gen_data[gen_name].sum())
+                         unmet_load_mwh = float(unmet_load_total)
+
+             # Calculate load statistics
+             if hasattr(network, 'loads_t') and hasattr(network.loads_t, 'p'):
+                 load_data = network.loads_t.p
+                 if not load_data.empty:
+                     # Debug: Log what's in the loads DataFrame
+                     logger.info(f"Loads DataFrame columns: {list(load_data.columns)}")
+                     logger.info(f"Loads DataFrame shape: {load_data.shape}")
+                     logger.info(f"Sample loads data (first 5 columns): {load_data.iloc[:3, :5].to_dict()}")
+
+                     # CRITICAL: Apply snapshot weightings to convert MW to MWh
+                     weightings = network.snapshot_weightings
+                     if isinstance(weightings, pd.DataFrame):
+                         if 'objective' in weightings.columns:
+                             weighting_values = weightings['objective'].values
+                         else:
+                             weighting_values = weightings.iloc[:, 0].values
+                     else:
+                         weighting_values = weightings.values
+
+                     total_load_mwh = float(abs((load_data.values * weighting_values[:, None]).sum()))
+                     logger.info(f"Total load calculation with weightings: {total_load_mwh} MWh")
+                     logger.info(f"Total load calculation without weightings: {abs(load_data.sum().sum())} MW")
+
+                     # Check if any columns have negative values (which shouldn't be in loads)
+                     negative_columns = []
+                     for col in load_data.columns:
+                         if (load_data[col] < 0).any():
+                             negative_columns.append(col)
+                             min_val = load_data[col].min()
+                             logger.warning(f"Load column '{col}' has negative values (min: {min_val})")
+
+                     if negative_columns:
+                         logger.error(f"Found {len(negative_columns)} load columns with negative values: {negative_columns}")
+                 else:
+                     total_load_mwh = 0.0
+
+             # Calculate transmission losses from links (CORRECTED)
+             total_link_losses_mwh = 0.0
+             total_link_flow_mwh = 0.0
+             if hasattr(network, 'links_t') and hasattr(network.links_t, 'p0'):
+                 link_p0_data = network.links_t.p0  # Power at bus0
+                 link_p1_data = network.links_t.p1  # Power at bus1
+
+                 if not link_p0_data.empty and not link_p1_data.empty:
+                     logger.info(f"Links p0 DataFrame columns: {list(link_p0_data.columns)}")
+                     logger.info(f"Links p0 DataFrame shape: {link_p0_data.shape}")
+                     logger.info(f"Links p1 DataFrame columns: {list(link_p1_data.columns)}")
+                     logger.info(f"Links p1 DataFrame shape: {link_p1_data.shape}")
+
+                     # CORRECT calculation: For each link and timestep, calculate losses properly
+                     # Losses occur when power flows through a link with efficiency < 1.0
+                     # p1 = p0 * efficiency, so losses = p0 - p1 = p0 * (1 - efficiency)
+
+                     link_losses_by_link = {}
+                     total_losses = 0.0
+                     total_flow = 0.0
+
+                     for link_name in link_p0_data.columns:
+                         p0_series = link_p0_data[link_name]  # Power input to link
+                         p1_series = link_p1_data[link_name]  # Power output from link
+
+                         # Calculate losses for this link across all timesteps
+                         # Losses should always be positive regardless of flow direction
+                         # For each timestep: losses = abs(p0) * (1 - efficiency)
+                         # But we don't have efficiency here, so use: losses = abs(p0) - abs(p1)
+
+                         # Calculate losses properly for each timestep
+                         timestep_losses = abs(p0_series) - abs(p1_series)
+                         link_losses = timestep_losses.sum()
+                         link_flow = abs(p0_series).sum()  # Total absolute flow through this link
+
+                         link_losses_by_link[link_name] = {
+                             'losses_mwh': link_losses,
+                             'flow_mwh': link_flow,
+                             'loss_rate': (link_losses / link_flow * 100) if link_flow > 0 else 0
+                         }
+
+                         total_losses += link_losses
+                         total_flow += link_flow
+
+                         # Log details for first few links
+                         if len(link_losses_by_link) <= 5:
+                             avg_p0 = p0_series.mean()
+                             avg_p1 = p1_series.mean()
+                             logger.info(f"  Link '{link_name}': avg_p0={avg_p0:.1f}MW, avg_p1={avg_p1:.1f}MW, losses={link_losses:.1f}MWh, flow={link_flow:.1f}MWh")
+
+                     total_link_losses_mwh = total_losses
+                     total_link_flow_mwh = total_flow
+
+                     # Summary statistics
+                     logger.info(f"Link transmission analysis:")
+                     logger.info(f"  Total link flow: {total_link_flow_mwh:.1f} MWh")
+                     logger.info(f"  Total link losses: {total_link_losses_mwh:.1f} MWh")
+                     logger.info(f"  Average loss rate: {(total_link_losses_mwh/total_link_flow_mwh*100):.2f}%")
+                     logger.info(f"  Number of links: {len(link_losses_by_link)}")
+
+                     # Show top 5 links by losses
+                     top_loss_links = sorted(link_losses_by_link.items(), key=lambda x: x[1]['losses_mwh'], reverse=True)[:5]
+                     logger.info(f"  Top 5 links by losses:")
+                     for link_name, stats in top_loss_links:
+                         logger.info(f"    {link_name}: {stats['losses_mwh']:.1f} MWh ({stats['loss_rate']:.2f}%)")
+
+             # Calculate storage losses if any
+             total_storage_losses_mwh = 0.0
+             storage_charging_mwh = 0.0
+             storage_discharging_mwh = 0.0
+
+             # Check for storage units
+             if hasattr(network, 'storage_units_t') and hasattr(network.storage_units_t, 'p'):
+                 storage_data = network.storage_units_t.p
+                 if not storage_data.empty:
+                     logger.info(f"Storage units DataFrame columns: {list(storage_data.columns)}")
+                     logger.info(f"Storage units DataFrame shape: {storage_data.shape}")
+
+                     # Storage: positive = discharge (generation), negative = charge (consumption)
+                     total_storage_power = storage_data.sum().sum()
+                     storage_discharging_mwh = storage_data.clip(lower=0).sum().sum()  # Positive values
+                     storage_charging_mwh = abs(storage_data.clip(upper=0).sum().sum())  # Negative values made positive
+
+                     logger.info(f"Storage analysis:")
+                     logger.info(f"  Total storage net: {total_storage_power:.1f} MWh")
+                     logger.info(f"  Storage discharging: {storage_discharging_mwh:.1f} MWh")
+                     logger.info(f"  Storage charging: {storage_charging_mwh:.1f} MWh")
+
+                     # Storage losses = charging - discharging (due to round-trip efficiency)
+                     total_storage_losses_mwh = storage_charging_mwh - storage_discharging_mwh
+                     if total_storage_losses_mwh < 0:
+                         logger.warning(f"Negative storage losses: {total_storage_losses_mwh:.1f} MWh (net discharge)")
+                         total_storage_losses_mwh = 0.0  # Don't count net discharge as negative loss
+
+             # Check for other PyPSA components that might consume energy
+             other_consumption_mwh = 0.0
+
+             # Check stores
+             if hasattr(network, 'stores_t') and hasattr(network.stores_t, 'p'):
+                 stores_data = network.stores_t.p
+                 if not stores_data.empty:
+                     stores_consumption = abs(stores_data.sum().sum())
+                     other_consumption_mwh += stores_consumption
+                     logger.info(f"Stores consumption: {stores_consumption:.1f} MWh")
+
+             # Total consumption (link losses already accounted for in PyPSA generation)
+             total_consumption_with_losses_mwh = (total_load_mwh + total_storage_losses_mwh + other_consumption_mwh)
+
+             # Detailed energy balance analysis
+             logger.info(f"=== DETAILED ENERGY BALANCE ANALYSIS ===")
+             logger.info(f"GENERATION SIDE:")
+             logger.info(f"  Total generation: {total_generation_mwh:.1f} MWh")
+             logger.info(f"  Storage discharging: {storage_discharging_mwh:.1f} MWh")
+             logger.info(f"  Total supply: {total_generation_mwh + storage_discharging_mwh:.1f} MWh")
+             logger.info(f"")
+             logger.info(f"CONSUMPTION SIDE:")
+             logger.info(f"  Load demand: {total_load_mwh:.1f} MWh")
+             logger.info(f"  Storage charging: {storage_charging_mwh:.1f} MWh")
+             logger.info(f"  Link losses: {total_link_losses_mwh:.1f} MWh (for info only - already in generation)")
+             logger.info(f"  Storage losses: {total_storage_losses_mwh:.1f} MWh")
+             logger.info(f"  Other consumption: {other_consumption_mwh:.1f} MWh")
+             logger.info(f"  Total consumption: {total_load_mwh + storage_charging_mwh + total_storage_losses_mwh + other_consumption_mwh:.1f} MWh")
+             logger.info(f"")
+             logger.info(f"BALANCE CHECK:")
+             total_supply = total_generation_mwh + storage_discharging_mwh
+             total_consumption = total_load_mwh + storage_charging_mwh + total_storage_losses_mwh + other_consumption_mwh
+             balance_error = total_supply - total_consumption
+             logger.info(f"  Supply - Consumption = {balance_error:.1f} MWh")
+             logger.info(f"  Balance error %: {(balance_error/total_supply*100):.3f}%")
+             logger.info(f"=========================================")
+
+             # Calculate carrier-specific statistics first
+             carrier_stats = self._calculate_carrier_statistics(conn, network_id, network)
+
+             # Calculate totals from carrier statistics
+             total_capital_cost = sum(carrier_stats["capital_cost_by_carrier"].values())
+             total_operational_cost = sum(carrier_stats["operational_cost_by_carrier"].values())
+             total_emissions = sum(carrier_stats["emissions_by_carrier"].values())
+
+             # Calculate derived statistics
+             total_cost = solve_result.get('objective_value', 0.0)
+             unmet_load_percentage = (unmet_load_mwh / total_load_mwh * 100) if total_load_mwh > 0 else 0.0
+             load_factor = (total_generation_mwh / total_load_mwh) if total_load_mwh > 0 else 0.0
+
+             logger.info(f"Cost breakdown: Capital=${total_capital_cost:.0f}, Operational=${total_operational_cost:.0f}, Total Objective=${total_cost:.0f}")
+
+             # Create nested structure expected by frontend
+             network_statistics = {
+                 "core_summary": {
+                     "total_generation_mwh": total_generation_mwh,
+                     "total_demand_mwh": total_load_mwh,  # Frontend expects "demand" not "load"
+                     "total_cost": total_cost,
+                     "load_factor": load_factor,
+                     "unserved_energy_mwh": unmet_load_mwh
+                 },
+                 "custom_statistics": {
+                     # Include carrier-specific statistics
+                     **carrier_stats,
+                     "total_capital_cost": total_capital_cost,  # Sum from carriers
+                     "total_operational_cost": total_operational_cost,  # Sum from carriers
+                     "total_currency_cost": total_cost,  # PyPSA objective (discounted total)
+                     "total_emissions_tons_co2": total_emissions,  # Sum from carriers
+                     "average_price_per_mwh": (total_cost / total_generation_mwh) if total_generation_mwh > 0 else 0.0,
+                     "unmet_load_percentage": unmet_load_percentage,
+                     "max_unmet_load_hour_mw": 0.0  # TODO: Calculate max hourly unmet load
+                 },
+                 "runtime_info": {
+                     "component_count": (
+                         len(network.buses) + len(network.generators) + len(network.loads) +
+                         len(network.lines) + len(network.links)
+                     ) if hasattr(network, 'buses') else 0,
+                     "bus_count": len(network.buses) if hasattr(network, 'buses') else 0,
+                     "generator_count": len(network.generators) if hasattr(network, 'generators') else 0,
+                     "load_count": len(network.loads) if hasattr(network, 'loads') else 0,
+                     "snapshot_count": len(network.snapshots) if hasattr(network, 'snapshots') else 0
+                 }
+             }
+
+             logger.info(f"Calculated network statistics: core_summary={network_statistics['core_summary']}")
+             logger.info(f"Calculated custom statistics: custom_statistics={network_statistics['custom_statistics']}")
+             return network_statistics
+
+         except Exception as e:
+             logger.error(f"Failed to calculate network statistics: {e}", exc_info=True)
+             # Return empty structure matching expected format
+             return {
+                 "core_summary": {
+                     "total_generation_mwh": 0.0,
+                     "total_demand_mwh": 0.0,
+                     "total_cost": solve_result.get('objective_value', 0.0),
+                     "load_factor": 0.0,
+                     "unserved_energy_mwh": 0.0
+                 },
+                 "custom_statistics": {
+                     "total_capital_cost": 0.0,
+                     "total_operational_cost": 0.0,
+                     "total_currency_cost": 0.0,
+                     "total_emissions_tons_co2": 0.0,
+                     "average_price_per_mwh": 0.0,
+                     "unmet_load_percentage": 0.0,
+                     "max_unmet_load_hour_mw": 0.0
+                 },
+                 "runtime_info": {
+                     "component_count": 0,
+                     "bus_count": 0,
+                     "generator_count": 0,
+                     "load_count": 0,
+                     "snapshot_count": 0
+                 },
+                 "error": str(e)
+             }
+
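The snapshot-weightings extraction in this method reappears verbatim several times in _calculate_carrier_statistics below. A hypothetical helper that centralizes it (a refactoring sketch, not part of the released file):

    import numpy as np
    import pandas as pd

    def get_weighting_values(network) -> np.ndarray:
        """Per-snapshot weights as a 1-D array, mirroring the inline logic above (sketch).

        Prefers the 'objective' column when snapshot_weightings is a DataFrame,
        falls back to its first column, and accepts a plain Series otherwise.
        """
        weightings = network.snapshot_weightings
        if isinstance(weightings, pd.DataFrame):
            if "objective" in weightings.columns:
                return weightings["objective"].values
            return weightings.iloc[:, 0].values
        return weightings.values

    # Matches the MW-to-MWh conversion used above:
    # total_mwh = float((gen_data.clip(lower=0).values * get_weighting_values(network)[:, None]).sum())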
+     def _serialize_results_json(self, solve_result: Dict[str, Any]) -> str:
+         """Serialize solve results to JSON string."""
+         import json
+         try:
+             # Create a clean results dictionary
+             results = {
+                 "success": solve_result.get("success", False),
+                 "status": solve_result.get("status", "unknown"),
+                 "solve_time": solve_result.get("solve_time", 0.0),
+                 "objective_value": solve_result.get("objective_value"),
+                 "solver_name": solve_result.get("solver_name", "unknown"),
+                 "run_id": solve_result.get("run_id"),
+                 "network_statistics": solve_result.get("network_statistics", {}),
+                 "pypsa_result": solve_result.get("pypsa_result", {})
+             }
+             return json.dumps(results, default=self._json_serializer)
+         except Exception as e:
+             logger.warning(f"Failed to serialize results JSON: {e}")
+             return json.dumps({"error": "serialization_failed"})
+
+     def _serialize_metadata_json(self, solve_result: Dict[str, Any]) -> str:
+         """Serialize solve metadata to JSON string."""
+         import json
+         try:
+             metadata = {
+                 "solver_name": solve_result.get("solver_name", "unknown"),
+                 "run_id": solve_result.get("run_id"),
+                 "multi_period": solve_result.get("multi_period", False),
+                 "years": solve_result.get("years", []),
+                 "network_name": solve_result.get("network_name"),
+                 "num_snapshots": solve_result.get("num_snapshots", 0)
+             }
+             return json.dumps(metadata, default=self._json_serializer)
+         except Exception as e:
+             logger.warning(f"Failed to serialize metadata JSON: {e}")
+             return json.dumps({"error": "serialization_failed"})
+
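Both serializers pass default=self._json_serializer, which is referenced but not shown in this excerpt. A plausible implementation would coerce the numpy and pandas scalars these result dictionaries can contain; the body below is an assumption, not the released code:

    import numpy as np
    import pandas as pd

    def _json_serializer(self, obj):
        """json.dumps fallback: coerce numpy/pandas values to plain Python (sketch)."""
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, pd.Timestamp):
            return obj.isoformat()
        raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")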
693
+ def _calculate_carrier_statistics(self, conn, network_id: int, network: 'pypsa.Network') -> Dict[str, Any]:
694
+ """Calculate carrier-specific statistics that the frontend expects."""
695
+ try:
696
+ # Initialize carrier statistics (separate power and energy capacity like old solver)
697
+ carrier_stats = {
698
+ "dispatch_by_carrier": {},
699
+ "power_capacity_by_carrier": {}, # MW - Generators + Storage Units (power)
700
+ "energy_capacity_by_carrier": {}, # MWh - Stores + Storage Units (energy)
701
+ "emissions_by_carrier": {},
702
+ "capital_cost_by_carrier": {},
703
+ "operational_cost_by_carrier": {},
704
+ "total_system_cost_by_carrier": {}
705
+ }
706
+
707
+ # Get all carriers from database
708
+ cursor = conn.execute("""
709
+ SELECT DISTINCT name FROM carriers WHERE network_id = ?
710
+ """, (network_id,))
711
+ all_carriers = [row[0] for row in cursor.fetchall()]
712
+
713
+ # Initialize all carriers with zero values
714
+ for carrier in all_carriers:
715
+ carrier_stats["dispatch_by_carrier"][carrier] = 0.0
716
+ carrier_stats["power_capacity_by_carrier"][carrier] = 0.0
717
+ carrier_stats["energy_capacity_by_carrier"][carrier] = 0.0
718
+ carrier_stats["emissions_by_carrier"][carrier] = 0.0
719
+ carrier_stats["capital_cost_by_carrier"][carrier] = 0.0
720
+ carrier_stats["operational_cost_by_carrier"][carrier] = 0.0
721
+ carrier_stats["total_system_cost_by_carrier"][carrier] = 0.0
722
+
723
+ # Calculate dispatch by carrier (generation + storage discharge)
724
+
725
+ # 1. GENERATORS - All generation
726
+ if hasattr(network, 'generators_t') and hasattr(network.generators_t, 'p'):
727
+ # Get generator-carrier mapping
728
+ cursor = conn.execute("""
729
+ SELECT c.name as component_name, carr.name as carrier_name
730
+ FROM components c
731
+ JOIN carriers carr ON c.carrier_id = carr.id
732
+ WHERE c.network_id = ? AND c.component_type = 'GENERATOR'
733
+ """, (network_id,))
734
+
735
+ generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
736
+
737
+ # Calculate dispatch for each generator
738
+ for gen_name in network.generators_t.p.columns:
739
+ if gen_name in generator_carriers:
740
+ carrier_name = generator_carriers[gen_name]
741
+ # Apply snapshot weightings to convert MW to MWh
742
+ weightings = network.snapshot_weightings
743
+ if isinstance(weightings, pd.DataFrame):
744
+ if 'objective' in weightings.columns:
745
+ weighting_values = weightings['objective'].values
746
+ else:
747
+ weighting_values = weightings.iloc[:, 0].values
748
+ else:
749
+ weighting_values = weightings.values
750
+
751
+ generation_mwh = float((network.generators_t.p[gen_name].values * weighting_values).sum())
752
+ if carrier_name in carrier_stats["dispatch_by_carrier"]:
753
+ carrier_stats["dispatch_by_carrier"][carrier_name] += generation_mwh
754
+
755
+ # 2. STORAGE_UNITS - Discharge only (positive values)
756
+ if hasattr(network, 'storage_units_t') and hasattr(network.storage_units_t, 'p'):
757
+ # Get storage unit-carrier mapping
758
+ cursor = conn.execute("""
759
+ SELECT c.name as component_name, carr.name as carrier_name
760
+ FROM components c
761
+ JOIN carriers carr ON c.carrier_id = carr.id
762
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
763
+ """, (network_id,))
764
+
765
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
766
+
767
+ # Calculate dispatch for each storage unit (discharge only)
768
+ for su_name in network.storage_units_t.p.columns:
769
+ if su_name in storage_unit_carriers:
770
+ carrier_name = storage_unit_carriers[su_name]
771
+ # Apply snapshot weightings and only count positive discharge
772
+ weightings = network.snapshot_weightings
773
+ if isinstance(weightings, pd.DataFrame):
774
+ if 'objective' in weightings.columns:
775
+ weighting_values = weightings['objective'].values
776
+ else:
777
+ weighting_values = weightings.iloc[:, 0].values
778
+ else:
779
+ weighting_values = weightings.values
780
+
781
+ # Only count positive values (discharge)
782
+ su_power = network.storage_units_t.p[su_name]
783
+ discharge_mwh = float((su_power.clip(lower=0) * weighting_values).sum())
784
+
785
+ if carrier_name in carrier_stats["dispatch_by_carrier"]:
786
+ carrier_stats["dispatch_by_carrier"][carrier_name] += discharge_mwh
787
+
788
+ # 3. STORES - Discharge only (positive values)
789
+ if hasattr(network, 'stores_t') and hasattr(network.stores_t, 'p'):
790
+ # Get store-carrier mapping
791
+ cursor = conn.execute("""
792
+ SELECT c.name as component_name, carr.name as carrier_name
793
+ FROM components c
794
+ JOIN carriers carr ON c.carrier_id = carr.id
795
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
796
+ """, (network_id,))
797
+
798
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
799
+
800
+ # Calculate dispatch for each store (discharge only)
801
+ for store_name in network.stores_t.p.columns:
802
+ if store_name in store_carriers:
803
+ carrier_name = store_carriers[store_name]
804
+ # Apply snapshot weightings and only count positive discharge
805
+ weightings = network.snapshot_weightings
806
+ if isinstance(weightings, pd.DataFrame):
807
+ if 'objective' in weightings.columns:
808
+ weighting_values = weightings['objective'].values
809
+ else:
810
+ weighting_values = weightings.iloc[:, 0].values
811
+ else:
812
+ weighting_values = weightings.values
813
+
814
+ # Only count positive values (discharge)
815
+ store_power = network.stores_t.p[store_name]
816
+ discharge_mwh = float((store_power.clip(lower=0) * weighting_values).sum())
817
+
818
+ if carrier_name in carrier_stats["dispatch_by_carrier"]:
819
+ carrier_stats["dispatch_by_carrier"][carrier_name] += discharge_mwh
820
+
821
+ # Calculate capacity by carrier (power + energy capacity)
822
+
823
+ # 1. GENERATORS - Power capacity (MW)
824
+ if hasattr(network, 'generators') and not network.generators.empty:
825
+ # Get generator-carrier mapping
826
+ cursor = conn.execute("""
827
+ SELECT c.name as component_name, carr.name as carrier_name
828
+ FROM components c
829
+ JOIN carriers carr ON c.carrier_id = carr.id
830
+ WHERE c.network_id = ? AND c.component_type = 'GENERATOR'
831
+ """, (network_id,))
832
+
833
+ generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
834
+
835
+ # Calculate capacity for each generator
836
+ for gen_name in network.generators.index:
837
+ if gen_name in generator_carriers:
838
+ carrier_name = generator_carriers[gen_name]
839
+ # Use p_nom_opt if available, otherwise p_nom (POWER capacity)
840
+ if 'p_nom_opt' in network.generators.columns:
841
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom_opt'])
842
+ else:
843
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom']) if 'p_nom' in network.generators.columns else 0.0
844
+
845
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
846
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
847
+
848
+ # 2. STORAGE_UNITS - Power capacity (MW) + Energy capacity (MWh)
849
+ if hasattr(network, 'storage_units') and not network.storage_units.empty:
850
+ # Get storage unit-carrier mapping
851
+ cursor = conn.execute("""
852
+ SELECT c.name as component_name, carr.name as carrier_name
853
+ FROM components c
854
+ JOIN carriers carr ON c.carrier_id = carr.id
855
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
856
+ """, (network_id,))
857
+
858
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
859
+
860
+ # Calculate capacity for each storage unit
861
+ for su_name in network.storage_units.index:
862
+ if su_name in storage_unit_carriers:
863
+ carrier_name = storage_unit_carriers[su_name]
864
+
865
+ # Power capacity (MW)
866
+ if 'p_nom_opt' in network.storage_units.columns:
867
+ p_nom_opt = float(network.storage_units.loc[su_name, 'p_nom_opt'])
868
+ else:
869
+ p_nom_opt = float(network.storage_units.loc[su_name, 'p_nom']) if 'p_nom' in network.storage_units.columns else 0.0
870
+
871
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
872
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += p_nom_opt
873
+
874
+ # Energy capacity (MWh) using max_hours (matching old solver)
875
+ max_hours = 1.0 # Default from validation data
876
+ if 'max_hours' in network.storage_units.columns:
877
+ max_hours = float(network.storage_units.loc[su_name, 'max_hours'])
878
+ energy_capacity_mwh = p_nom_opt * max_hours
879
+
880
+ if carrier_name in carrier_stats["energy_capacity_by_carrier"]:
881
+ carrier_stats["energy_capacity_by_carrier"][carrier_name] += energy_capacity_mwh
882
+
883
+ # 3. STORES - Energy capacity (MWh) only
884
+ if hasattr(network, 'stores') and not network.stores.empty:
885
+ # Get store-carrier mapping
886
+ cursor = conn.execute("""
887
+ SELECT c.name as component_name, carr.name as carrier_name
888
+ FROM components c
889
+ JOIN carriers carr ON c.carrier_id = carr.id
890
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
891
+ """, (network_id,))
892
+
893
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
894
+
895
+ # Calculate energy capacity for each store
896
+ for store_name in network.stores.index:
897
+ if store_name in store_carriers:
898
+ carrier_name = store_carriers[store_name]
899
+
900
+ # Energy capacity (MWh) - stores don't have power capacity, only energy
901
+ if 'e_nom_opt' in network.stores.columns:
902
+ e_nom_opt = float(network.stores.loc[store_name, 'e_nom_opt'])
903
+ else:
904
+ e_nom_opt = float(network.stores.loc[store_name, 'e_nom']) if 'e_nom' in network.stores.columns else 0.0
905
+
906
+ # Stores contribute only to energy capacity
907
+ if carrier_name in carrier_stats["energy_capacity_by_carrier"]:
908
+ carrier_stats["energy_capacity_by_carrier"][carrier_name] += e_nom_opt
909
+
910
+ # 4. LINES - Apparent power capacity (MVA)
911
+ if hasattr(network, 'lines') and not network.lines.empty:
912
+ # Get line-carrier mapping
913
+ cursor = conn.execute("""
914
+ SELECT c.name as component_name, carr.name as carrier_name
915
+ FROM components c
916
+ JOIN carriers carr ON c.carrier_id = carr.id
917
+ WHERE c.network_id = ? AND c.component_type = 'LINE'
918
+ """, (network_id,))
919
+
920
+ line_carriers = {row[0]: row[1] for row in cursor.fetchall()}
921
+
922
+ # Calculate capacity for each line
923
+ for line_name in network.lines.index:
924
+ if line_name in line_carriers:
925
+ carrier_name = line_carriers[line_name]
926
+
927
+ # Apparent power capacity (MVA) - convert to MW equivalent for consistency
928
+ if 's_nom_opt' in network.lines.columns:
929
+ capacity_mva = float(network.lines.loc[line_name, 's_nom_opt'])
930
+ else:
931
+ capacity_mva = float(network.lines.loc[line_name, 's_nom']) if 's_nom' in network.lines.columns else 0.0
932
+
933
+ # Convert MVA to MW (assume power factor = 1 for simplicity)
934
+ capacity_mw = capacity_mva
935
+
936
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
937
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
938
+
939
+ # 5. LINKS - Power capacity (MW)
940
+ if hasattr(network, 'links') and not network.links.empty:
941
+ # Get link-carrier mapping
942
+ cursor = conn.execute("""
943
+ SELECT c.name as component_name, carr.name as carrier_name
944
+ FROM components c
945
+ JOIN carriers carr ON c.carrier_id = carr.id
946
+ WHERE c.network_id = ? AND c.component_type = 'LINK'
947
+ """, (network_id,))
948
+
949
+ link_carriers = {row[0]: row[1] for row in cursor.fetchall()}
950
+
951
+ # Calculate capacity for each link
952
+ for link_name in network.links.index:
953
+ if link_name in link_carriers:
954
+ carrier_name = link_carriers[link_name]
955
+
956
+ # Power capacity (MW)
957
+ if 'p_nom_opt' in network.links.columns:
958
+ capacity_mw = float(network.links.loc[link_name, 'p_nom_opt'])
959
+ else:
960
+ capacity_mw = float(network.links.loc[link_name, 'p_nom']) if 'p_nom' in network.links.columns else 0.0
961
+
962
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
963
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
964
+
965
+ # Calculate emissions by carrier
966
+ cursor = conn.execute("""
967
+ SELECT name, co2_emissions
968
+ FROM carriers
969
+ WHERE network_id = ? AND co2_emissions IS NOT NULL
970
+ ORDER BY name
971
+ """, (network_id,))
972
+
973
+ emission_factors = {}
974
+ for row in cursor.fetchall():
975
+ carrier_name, co2_emissions = row
976
+ emission_factors[carrier_name] = co2_emissions
977
+
978
+ # Calculate emissions = dispatch * emission_factor
979
+ for carrier, dispatch_mwh in carrier_stats["dispatch_by_carrier"].items():
980
+ emission_factor = emission_factors.get(carrier, 0.0)
981
+ emissions = dispatch_mwh * emission_factor
982
+ carrier_stats["emissions_by_carrier"][carrier] = emissions
983
+
984
+ # Calculate cost statistics by carrier (all component types)
985
+
986
+ # 1. GENERATORS - Operational and capital costs
987
+ if hasattr(network, 'generators') and not network.generators.empty:
988
+ # Get generator-carrier mapping
989
+ cursor = conn.execute("""
990
+ SELECT c.name as component_name, carr.name as carrier_name
991
+ FROM components c
992
+ JOIN carriers carr ON c.carrier_id = carr.id
993
+ WHERE c.network_id = ? AND c.component_type = 'GENERATOR'
994
+ """, (network_id,))
995
+
996
+ generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
997
+
998
+ # Calculate operational costs based on dispatch and marginal costs
999
+ if hasattr(network, 'generators_t') and hasattr(network.generators_t, 'p'):
1000
+ for gen_name in network.generators.index:
1001
+ if gen_name in generator_carriers and gen_name in network.generators_t.p.columns:
1002
+ carrier_name = generator_carriers[gen_name]
1003
+
1004
+ # Get marginal cost for this generator
1005
+ marginal_cost = 0.0
1006
+ if 'marginal_cost' in network.generators.columns:
1007
+ marginal_cost = float(network.generators.loc[gen_name, 'marginal_cost'])
1008
+
1009
+ # Calculate operational cost = dispatch * marginal_cost (with weightings)
1010
+ weightings = network.snapshot_weightings
1011
+ if isinstance(weightings, pd.DataFrame):
1012
+ if 'objective' in weightings.columns:
1013
+ weighting_values = weightings['objective'].values
1014
+ else:
1015
+ weighting_values = weightings.iloc[:, 0].values
1016
+ else:
1017
+ weighting_values = weightings.values
1018
+
1019
+ dispatch_mwh = float((network.generators_t.p[gen_name].values * weighting_values).sum())
1020
+ operational_cost = dispatch_mwh * marginal_cost
1021
+
1022
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
1023
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
1024
+
1025
+ # Calculate annual capital costs for all operational generators (matching old solver per-year logic)
1026
+ for gen_name in network.generators.index:
1027
+ if gen_name in generator_carriers:
1028
+ carrier_name = generator_carriers[gen_name]
1029
+
1030
+ # Get capital cost and capacity
1031
+ capital_cost_per_mw = 0.0
1032
+ if 'capital_cost' in network.generators.columns:
1033
+ capital_cost_per_mw = float(network.generators.loc[gen_name, 'capital_cost'])
1034
+
1035
+ capacity_mw = 0.0
1036
+ if 'p_nom_opt' in network.generators.columns:
1037
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom_opt'])
1038
+ elif 'p_nom' in network.generators.columns:
1039
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom'])
1040
+
1041
+ # Annual capital cost for operational assets (undiscounted)
1042
+ annual_capital_cost = capital_cost_per_mw * capacity_mw
1043
+
1044
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
1045
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
1046
+
1047
+ # Calculate operational costs including fixed costs (matching old solver)
1048
+ for gen_name in network.generators.index:
1049
+ if gen_name in generator_carriers:
1050
+ carrier_name = generator_carriers[gen_name]
1051
+
1052
+ # Fixed O&M costs (annual cost per MW of capacity)
1053
+ fixed_cost_per_mw = 0.0
1054
+ if 'fixed_cost' in network.generators.columns:
1055
+ fixed_cost_per_mw = float(network.generators.loc[gen_name, 'fixed_cost'])
1056
+
1057
+ capacity_mw = 0.0
1058
+ if 'p_nom_opt' in network.generators.columns:
1059
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom_opt'])
1060
+ elif 'p_nom' in network.generators.columns:
1061
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom'])
1062
+
1063
+ fixed_cost_total = fixed_cost_per_mw * capacity_mw
1064
+
1065
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
1066
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += fixed_cost_total
1067
+
1068
+ # 2. STORAGE_UNITS - Operational and capital costs
1069
+ if hasattr(network, 'storage_units') and not network.storage_units.empty:
1070
+ # Get storage unit-carrier mapping
1071
+ cursor = conn.execute("""
1072
+ SELECT c.name as component_name, carr.name as carrier_name
1073
+ FROM components c
1074
+ JOIN carriers carr ON c.carrier_id = carr.id
1075
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
1076
+ """, (network_id,))
1077
+
1078
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
1079
+
1080
+ # Calculate operational costs (marginal costs for storage units)
1081
+ if hasattr(network, 'storage_units_t') and hasattr(network.storage_units_t, 'p'):
1082
+ for su_name in network.storage_units.index:
1083
+ if su_name in storage_unit_carriers and su_name in network.storage_units_t.p.columns:
1084
+ carrier_name = storage_unit_carriers[su_name]
1085
+
1086
+ # Get marginal cost for this storage unit
1087
+ marginal_cost = 0.0
1088
+ if 'marginal_cost' in network.storage_units.columns:
1089
+ marginal_cost = float(network.storage_units.loc[su_name, 'marginal_cost'])
1090
+
1091
+ # Calculate operational cost = dispatch * marginal_cost (discharge only)
1092
+ weightings = network.snapshot_weightings
1093
+ if isinstance(weightings, pd.DataFrame):
1094
+ if 'objective' in weightings.columns:
1095
+ weighting_values = weightings['objective'].values
1096
+ else:
1097
+ weighting_values = weightings.iloc[:, 0].values
1098
+ else:
1099
+ weighting_values = weightings.values
1100
+
1101
+ su_power = network.storage_units_t.p[su_name]
1102
+ discharge_mwh = float((su_power.clip(lower=0) * weighting_values).sum())
1103
+ operational_cost = discharge_mwh * marginal_cost
1104
+
1105
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
1106
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
1107
+
1108
+ # Calculate fixed O&M costs for storage units (matching old solver)
1109
+ for su_name in network.storage_units.index:
1110
+ if su_name in storage_unit_carriers:
1111
+ carrier_name = storage_unit_carriers[su_name]
1112
+
1113
+ # Fixed O&M costs (annual cost per MW of capacity)
1114
+ fixed_cost_per_mw = 0.0
1115
+ if 'fixed_cost' in network.storage_units.columns:
1116
+ fixed_cost_per_mw = float(network.storage_units.loc[su_name, 'fixed_cost'])
1117
+
1118
+ capacity_mw = 0.0
1119
+ if 'p_nom_opt' in network.storage_units.columns:
1120
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom_opt'])
1121
+ elif 'p_nom' in network.storage_units.columns:
1122
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom'])
1123
+
1124
+ fixed_cost_total = fixed_cost_per_mw * capacity_mw
1125
+
1126
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
1127
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += fixed_cost_total
1128
+
1129
+ # Calculate annual capital costs for all operational storage units
1130
+ for su_name in network.storage_units.index:
1131
+ if su_name in storage_unit_carriers:
1132
+ carrier_name = storage_unit_carriers[su_name]
1133
+
1134
+ # Get capital cost for this storage unit
1135
+ capital_cost_per_mw = 0.0
1136
+ if 'capital_cost' in network.storage_units.columns:
1137
+ capital_cost_per_mw = float(network.storage_units.loc[su_name, 'capital_cost'])
1138
+
1139
+ # Get capacity
1140
+ capacity_mw = 0.0
1141
+ if 'p_nom_opt' in network.storage_units.columns:
1142
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom_opt'])
1143
+ elif 'p_nom' in network.storage_units.columns:
1144
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom'])
1145
+
1146
+ # Annual capital cost for operational assets (undiscounted)
1147
+ annual_capital_cost = capital_cost_per_mw * capacity_mw
1148
+
1149
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
1150
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
1151
+
1152
+ # 3. STORES - Operational and capital costs
1153
+ if hasattr(network, 'stores') and not network.stores.empty:
1154
+ # Get store-carrier mapping
1155
+ cursor = conn.execute("""
1156
+ SELECT c.name as component_name, carr.name as carrier_name
1157
+ FROM components c
1158
+ JOIN carriers carr ON c.carrier_id = carr.id
1159
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
1160
+ """, (network_id,))
1161
+
1162
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
1163
+
1164
+ # Calculate operational costs (marginal costs for stores)
1165
+ if hasattr(network, 'stores_t') and hasattr(network.stores_t, 'p'):
1166
+ for store_name in network.stores.index:
1167
+ if store_name in store_carriers and store_name in network.stores_t.p.columns:
1168
+ carrier_name = store_carriers[store_name]
1169
+
1170
+ # Get marginal cost for this store
1171
+ marginal_cost = 0.0
1172
+ if 'marginal_cost' in network.stores.columns:
1173
+ marginal_cost = float(network.stores.loc[store_name, 'marginal_cost'])
1174
+
1175
+ # Calculate operational cost = dispatch * marginal_cost (discharge only)
1176
+ weightings = network.snapshot_weightings
1177
+ if isinstance(weightings, pd.DataFrame):
1178
+ if 'objective' in weightings.columns:
1179
+ weighting_values = weightings['objective'].values
1180
+ else:
1181
+ weighting_values = weightings.iloc[:, 0].values
1182
+ else:
1183
+ weighting_values = weightings.values
1184
+
1185
+ store_power = network.stores_t.p[store_name]
1186
+ discharge_mwh = float((store_power.clip(lower=0) * weighting_values).sum())
1187
+ operational_cost = discharge_mwh * marginal_cost
1188
+
1189
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
1190
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
1191
+
1192
+ # Calculate annual capital costs for all operational stores (based on energy capacity)
1193
+ for store_name in network.stores.index:
1194
+ if store_name in store_carriers:
1195
+ carrier_name = store_carriers[store_name]
1196
+
1197
+ # Get capital cost for this store (per MWh)
1198
+ capital_cost_per_mwh = 0.0
1199
+ if 'capital_cost' in network.stores.columns:
1200
+ capital_cost_per_mwh = float(network.stores.loc[store_name, 'capital_cost'])
1201
+
1202
+ # Get energy capacity
1203
+ energy_capacity_mwh = 0.0
1204
+ if 'e_nom_opt' in network.stores.columns:
1205
+ energy_capacity_mwh = float(network.stores.loc[store_name, 'e_nom_opt'])
1206
+ elif 'e_nom' in network.stores.columns:
1207
+ energy_capacity_mwh = float(network.stores.loc[store_name, 'e_nom'])
1208
+
1209
+ # Annual capital cost for operational assets (undiscounted)
1210
+ annual_capital_cost = capital_cost_per_mwh * energy_capacity_mwh
1211
+
1212
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
1213
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
1214
+
+ # 4. LINES - Capital costs only (no operational costs for transmission lines)
+ if hasattr(network, 'lines') and not network.lines.empty:
+ # Get line-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'LINE'
+ """, (network_id,))
+
+ line_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ # Calculate capital costs for lines (based on s_nom_opt capacity)
+ for line_name in network.lines.index:
+ if line_name in line_carriers:
+ carrier_name = line_carriers[line_name]
+
+ # Get capital cost for this line (per MVA)
+ capital_cost_per_mva = 0.0
+ if 'capital_cost' in network.lines.columns:
+ capital_cost_per_mva = float(network.lines.loc[line_name, 'capital_cost'])
+
+ # Get apparent power capacity (MVA)
+ capacity_mva = 0.0
+ if 's_nom_opt' in network.lines.columns:
+ capacity_mva = float(network.lines.loc[line_name, 's_nom_opt'])
+ elif 's_nom' in network.lines.columns:
+ capacity_mva = float(network.lines.loc[line_name, 's_nom'])
+
+ # Annual capital cost for operational assets (undiscounted)
+ annual_capital_cost = capacity_mva * capital_cost_per_mva
+
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
+
+ # 5. LINKS - Capital and operational costs
+ if hasattr(network, 'links') and not network.links.empty:
+ # Get link-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'LINK'
+ """, (network_id,))
+
+ link_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ # Calculate operational costs (marginal costs for links)
+ if hasattr(network, 'links_t') and hasattr(network.links_t, 'p0'):
+ for link_name in network.links.index:
+ if link_name in link_carriers and link_name in network.links_t.p0.columns:
+ carrier_name = link_carriers[link_name]
+
+ # Get marginal cost for this link
+ marginal_cost = 0.0
+ if 'marginal_cost' in network.links.columns:
+ marginal_cost = float(network.links.loc[link_name, 'marginal_cost'])
+
+ # Calculate operational cost = flow * marginal_cost (use absolute flow)
+ weightings = network.snapshot_weightings
+ if isinstance(weightings, pd.DataFrame):
+ if 'objective' in weightings.columns:
+ weighting_values = weightings['objective'].values
+ else:
+ weighting_values = weightings.iloc[:, 0].values
+ else:
+ weighting_values = weightings.values
+
+ # Use absolute flow for cost calculation
+ link_flow = abs(network.links_t.p0[link_name])
+ flow_mwh = float((link_flow * weighting_values).sum())
+ operational_cost = flow_mwh * marginal_cost
+
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
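+
+ # Illustrative sketch of the weighting step above (values assumed, not from the source):
+ # with 3-hourly snapshots, snapshot_weightings['objective'] is 3.0 per snapshot, so
+ # |p0| = [10, 20] MW gives flow_mwh = (10 + 20) * 3.0 = 90 MWh, and with
+ # marginal_cost = 5 USD/MWh the operational cost is 450 USD.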
+
+ # Calculate capital costs for links
+ for link_name in network.links.index:
+ if link_name in link_carriers:
+ carrier_name = link_carriers[link_name]
+
+ # Get capital cost for this link (per MW)
+ capital_cost_per_mw = 0.0
+ if 'capital_cost' in network.links.columns:
+ capital_cost_per_mw = float(network.links.loc[link_name, 'capital_cost'])
+
+ # Get power capacity (MW)
+ capacity_mw = 0.0
+ if 'p_nom_opt' in network.links.columns:
+ capacity_mw = float(network.links.loc[link_name, 'p_nom_opt'])
+ elif 'p_nom' in network.links.columns:
+ capacity_mw = float(network.links.loc[link_name, 'p_nom'])
+
+ # Annual capital cost for operational assets (undiscounted)
+ annual_capital_cost = capacity_mw * capital_cost_per_mw
+
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
+
+ # Calculate total system cost = capital + operational
+ for carrier in all_carriers:
+ capital = carrier_stats["capital_cost_by_carrier"][carrier]
+ operational = carrier_stats["operational_cost_by_carrier"][carrier]
+ carrier_stats["total_system_cost_by_carrier"][carrier] = capital + operational
+
+ logger.info(f"Calculated carrier statistics for {len(all_carriers)} carriers")
+ logger.info(f"Total dispatch: {sum(carrier_stats['dispatch_by_carrier'].values()):.2f} MWh")
+ logger.info(f"Total power capacity: {sum(carrier_stats['power_capacity_by_carrier'].values()):.2f} MW")
+ logger.info(f"Total energy capacity: {sum(carrier_stats['energy_capacity_by_carrier'].values()):.2f} MWh")
+ logger.info(f"Total emissions: {sum(carrier_stats['emissions_by_carrier'].values()):.2f} tCO2")
+ logger.info(f"Total capital cost: {sum(carrier_stats['capital_cost_by_carrier'].values()):.2f} USD")
+ logger.info(f"Total operational cost: {sum(carrier_stats['operational_cost_by_carrier'].values()):.2f} USD")
+ logger.info(f"Total system cost: {sum(carrier_stats['total_system_cost_by_carrier'].values()):.2f} USD")
+
+ return carrier_stats
+
+ except Exception as e:
+ logger.error(f"Failed to calculate carrier statistics: {e}", exc_info=True)
+ return {
+ "dispatch_by_carrier": {},
+ "power_capacity_by_carrier": {},
+ "energy_capacity_by_carrier": {},
+ "emissions_by_carrier": {},
+ "capital_cost_by_carrier": {},
+ "operational_cost_by_carrier": {},
+ "total_system_cost_by_carrier": {}
+ }
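+
+ # Shape of the returned statistics, sketched with hypothetical carrier names:
+ #   {"dispatch_by_carrier": {"wind": 1.2e6, "gas": 3.4e5}, ...}
+ # Dispatch is in MWh, capacities in MW/MWh, emissions in tCO2, costs in USD;
+ # the same keys come back (with empty dicts) on failure, so callers can rely on them.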
+
+ def _store_year_based_statistics(
+ self,
+ conn,
+ network_id: int,
+ network: 'pypsa.Network',
+ year_statistics: Dict[int, Dict[str, Any]],
+ scenario_id: Optional[int]
+ ) -> int:
+ """Store year-based statistics to the database"""
+ try:
+ import json
+ stored_count = 0
+
+ # Use master scenario if no scenario specified
+ if scenario_id is None:
+ from pyconvexity.models import get_master_scenario_id
+ scenario_id = get_master_scenario_id(conn, network_id)
+
+ # Check if network_solve_results_by_year table exists, create if not
+ conn.execute("""
+ CREATE TABLE IF NOT EXISTS network_solve_results_by_year (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ network_id INTEGER NOT NULL,
+ scenario_id INTEGER NOT NULL,
+ year INTEGER NOT NULL,
+ results_json TEXT,
+ metadata_json TEXT,
+ created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+ FOREIGN KEY (network_id) REFERENCES networks(id),
+ FOREIGN KEY (scenario_id) REFERENCES scenarios(id),
+ UNIQUE(network_id, scenario_id, year)
+ )
+ """)
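+
+ # Illustrative read-back of a stored row (editor's sketch, not package API):
+ #   row = conn.execute(
+ #       "SELECT results_json FROM network_solve_results_by_year "
+ #       "WHERE network_id = ? AND scenario_id = ? AND year = ?",
+ #       (network_id, scenario_id, 2030)).fetchone()
+ #   stats = json.loads(row[0])["network_statistics"] if row else None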
+
+ for year, stats in year_statistics.items():
+ try:
+ # Calculate proper year-specific carrier statistics
+ year_carrier_stats = self._calculate_year_carrier_statistics(conn, network_id, network, year)
+
+ # Merge year-specific carrier stats into the statistics
+ if "custom_statistics" in stats:
+ stats["custom_statistics"].update(year_carrier_stats)
+ else:
+ stats["custom_statistics"] = year_carrier_stats
+
+ # Wrap the year statistics in the same structure as overall results for consistency
+ year_result_wrapper = {
+ "success": True,
+ "year": year,
+ "network_statistics": stats
+ }
+
+ metadata = {
+ "year": year,
+ "network_id": network_id,
+ "scenario_id": scenario_id
+ }
+
+ conn.execute("""
+ INSERT OR REPLACE INTO network_solve_results_by_year
+ (network_id, scenario_id, year, results_json, metadata_json)
+ VALUES (?, ?, ?, ?, ?)
+ """, (
+ network_id,
+ scenario_id,
+ year,
+ json.dumps(year_result_wrapper, default=self._json_serializer),
+ json.dumps(metadata, default=self._json_serializer)
+ ))
+
+ stored_count += 1
+ logger.info(f"Stored year-based statistics for year {year}")
+
+ except Exception as e:
+ logger.error(f"Failed to store statistics for year {year}: {e}")
+ continue
+
+ logger.info(f"Successfully stored year-based statistics for {stored_count} years")
+ return stored_count
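+
+ # Resulting results_json payload per year looks like (illustrative):
+ #   {"success": true, "year": 2030,
+ #    "network_statistics": {..., "custom_statistics": {"dispatch_by_carrier": {...}, ...}}}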
+
+ except Exception as e:
+ logger.error(f"Failed to store year-based statistics: {e}", exc_info=True)
+ return 0
+
+ def _calculate_year_carrier_statistics(self, conn, network_id: int, network: 'pypsa.Network', year: int) -> Dict[str, Any]:
+ """
+ Calculate carrier-specific statistics for a single year, using carrier mappings from the database.
+
+ CRITICAL: This method consistently applies snapshot weightings to ALL energy calculations
+ to convert MW to MWh, matching the old PyPSA solver's behavior. This is essential for
+ multi-hourly models (e.g., 3-hourly models, where each timestep represents 3 hours).
+ """
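+ # Worked example of the weighting convention (values assumed, not from the source):
+ # in a 3-hourly model every snapshot carries a weighting of 3.0, so a generator
+ # held at a constant 100 MW across 8 snapshots dispatches 100 * 3.0 * 8 = 2400 MWh;
+ # summing the raw MW series instead would under-report energy by a factor of 3.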
+ try:
+ # Initialize carrier statistics (separate power and energy capacity like old solver)
+ carrier_stats = {
+ "dispatch_by_carrier": {},
+ "power_capacity_by_carrier": {},  # MW - Generators + Storage Units (power)
+ "energy_capacity_by_carrier": {},  # MWh - Stores + Storage Units (energy)
+ "emissions_by_carrier": {},
+ "capital_cost_by_carrier": {},
+ "operational_cost_by_carrier": {},
+ "total_system_cost_by_carrier": {}
+ }
+
+ # Get all carriers from database
+ cursor = conn.execute("""
+ SELECT DISTINCT name FROM carriers WHERE network_id = ?
+ """, (network_id,))
+ all_carriers = [row[0] for row in cursor.fetchall()]
+
+ # Initialize all carriers with zero values
+ for carrier in all_carriers:
+ carrier_stats["dispatch_by_carrier"][carrier] = 0.0
+ carrier_stats["power_capacity_by_carrier"][carrier] = 0.0
+ carrier_stats["energy_capacity_by_carrier"][carrier] = 0.0
+ carrier_stats["emissions_by_carrier"][carrier] = 0.0
+ carrier_stats["capital_cost_by_carrier"][carrier] = 0.0
+ carrier_stats["operational_cost_by_carrier"][carrier] = 0.0
+ carrier_stats["total_system_cost_by_carrier"][carrier] = 0.0
+
+ # Get generator-carrier mapping from database
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'GENERATOR'
+ """, (network_id,))
+ generator_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ # Calculate year-specific dispatch by carrier (all component types)
+
+ # 1. GENERATORS - Year-specific generation
+ if hasattr(network, 'generators_t') and hasattr(network.generators_t, 'p'):
+ # Filter generation data for this specific year
+ year_generation = self._filter_timeseries_by_year(network.generators_t.p, network.snapshots, year)
+ if year_generation is not None and not year_generation.empty:
+ for gen_name in year_generation.columns:
+ if gen_name in generator_carriers:
+ carrier_name = generator_carriers[gen_name]
+ # Calculate generation for this year (ALWAYS apply snapshot weightings to convert MW to MWh)
+ year_weightings = self._get_year_weightings(network, year)
+ if year_weightings is not None:
+ generation_mwh = float((year_generation[gen_name].values * year_weightings).sum())
+ else:
+ # Fallback: use all-year weightings if year-specific not available
+ weightings = network.snapshot_weightings
+ if isinstance(weightings, pd.DataFrame):
+ if 'objective' in weightings.columns:
+ weighting_values = weightings['objective'].values
+ else:
+ weighting_values = weightings.iloc[:, 0].values
+ else:
+ weighting_values = weightings.values
+ # Apply weightings to the filtered year data
+ if len(weighting_values) == len(year_generation):
+ generation_mwh = float((year_generation[gen_name].values * weighting_values).sum())
+ else:
+ # Last resort: simple sum (will be incorrect for non-1H models)
+ generation_mwh = float(year_generation[gen_name].sum())
+ logger.warning(f"Could not apply snapshot weightings for {gen_name} in year {year} - energy may be incorrect")
+
+ if carrier_name in carrier_stats["dispatch_by_carrier"]:
+ carrier_stats["dispatch_by_carrier"][carrier_name] += generation_mwh
+
+ # 2. STORAGE_UNITS - Year-specific discharge
+ if hasattr(network, 'storage_units_t') and hasattr(network.storage_units_t, 'p'):
+ # Get storage unit-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
+ """, (network_id,))
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ # Filter storage unit data for this specific year
+ year_storage = self._filter_timeseries_by_year(network.storage_units_t.p, network.snapshots, year)
+ if year_storage is not None and not year_storage.empty:
+ for su_name in year_storage.columns:
+ if su_name in storage_unit_carriers:
+ carrier_name = storage_unit_carriers[su_name]
+ # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
+ year_weightings = self._get_year_weightings(network, year)
+ if year_weightings is not None:
+ discharge_mwh = float((year_storage[su_name].clip(lower=0).values * year_weightings).sum())
+ else:
+ # Fallback: use all-year weightings if year-specific not available
+ weightings = network.snapshot_weightings
+ if isinstance(weightings, pd.DataFrame):
+ if 'objective' in weightings.columns:
+ weighting_values = weightings['objective'].values
+ else:
+ weighting_values = weightings.iloc[:, 0].values
+ else:
+ weighting_values = weightings.values
+ # Apply weightings to the filtered year data
+ if len(weighting_values) == len(year_storage):
+ discharge_mwh = float((year_storage[su_name].clip(lower=0).values * weighting_values).sum())
+ else:
+ discharge_mwh = float(year_storage[su_name].clip(lower=0).sum())
+ logger.warning(f"Could not apply snapshot weightings for storage unit {su_name} in year {year} - energy may be incorrect")
+
+ if carrier_name in carrier_stats["dispatch_by_carrier"]:
+ carrier_stats["dispatch_by_carrier"][carrier_name] += discharge_mwh
+
+ # 3. STORES - Year-specific discharge
+ if hasattr(network, 'stores_t') and hasattr(network.stores_t, 'p'):
+ # Get store-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
+ """, (network_id,))
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ # Filter store data for this specific year
+ year_stores = self._filter_timeseries_by_year(network.stores_t.p, network.snapshots, year)
+ if year_stores is not None and not year_stores.empty:
+ for store_name in year_stores.columns:
+ if store_name in store_carriers:
+ carrier_name = store_carriers[store_name]
+ # Calculate discharge for this year (positive values only, ALWAYS apply snapshot weightings)
+ year_weightings = self._get_year_weightings(network, year)
+ if year_weightings is not None:
+ discharge_mwh = float((year_stores[store_name].clip(lower=0).values * year_weightings).sum())
+ else:
+ # Fallback: use all-year weightings if year-specific not available
+ weightings = network.snapshot_weightings
+ if isinstance(weightings, pd.DataFrame):
+ if 'objective' in weightings.columns:
+ weighting_values = weightings['objective'].values
+ else:
+ weighting_values = weightings.iloc[:, 0].values
+ else:
+ weighting_values = weightings.values
+ # Apply weightings to the filtered year data
+ if len(weighting_values) == len(year_stores):
+ discharge_mwh = float((year_stores[store_name].clip(lower=0).values * weighting_values).sum())
+ else:
+ discharge_mwh = float(year_stores[store_name].clip(lower=0).sum())
+ logger.warning(f"Could not apply snapshot weightings for store {store_name} in year {year} - energy may be incorrect")
+
+ if carrier_name in carrier_stats["dispatch_by_carrier"]:
+ carrier_stats["dispatch_by_carrier"][carrier_name] += discharge_mwh
+
+ # Calculate year-specific capacity by carrier (capacity available in this year)
+
+ # 1. GENERATORS - Year-specific power capacity
+ if hasattr(network, 'generators') and not network.generators.empty:
+ for gen_name in network.generators.index:
+ if gen_name in generator_carriers:
+ carrier_name = generator_carriers[gen_name]
+
+ # Check if this generator is available in this year (build_year <= year)
+ is_available = True
+ if 'build_year' in network.generators.columns:
+ build_year = network.generators.loc[gen_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_available = False
+
+ if is_available:
+ # Use p_nom_opt if available, otherwise p_nom
+ if 'p_nom_opt' in network.generators.columns:
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom_opt'])
+ else:
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom']) if 'p_nom' in network.generators.columns else 0.0
+
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
+
+ # 2. STORAGE_UNITS - Year-specific power capacity
+ if hasattr(network, 'storage_units') and not network.storage_units.empty:
+ # Get storage unit-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
+ """, (network_id,))
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ for su_name in network.storage_units.index:
+ if su_name in storage_unit_carriers:
+ carrier_name = storage_unit_carriers[su_name]
+
+ # Check if this storage unit is available in this year
+ is_available = True
+ if 'build_year' in network.storage_units.columns:
+ build_year = network.storage_units.loc[su_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_available = False
+
+ if is_available:
+ # Use p_nom_opt if available, otherwise p_nom
+ if 'p_nom_opt' in network.storage_units.columns:
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom_opt'])
+ else:
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom']) if 'p_nom' in network.storage_units.columns else 0.0
+
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
+
+ # 3. STORES - Year-specific energy capacity
+ if hasattr(network, 'stores') and not network.stores.empty:
+ # Get store-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
+ """, (network_id,))
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ for store_name in network.stores.index:
+ if store_name in store_carriers:
+ carrier_name = store_carriers[store_name]
+
+ # Check if this store is available in this year
+ is_available = True
+ if 'build_year' in network.stores.columns:
+ build_year = network.stores.loc[store_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_available = False
+
+ if is_available:
+ # Use e_nom_opt if available, otherwise e_nom (energy capacity)
+ if 'e_nom_opt' in network.stores.columns:
+ capacity_mwh = float(network.stores.loc[store_name, 'e_nom_opt'])
+ else:
+ capacity_mwh = float(network.stores.loc[store_name, 'e_nom']) if 'e_nom' in network.stores.columns else 0.0
+
+ # Stores contribute energy capacity (MWh), so track it separately from power capacity (MW)
+ if carrier_name in carrier_stats["energy_capacity_by_carrier"]:
+ carrier_stats["energy_capacity_by_carrier"][carrier_name] += capacity_mwh
+
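+ # Illustrative accounting split (values assumed): a 100 MW / 400 MWh battery
+ # modelled as a Store contributes its 400 MWh to energy_capacity_by_carrier,
+ # while a StorageUnit of the same size contributes its 100 MW rating to
+ # power_capacity_by_carrier.
+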
+ # 4. LINES - Year-specific apparent power capacity
+ if hasattr(network, 'lines') and not network.lines.empty:
+ # Get line-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'LINE'
+ """, (network_id,))
+ line_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ for line_name in network.lines.index:
+ if line_name in line_carriers:
+ carrier_name = line_carriers[line_name]
+
+ # Check if this line is available in this year
+ is_available = True
+ if 'build_year' in network.lines.columns:
+ build_year = network.lines.loc[line_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_available = False
+
+ if is_available:
+ # Use s_nom_opt if available, otherwise s_nom (convert MVA to MW)
+ if 's_nom_opt' in network.lines.columns:
+ capacity_mva = float(network.lines.loc[line_name, 's_nom_opt'])
+ else:
+ capacity_mva = float(network.lines.loc[line_name, 's_nom']) if 's_nom' in network.lines.columns else 0.0
+
+ # Convert MVA to MW (assume power factor = 1)
+ capacity_mw = capacity_mva
+
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
+
+ # 5. LINKS - Year-specific power capacity
+ if hasattr(network, 'links') and not network.links.empty:
+ # Get link-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'LINK'
+ """, (network_id,))
+ link_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ for link_name in network.links.index:
+ if link_name in link_carriers:
+ carrier_name = link_carriers[link_name]
+
+ # Check if this link is available in this year
+ is_available = True
+ if 'build_year' in network.links.columns:
+ build_year = network.links.loc[link_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_available = False
+
+ if is_available:
+ # Use p_nom_opt if available, otherwise p_nom
+ if 'p_nom_opt' in network.links.columns:
+ capacity_mw = float(network.links.loc[link_name, 'p_nom_opt'])
+ else:
+ capacity_mw = float(network.links.loc[link_name, 'p_nom']) if 'p_nom' in network.links.columns else 0.0
+
+ if carrier_name in carrier_stats["power_capacity_by_carrier"]:
+ carrier_stats["power_capacity_by_carrier"][carrier_name] += capacity_mw
+
+ # Calculate year-specific emissions (based on year-specific dispatch)
+ cursor = conn.execute("""
+ SELECT name, co2_emissions
+ FROM carriers
+ WHERE network_id = ? AND co2_emissions IS NOT NULL
+ ORDER BY name
+ """, (network_id,))
+
+ emission_factors = {}
+ for row in cursor.fetchall():
+ carrier_name, co2_emissions = row
+ emission_factors[carrier_name] = co2_emissions
+
+ # Calculate emissions = year_dispatch * emission_factor
+ for carrier, dispatch_mwh in carrier_stats["dispatch_by_carrier"].items():
+ emission_factor = emission_factors.get(carrier, 0.0)
+ emissions = dispatch_mwh * emission_factor
+ carrier_stats["emissions_by_carrier"][carrier] = emissions
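+
+ # Illustrative arithmetic (values assumed): 1000 MWh of coal dispatch with an
+ # emission factor of 0.9 tCO2/MWh contributes 900 tCO2 for that carrier-year.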
+
+ # Calculate year-specific costs (all component types)
+ # For multi-period models, costs are complex - capital costs are incurred at build time
+ # but operational costs are incurred when generating
+
+ # 1. GENERATORS - Year-specific operational and capital costs
+ if hasattr(network, 'generators') and not network.generators.empty:
+ for gen_name in network.generators.index:
+ if gen_name in generator_carriers:
+ carrier_name = generator_carriers[gen_name]
+
+ # Operational costs = year_dispatch * marginal_cost
+ if 'marginal_cost' in network.generators.columns:
+ year_dispatch = 0.0
+ if hasattr(network, 'generators_t') and hasattr(network.generators_t, 'p'):
+ year_generation = self._filter_timeseries_by_year(network.generators_t.p, network.snapshots, year)
+ if year_generation is not None and gen_name in year_generation.columns:
+ year_weightings = self._get_year_weightings(network, year)
+ if year_weightings is not None:
+ year_dispatch = float((year_generation[gen_name].values * year_weightings).sum())
+ else:
+ # Fallback: use all-year weightings if year-specific not available
+ weightings = network.snapshot_weightings
+ if isinstance(weightings, pd.DataFrame):
+ if 'objective' in weightings.columns:
+ weighting_values = weightings['objective'].values
+ else:
+ weighting_values = weightings.iloc[:, 0].values
+ else:
+ weighting_values = weightings.values
+ # Apply weightings to the filtered year data
+ if len(weighting_values) == len(year_generation):
+ year_dispatch = float((year_generation[gen_name].values * weighting_values).sum())
+ else:
+ year_dispatch = float(year_generation[gen_name].sum())
+ logger.warning(f"Could not apply snapshot weightings for operational cost calc of {gen_name} in year {year} - cost may be incorrect")
+
+ marginal_cost = float(network.generators.loc[gen_name, 'marginal_cost'])
+ operational_cost = year_dispatch * marginal_cost
+
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
+
+ # Capital costs - include if asset is operational in this year (matching old solver)
+ if 'capital_cost' in network.generators.columns:
+ # Check if this generator is operational in this year
+ is_operational = True
+ if 'build_year' in network.generators.columns:
+ build_year = network.generators.loc[gen_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_operational = False  # Not built yet
+
+ if is_operational:
+ capital_cost_per_mw = float(network.generators.loc[gen_name, 'capital_cost'])
+
+ if 'p_nom_opt' in network.generators.columns:
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom_opt'])
+ elif 'p_nom' in network.generators.columns:
+ capacity_mw = float(network.generators.loc[gen_name, 'p_nom'])
+ else:
+ capacity_mw = 0.0
+
+ # Annual capital cost for operational assets (undiscounted)
+ annual_capital_cost = capacity_mw * capital_cost_per_mw
+
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
+
+ # 2. STORAGE_UNITS - Year-specific operational and capital costs
+ if hasattr(network, 'storage_units') and not network.storage_units.empty:
+ # Get storage unit-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'STORAGE_UNIT'
+ """, (network_id,))
+ storage_unit_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ for su_name in network.storage_units.index:
+ if su_name in storage_unit_carriers:
+ carrier_name = storage_unit_carriers[su_name]
+
+ # Operational costs = year_discharge * marginal_cost
+ if 'marginal_cost' in network.storage_units.columns:
+ year_discharge = 0.0
+ if hasattr(network, 'storage_units_t') and hasattr(network.storage_units_t, 'p'):
+ year_storage = self._filter_timeseries_by_year(network.storage_units_t.p, network.snapshots, year)
+ if year_storage is not None and su_name in year_storage.columns:
+ year_weightings = self._get_year_weightings(network, year)
+ if year_weightings is not None:
+ year_discharge = float((year_storage[su_name].clip(lower=0).values * year_weightings).sum())
+ else:
+ # Fallback: use all-year weightings if year-specific not available
+ weightings = network.snapshot_weightings
+ if isinstance(weightings, pd.DataFrame):
+ if 'objective' in weightings.columns:
+ weighting_values = weightings['objective'].values
+ else:
+ weighting_values = weightings.iloc[:, 0].values
+ else:
+ weighting_values = weightings.values
+ # Apply weightings to the filtered year data
+ if len(weighting_values) == len(year_storage):
+ year_discharge = float((year_storage[su_name].clip(lower=0).values * weighting_values).sum())
+ else:
+ year_discharge = float(year_storage[su_name].clip(lower=0).sum())
+ logger.warning(f"Could not apply snapshot weightings for operational cost calc of storage unit {su_name} in year {year} - cost may be incorrect")
+
+ marginal_cost = float(network.storage_units.loc[su_name, 'marginal_cost'])
+ operational_cost = year_discharge * marginal_cost
+
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
+
+ # Capital costs - include if asset is operational in this year (matching old solver)
+ if 'capital_cost' in network.storage_units.columns:
+ # Check if this storage unit is operational in this year
+ is_operational = True
+ if 'build_year' in network.storage_units.columns:
+ build_year = network.storage_units.loc[su_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_operational = False  # Not built yet
+
+ if is_operational:
+ capital_cost_per_mw = float(network.storage_units.loc[su_name, 'capital_cost'])
+
+ if 'p_nom_opt' in network.storage_units.columns:
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom_opt'])
+ elif 'p_nom' in network.storage_units.columns:
+ capacity_mw = float(network.storage_units.loc[su_name, 'p_nom'])
+ else:
+ capacity_mw = 0.0
+
+ # Annual capital cost for operational assets (undiscounted)
+ annual_capital_cost = capacity_mw * capital_cost_per_mw
+
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
+
+ # 3. STORES - Year-specific operational and capital costs
+ if hasattr(network, 'stores') and not network.stores.empty:
+ # Get store-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'STORE'
+ """, (network_id,))
+ store_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ for store_name in network.stores.index:
+ if store_name in store_carriers:
+ carrier_name = store_carriers[store_name]
+
+ # Operational costs = year_discharge * marginal_cost
+ if 'marginal_cost' in network.stores.columns:
+ year_discharge = 0.0
+ if hasattr(network, 'stores_t') and hasattr(network.stores_t, 'p'):
+ year_stores = self._filter_timeseries_by_year(network.stores_t.p, network.snapshots, year)
+ if year_stores is not None and store_name in year_stores.columns:
+ year_weightings = self._get_year_weightings(network, year)
+ if year_weightings is not None:
+ year_discharge = float((year_stores[store_name].clip(lower=0).values * year_weightings).sum())
+ else:
+ # Fallback: use all-year weightings if year-specific not available
+ weightings = network.snapshot_weightings
+ if isinstance(weightings, pd.DataFrame):
+ if 'objective' in weightings.columns:
+ weighting_values = weightings['objective'].values
+ else:
+ weighting_values = weightings.iloc[:, 0].values
+ else:
+ weighting_values = weightings.values
+ # Apply weightings to the filtered year data
+ if len(weighting_values) == len(year_stores):
+ year_discharge = float((year_stores[store_name].clip(lower=0).values * weighting_values).sum())
+ else:
+ year_discharge = float(year_stores[store_name].clip(lower=0).sum())
+ logger.warning(f"Could not apply snapshot weightings for operational cost calc of store {store_name} in year {year} - cost may be incorrect")
+
+ marginal_cost = float(network.stores.loc[store_name, 'marginal_cost'])
+ operational_cost = year_discharge * marginal_cost
+
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
+
+ # Capital costs - include if asset is operational in this year (matching old solver)
+ if 'capital_cost' in network.stores.columns:
+ # Check if this store is operational in this year
+ is_operational = True
+ if 'build_year' in network.stores.columns:
+ build_year = network.stores.loc[store_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_operational = False  # Not built yet
+
+ if is_operational:
+ capital_cost_per_mwh = float(network.stores.loc[store_name, 'capital_cost'])
+
+ if 'e_nom_opt' in network.stores.columns:
+ capacity_mwh = float(network.stores.loc[store_name, 'e_nom_opt'])
+ elif 'e_nom' in network.stores.columns:
+ capacity_mwh = float(network.stores.loc[store_name, 'e_nom'])
+ else:
+ capacity_mwh = 0.0
+
+ # Annual capital cost for operational assets (undiscounted)
+ annual_capital_cost = capacity_mwh * capital_cost_per_mwh
+
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
+
+ # 4. LINES - Year-specific capital costs (include if operational in this year)
+ if hasattr(network, 'lines') and not network.lines.empty:
+ # Get line-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'LINE'
+ """, (network_id,))
+ line_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ for line_name in network.lines.index:
+ if line_name in line_carriers:
+ carrier_name = line_carriers[line_name]
+
+ # Capital costs - include if asset is operational in this year (matching old solver)
+ if 'capital_cost' in network.lines.columns:
+ # Check if this line is operational in this year
+ is_operational = True
+ if 'build_year' in network.lines.columns:
+ build_year = network.lines.loc[line_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_operational = False  # Not built yet
+
+ if is_operational:
+ capital_cost_per_mva = float(network.lines.loc[line_name, 'capital_cost'])
+
+ if 's_nom_opt' in network.lines.columns:
+ capacity_mva = float(network.lines.loc[line_name, 's_nom_opt'])
+ elif 's_nom' in network.lines.columns:
+ capacity_mva = float(network.lines.loc[line_name, 's_nom'])
+ else:
+ capacity_mva = 0.0
+
+ # Annual capital cost for operational assets (undiscounted)
+ annual_capital_cost = capacity_mva * capital_cost_per_mva
+
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
+
+ # 5. LINKS - Year-specific operational and capital costs
+ if hasattr(network, 'links') and not network.links.empty:
+ # Get link-carrier mapping
+ cursor = conn.execute("""
+ SELECT c.name as component_name, carr.name as carrier_name
+ FROM components c
+ JOIN carriers carr ON c.carrier_id = carr.id
+ WHERE c.network_id = ? AND c.component_type = 'LINK'
+ """, (network_id,))
+ link_carriers = {row[0]: row[1] for row in cursor.fetchall()}
+
+ # Operational costs for links (year-specific flow)
+ if hasattr(network, 'links_t') and hasattr(network.links_t, 'p0'):
+ for link_name in network.links.index:
+ if link_name in link_carriers:
+ carrier_name = link_carriers[link_name]
+
+ # Get marginal cost for this link
+ marginal_cost = 0.0
+ if 'marginal_cost' in network.links.columns:
+ marginal_cost = float(network.links.loc[link_name, 'marginal_cost'])
+
+ # Calculate operational cost = year_flow * marginal_cost
+ year_flow = 0.0
+ if link_name in network.links_t.p0.columns:
+ year_links = self._filter_timeseries_by_year(network.links_t.p0, network.snapshots, year)
+ if year_links is not None and link_name in year_links.columns:
+ year_weightings = self._get_year_weightings(network, year)
+ if year_weightings is not None:
+ year_flow = float((abs(year_links[link_name]).values * year_weightings).sum())
+ else:
+ # Fallback: use all-year weightings if year-specific not available
+ weightings = network.snapshot_weightings
+ if isinstance(weightings, pd.DataFrame):
+ if 'objective' in weightings.columns:
+ weighting_values = weightings['objective'].values
+ else:
+ weighting_values = weightings.iloc[:, 0].values
+ else:
+ weighting_values = weightings.values
+ # Apply weightings to the filtered year data
+ if len(weighting_values) == len(year_links):
+ year_flow = float((abs(year_links[link_name]).values * weighting_values).sum())
+ else:
+ year_flow = float(abs(year_links[link_name]).sum())
+ logger.warning(f"Could not apply snapshot weightings for operational cost calc of link {link_name} in year {year} - cost may be incorrect")
+
+ operational_cost = year_flow * marginal_cost
+
+ if carrier_name in carrier_stats["operational_cost_by_carrier"]:
+ carrier_stats["operational_cost_by_carrier"][carrier_name] += operational_cost
+
+ # Capital costs for links - include if operational in this year
+ for link_name in network.links.index:
+ if link_name in link_carriers:
+ carrier_name = link_carriers[link_name]
+
+ # Capital costs - include if asset is operational in this year (matching old solver)
+ if 'capital_cost' in network.links.columns:
+ # Check if this link is operational in this year
+ is_operational = True
+ if 'build_year' in network.links.columns:
+ build_year = network.links.loc[link_name, 'build_year']
+ if pd.notna(build_year) and int(build_year) > year:
+ is_operational = False  # Not built yet
+
+ if is_operational:
+ capital_cost_per_mw = float(network.links.loc[link_name, 'capital_cost'])
+
+ if 'p_nom_opt' in network.links.columns:
+ capacity_mw = float(network.links.loc[link_name, 'p_nom_opt'])
+ elif 'p_nom' in network.links.columns:
+ capacity_mw = float(network.links.loc[link_name, 'p_nom'])
+ else:
+ capacity_mw = 0.0
+
+ # Annual capital cost for operational assets (undiscounted)
+ annual_capital_cost = capacity_mw * capital_cost_per_mw
+
+ if carrier_name in carrier_stats["capital_cost_by_carrier"]:
+ carrier_stats["capital_cost_by_carrier"][carrier_name] += annual_capital_cost
+
+ # Calculate total system cost = capital + operational (for this year)
+ for carrier in all_carriers:
+ capital = carrier_stats["capital_cost_by_carrier"][carrier]
+ operational = carrier_stats["operational_cost_by_carrier"][carrier]
+ carrier_stats["total_system_cost_by_carrier"][carrier] = capital + operational
+
+ logger.info(f"Calculated year {year} carrier statistics:")
+ logger.info(f"  Dispatch: {sum(carrier_stats['dispatch_by_carrier'].values()):.2f} MWh")
+ logger.info(f"  Power capacity: {sum(carrier_stats['power_capacity_by_carrier'].values()):.2f} MW")
+ logger.info(f"  Energy capacity: {sum(carrier_stats['energy_capacity_by_carrier'].values()):.2f} MWh")
+ logger.info(f"  Emissions: {sum(carrier_stats['emissions_by_carrier'].values()):.2f} tCO2")
+ logger.info(f"  Capital cost: {sum(carrier_stats['capital_cost_by_carrier'].values()):.2f} USD")
+ logger.info(f"  Operational cost: {sum(carrier_stats['operational_cost_by_carrier'].values()):.2f} USD")
+
+ return carrier_stats
+
+ except Exception as e:
+ logger.error(f"Failed to calculate year {year} carrier statistics: {e}", exc_info=True)
+ return {
+ "dispatch_by_carrier": {},
+ "power_capacity_by_carrier": {},
+ "energy_capacity_by_carrier": {},
+ "emissions_by_carrier": {},
+ "capital_cost_by_carrier": {},
+ "operational_cost_by_carrier": {},
+ "total_system_cost_by_carrier": {}
+ }
+
+ def _filter_timeseries_by_year(self, timeseries_df: 'pd.DataFrame', snapshots: 'pd.Index', year: int) -> Optional['pd.DataFrame']:
+ """Filter timeseries data by year - mirrors the solver implementation for consistency"""
+ try:
+ # Handle MultiIndex case (multi-period optimization)
+ if hasattr(snapshots, 'levels'):
+ period_values = snapshots.get_level_values(0)
+ year_mask = period_values == year
+ if year_mask.any():
+ year_snapshots = snapshots[year_mask]
+ return timeseries_df.loc[year_snapshots]
+
+ # Handle DatetimeIndex case (regular time series)
+ elif hasattr(snapshots, 'year'):
+ year_mask = snapshots.year == year
+ if year_mask.any():
+ return timeseries_df.loc[year_mask]
+
+ # Fallback - return None if we cannot filter
+ return None
+
+ except Exception as e:
+ logger.error(f"Failed to filter timeseries by year {year}: {e}")
+ return None
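+
+ # Illustrative sketch of the MultiIndex branch (editor's example, assumed data):
+ #   snapshots = pd.MultiIndex.from_product([[2030, 2040], pd.RangeIndex(4)])
+ #   snapshots.get_level_values(0) == 2030 masks the four 2030 snapshots, and
+ #   timeseries_df.loc[year_snapshots] keeps only those rows.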
+
+ def _get_year_weightings(self, network: 'pypsa.Network', year: int) -> Optional['np.ndarray']:
+ """Get snapshot weightings for a specific year - mirrors the solver implementation for consistency"""
+ try:
+ # Filter snapshot weightings by year
+ if hasattr(network.snapshots, 'levels'):
+ period_values = network.snapshots.get_level_values(0)
+ year_mask = period_values == year
+ if year_mask.any():
+ year_snapshots = network.snapshots[year_mask]
+ year_weightings = network.snapshot_weightings.loc[year_snapshots]
+ if isinstance(year_weightings, pd.DataFrame):
+ if 'objective' in year_weightings.columns:
+ return year_weightings['objective'].values
+ else:
+ return year_weightings.iloc[:, 0].values
+ else:
+ return year_weightings.values
+
+ elif hasattr(network.snapshots, 'year'):
+ year_mask = network.snapshots.year == year
+ if year_mask.any():
+ year_weightings = network.snapshot_weightings.loc[year_mask]
+ if isinstance(year_weightings, pd.DataFrame):
+ if 'objective' in year_weightings.columns:
+ return year_weightings['objective'].values
+ else:
+ return year_weightings.iloc[:, 0].values
+ else:
+ return year_weightings.values
+
+ return None
+
+ except Exception as e:
+ logger.error(f"Failed to get year weightings for year {year}: {e}")
+ return None
+
+ def _json_serializer(self, obj):
+ """Convert numpy/pandas types to JSON-serializable types (np and pd are imported at module level)"""
+ if isinstance(obj, (np.integer, np.int64, np.int32)):
+ return int(obj)
+ elif isinstance(obj, (np.floating, np.float64, np.float32)):
+ return float(obj)
+ elif isinstance(obj, np.ndarray):
+ return obj.tolist()
+ elif isinstance(obj, pd.Series):
+ return obj.to_dict()
+ elif isinstance(obj, pd.DataFrame):
+ return obj.to_dict()
+ elif hasattr(obj, 'item'):  # Handle numpy scalars
+ return obj.item()
+ else:
+ raise TypeError(f"Object of type {type(obj)} is not JSON serializable")
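+
+ # Illustrative usage (editor's sketch, values assumed):
+ #   json.dumps({"p": np.arange(3)}, default=self._json_serializer)
+ #   -> '{"p": [0, 1, 2]}'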