pyconvexity-0.4.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of pyconvexity might be problematic.

Files changed (42)
  1. pyconvexity/__init__.py +226 -0
  2. pyconvexity/_version.py +1 -0
  3. pyconvexity/core/__init__.py +60 -0
  4. pyconvexity/core/database.py +485 -0
  5. pyconvexity/core/errors.py +106 -0
  6. pyconvexity/core/types.py +400 -0
  7. pyconvexity/data/README.md +101 -0
  8. pyconvexity/data/__init__.py +17 -0
  9. pyconvexity/data/loaders/__init__.py +3 -0
  10. pyconvexity/data/loaders/cache.py +213 -0
  11. pyconvexity/data/schema/01_core_schema.sql +420 -0
  12. pyconvexity/data/schema/02_data_metadata.sql +120 -0
  13. pyconvexity/data/schema/03_validation_data.sql +506 -0
  14. pyconvexity/data/sources/__init__.py +5 -0
  15. pyconvexity/data/sources/gem.py +442 -0
  16. pyconvexity/io/__init__.py +26 -0
  17. pyconvexity/io/excel_exporter.py +1226 -0
  18. pyconvexity/io/excel_importer.py +1381 -0
  19. pyconvexity/io/netcdf_exporter.py +197 -0
  20. pyconvexity/io/netcdf_importer.py +1833 -0
  21. pyconvexity/models/__init__.py +195 -0
  22. pyconvexity/models/attributes.py +730 -0
  23. pyconvexity/models/carriers.py +159 -0
  24. pyconvexity/models/components.py +611 -0
  25. pyconvexity/models/network.py +503 -0
  26. pyconvexity/models/results.py +148 -0
  27. pyconvexity/models/scenarios.py +234 -0
  28. pyconvexity/solvers/__init__.py +29 -0
  29. pyconvexity/solvers/pypsa/__init__.py +24 -0
  30. pyconvexity/solvers/pypsa/api.py +460 -0
  31. pyconvexity/solvers/pypsa/batch_loader.py +307 -0
  32. pyconvexity/solvers/pypsa/builder.py +675 -0
  33. pyconvexity/solvers/pypsa/constraints.py +405 -0
  34. pyconvexity/solvers/pypsa/solver.py +1509 -0
  35. pyconvexity/solvers/pypsa/storage.py +2048 -0
  36. pyconvexity/timeseries.py +330 -0
  37. pyconvexity/validation/__init__.py +25 -0
  38. pyconvexity/validation/rules.py +312 -0
  39. pyconvexity-0.4.3.dist-info/METADATA +47 -0
  40. pyconvexity-0.4.3.dist-info/RECORD +42 -0
  41. pyconvexity-0.4.3.dist-info/WHEEL +5 -0
  42. pyconvexity-0.4.3.dist-info/top_level.txt +1 -0
pyconvexity/io/excel_exporter.py
@@ -0,0 +1,1226 @@
+ """
+ Excel exporter for PyConvexity energy system models.
+ Exports complete network models to Excel workbooks with multiple sheets.
+ """
+
+ import logging
+ import sqlite3
+ from typing import Dict, Any, Optional, List, Callable
+ from pathlib import Path
+ import pandas as pd
+ from datetime import datetime
+ import json
+
+ # Import functions directly from pyconvexity
+ from pyconvexity.core.database import open_connection
+ from pyconvexity.core.errors import AttributeNotFound
+ from pyconvexity.models import (
+     list_components_by_type,
+     list_carriers,
+     get_network_info,
+     get_network_time_periods,
+     get_attribute,
+     list_component_attributes,
+     get_network_config,
+ )
+ from pyconvexity.validation import list_validation_rules
+ from pyconvexity.models.attributes import get_timeseries as get_timeseries_conn
+
+ logger = logging.getLogger(__name__)
+
+
+ class ExcelModelExporter:
+     """Export entire network model to Excel workbook"""
+
+     def __init__(self):
+         self.logger = logging.getLogger(__name__)
+
+     def export_model_to_excel(
+         self,
+         db_path: str,
+         output_path: str,
+         scenario_id: Optional[int] = None,
+         progress_callback: Optional[Callable] = None,
+     ) -> Dict[str, Any]:
+         """
+         Export complete network model to Excel workbook
+
+         Args:
+             db_path: Database path
+
+             output_path: Excel file output path
+             scenario_id: Scenario ID (defaults to master scenario)
+             progress_callback: Optional callback for progress updates
+
+         Returns:
+             Export statistics and metadata
+         """
+
+         try:
+             if progress_callback:
+                 progress_callback(0, "Starting Excel export...")
+
+             # Connect to database
+             conn = open_connection(db_path)
+
+             if progress_callback:
+                 progress_callback(5, "Loading network information...")
+
+             # Get network information
+             network_info = get_network_info(conn)
+
+             # Get master scenario if no scenario specified
+             if scenario_id is None:
+                 # Base network uses scenario_id = NULL, no need to query
+                 # scenario_id remains None for base network
+                 pass
+
+             if progress_callback:
+                 progress_callback(10, "Loading carriers...")
+
+             # Get carriers
+             carriers = list_carriers(conn)
+
+             if progress_callback:
+                 progress_callback(15, "Loading components...")
+
+             # Get all component types
+             component_types = [
+                 "BUS",
+                 "GENERATOR",
+                 "LOAD",
+                 "LINE",
+                 "LINK",
+                 "STORAGE_UNIT",
+                 "STORE",
+                 "CONSTRAINT",
+             ]
+
+             # Load components by type
+             components_by_type = {}
+             for comp_type in component_types:
+                 components = list_components_by_type(conn, comp_type)
+                 components_by_type[comp_type] = components
+
+             if progress_callback:
+                 progress_callback(25, "Processing component attributes...")
+
+             # Process components and their attributes
+             processed_components = {}
+             timeseries_data = {}
+
+             for comp_type, components in components_by_type.items():
+                 processed_components[comp_type] = []
+                 timeseries_data[comp_type] = {}
+
+                 for component in components:
+                     # Check for cancellation during processing
+                     if progress_callback:
+                         try:
+                             progress_callback(None, None)  # Check for cancellation
+                         except KeyboardInterrupt:
+                             self.logger.info("Excel export cancelled by user")
+                             raise
+
+                     # Get component attributes (all possible attributes for this component type)
+                     attributes = self._get_component_attributes(
+                         conn, component.id, scenario_id, comp_type
+                     )
+
+                     # Process component data
+                     processed_component = self._process_component_for_excel(
+                         component, attributes, carriers, components_by_type
+                     )
+                     processed_components[comp_type].append(processed_component)
+
+                     # Extract timeseries data
+                     for attr_name, attr_data in attributes.items():
+                         if isinstance(attr_data, dict) and "Timeseries" in attr_data:
+                             if comp_type not in timeseries_data:
+                                 timeseries_data[comp_type] = {}
+                             if attr_name not in timeseries_data[comp_type]:
+                                 timeseries_data[comp_type][attr_name] = {}
+
+                             # Handle both new efficient format and legacy format
+                             if "values" in attr_data:
+                                 # New efficient format - store values directly
+                                 timeseries_data[comp_type][attr_name][
+                                     component.name
+                                 ] = attr_data["values"]
+                             elif "points" in attr_data:
+                                 # Legacy format - store the timeseries points
+                                 timeseries_data[comp_type][attr_name][
+                                     component.name
+                                 ] = attr_data["points"]
+
+             if progress_callback:
+                 progress_callback(50, "Creating Excel workbook...")
+
+             # Check for cancellation before starting Excel creation
+             if progress_callback:
+                 try:
+                     progress_callback(None, None)  # Check for cancellation
+                 except KeyboardInterrupt:
+                     self.logger.info("Excel export cancelled before workbook creation")
+                     raise
+
+             # Get scenario information if scenario_id is provided
+             scenario_info = None
+             if scenario_id is not None:
+                 scenario_info = self._get_scenario_info(conn, scenario_id)
+
+             # Create Excel workbook
+             with pd.ExcelWriter(output_path, engine="openpyxl") as writer:
+                 # Create overview sheet
+                 self._create_overview_sheet(
+                     writer, network_info, processed_components, scenario_info
+                 )
+
+                 # Create component sheets
+                 for comp_type in component_types:
+                     if processed_components[comp_type]:
+                         # Check for cancellation during sheet creation
+                         if progress_callback:
+                             try:
+                                 progress_callback(None, None)  # Check for cancellation
+                             except KeyboardInterrupt:
+                                 self.logger.info(
+                                     f"Excel export cancelled during {comp_type} sheet creation"
+                                 )
+                                 raise
+
+                         self._create_component_sheet(
+                             writer, conn, comp_type, processed_components[comp_type]
+                         )
+
+                         # Create timeseries sheet if there's timeseries data
+                         if comp_type in timeseries_data and timeseries_data[comp_type]:
+                             self._create_timeseries_sheet(
+                                 writer, comp_type, timeseries_data[comp_type], conn
+                             )
+
+                 # Create carriers sheet
+                 self._create_carriers_sheet(writer, carriers)
+
+                 # Create network config sheet
+                 self._create_network_config_sheet(writer, conn)
+
+                 # Create statistics sheet if solve results are available
+                 self._create_statistics_sheet(writer, scenario_id, conn)
+
+                 # Create per-year statistics sheet if available
+                 self._create_per_year_statistics_sheet(writer, scenario_id, conn)
+
+             if progress_callback:
+                 progress_callback(100, "Excel export completed")
+
+             # Calculate statistics
+             stats = self._calculate_export_stats(processed_components, timeseries_data)
+
+             return {
+                 "success": True,
+                 "message": f"Network exported to Excel: {output_path}",
+                 "output_path": output_path,
+                 "stats": stats,
+             }
+
+         except Exception as e:
+             self.logger.error(f"Excel export failed: {e}", exc_info=True)
+             if progress_callback:
+                 progress_callback(None, f"Export failed: {str(e)}")
+             raise
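
For orientation, a minimal usage sketch of the exporter; the paths and the progress printer are illustrative, not part of the package:

    # Hedged usage sketch: "model.db" and "model.xlsx" are assumed paths.
    from pyconvexity.io.excel_exporter import ExcelModelExporter

    def print_progress(pct, msg):
        # The exporter also calls progress_callback(None, None) as a pure
        # cancellation check; raising KeyboardInterrupt here cancels the export.
        if pct is not None and msg is not None:
            print(f"[{pct:3d}%] {msg}")

    result = ExcelModelExporter().export_model_to_excel(
        db_path="model.db",
        output_path="model.xlsx",
        scenario_id=None,  # None exports the base network (scenario_id = NULL)
        progress_callback=print_progress,
    )
    print(result["stats"])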
+
+     def _get_component_attributes(
+         self, conn, component_id: int, scenario_id: Optional[int], component_type: str
+     ) -> Dict[str, Any]:
+         """Get all possible attributes for a component type, with values where set"""
+         attributes = {}
+
+         # Get ALL possible attribute names for this component type from validation rules
+         validation_rules = list_validation_rules(conn, component_type)
+
+         for rule in validation_rules:
+             attr_name = rule.attribute_name
+             try:
+                 # Try to get the attribute value (may not exist)
+                 attr_value = get_attribute(conn, component_id, attr_name, scenario_id)
+
+                 if attr_value.variant == "Static":
+                     # Extract static value
+                     static_value = attr_value.static_value
+                     if static_value.data_type() == "float":
+                         attributes[attr_name] = static_value.as_f64()
+                     elif static_value.data_type() == "int":
+                         attributes[attr_name] = int(static_value.as_f64())
+                     elif static_value.data_type() == "boolean":
+                         attributes[attr_name] = static_value.data["Boolean"]
+                     elif static_value.data_type() == "string":
+                         attributes[attr_name] = static_value.data["String"]
+                     else:
+                         attributes[attr_name] = static_value.data
+
+                 elif attr_value.variant == "Timeseries":
+                     # Use new efficient timeseries access
+                     try:
+                         timeseries = get_timeseries_conn(
+                             conn, component_id, attr_name, scenario_id
+                         )
+                         if timeseries and timeseries.values:
+                             attributes[attr_name] = {
+                                 "Timeseries": True,
+                                 "values": timeseries.values,
+                             }
+                         else:
+                             # Fallback to legacy method if new method fails
+                             attributes[attr_name] = {
+                                 "Timeseries": True,
+                                 "points": attr_value.timeseries_value,
+                             }
+                     except Exception as ts_e:
+                         self.logger.warning(
+                             f"Failed to load timeseries {attr_name} for component {component_id}: {ts_e}"
+                         )
+                         # Fallback to legacy method
+                         attributes[attr_name] = {
+                             "Timeseries": True,
+                             "points": attr_value.timeseries_value,
+                         }
+
+             except AttributeNotFound:
+                 # Attribute not set - always use empty string for blank Excel cell
+                 attributes[attr_name] = ""
+
+             except Exception as e:
+                 self.logger.warning(
+                     f"Failed to load attribute {attr_name} for component {component_id}: {e}"
+                 )
+                 # Still include the attribute with empty value
+                 attributes[attr_name] = ""
+                 continue
+
+         return attributes
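
To make the return shape concrete, an illustrative example of the dict this method builds; the attribute names and values here are hypothetical:

    attributes = {
        "p_nom": 120.5,            # Static float
        "committable": True,       # Static boolean
        "p_max_pu": {              # Timeseries, new efficient format
            "Timeseries": True,
            "values": [0.91, 0.88, 0.95],
        },
        "marginal_cost": "",       # AttributeNotFound -> blank Excel cell
    }

Legacy timeseries instead carry {"Timeseries": True, "points": ...}, as in the fallback branches above.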
+
+     def _process_component_for_excel(
+         self, component, attributes: Dict, carriers: List, components_by_type: Dict
+     ) -> Dict[str, Any]:
+         """Process a component for Excel export"""
+         processed = {
+             "name": component.name,
+             "type": component.component_type.lower(),
+         }
+
+         # Add carrier name
+         if component.carrier_id:
+             # Carriers are objects with attributes, not dictionaries
+             carrier = next((c for c in carriers if c.id == component.carrier_id), None)
+             carrier_name = carrier.name if carrier else "CARRIER_NOT_FOUND"
+             processed["carrier"] = carrier_name
+             self.logger.info(
+                 f"Component '{component.name}' has carrier_id={component.carrier_id}, resolved to carrier: {carrier_name}"
+             )
+         else:
+             processed["carrier"] = ""  # Use empty string for no carrier
+             self.logger.info(
+                 f"Component '{component.name}' has no carrier_id (carrier_id={component.carrier_id})"
+             )
+
+         # Add bus connections
+         if component.bus_id:
+             bus = next(
+                 (
+                     b
+                     for b in components_by_type.get("BUS", [])
+                     if b.id == component.bus_id
+                 ),
+                 None,
+             )
+             processed["bus"] = bus.name if bus else ""
+         else:
+             processed["bus"] = ""
+
+         if component.bus0_id:
+             bus0 = next(
+                 (
+                     b
+                     for b in components_by_type.get("BUS", [])
+                     if b.id == component.bus0_id
+                 ),
+                 None,
+             )
+             processed["bus0"] = bus0.name if bus0 else ""
+         else:
+             processed["bus0"] = ""
+
+         if component.bus1_id:
+             bus1 = next(
+                 (
+                     b
+                     for b in components_by_type.get("BUS", [])
+                     if b.id == component.bus1_id
+                 ),
+                 None,
+             )
+             processed["bus1"] = bus1.name if bus1 else ""
+         else:
+             processed["bus1"] = ""
+
+         # Add coordinates
+         processed["latitude"] = (
+             component.latitude if component.latitude is not None else ""
+         )
+         processed["longitude"] = (
+             component.longitude if component.longitude is not None else ""
+         )
+
+         # Add attributes
+         for attr_name, attr_value in attributes.items():
+             if isinstance(attr_value, dict) and "Timeseries" in attr_value:
+                 processed[attr_name] = "[timeseries]"
+             else:
+                 # Special handling for carrier attribute - don't overwrite relationship carrier
+                 if attr_name == "carrier":
+                     if component.carrier_id is not None:
+                         self.logger.debug(
+                             f"Skipping carrier attribute '{attr_value}' for '{component.name}' - using relationship carrier '{processed['carrier']}'"
+                         )
+                         continue  # Skip the carrier attribute, keep the relationship carrier
+                     else:
+                         self.logger.debug(
+                             f"Using carrier attribute '{attr_value}' for '{component.name}' (no relationship carrier)"
+                         )
+
+                 processed[attr_name] = attr_value
+
+         self.logger.debug(
+             f"Final processed data for '{component.name}': carrier='{processed.get('carrier', 'NOT_SET')}'"
+         )
+         return processed
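
The carrier and bus lookups rely on a linear scan with next(); a self-contained sketch of the same pattern, where the Carrier dataclass is a stand-in rather than the pyconvexity class:

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Carrier:  # stand-in for the real carrier objects
        id: int
        name: str

    carriers = [Carrier(1, "wind"), Carrier(2, "gas")]
    match: Optional[Carrier] = next((c for c in carriers if c.id == 2), None)
    print(match.name if match else "CARRIER_NOT_FOUND")  # -> gas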
+
+     def _filter_component_columns(
+         self, conn, component_data: Dict[str, Any], component_type: str
+     ) -> Dict[str, Any]:
+         """Filter out unused columns based on component type, following DatabaseTable logic"""
+
+         filtered_data = {}
+
+         # Always include basic fields (name, carrier, latitude, longitude)
+         # Note: bus connections are NOT basic fields - they are component-type specific
+         # Note: "type" is NOT included - it's implicit based on the sheet/component type
+         # Note: CONSTRAINT components don't have carrier, latitude, or longitude - they are code-based rules
+         if component_type.upper() == "CONSTRAINT":
+             basic_fields = [
+                 "name"
+             ]  # Constraints only have name - no physical location or carrier
+         else:
+             basic_fields = ["name", "carrier", "latitude", "longitude"]
+
+         for field in basic_fields:
+             if field in component_data:
+                 filtered_data[field] = component_data[field]
+                 self.logger.info(
+                     f"Added basic field '{field}' = '{component_data[field]}' for component type {component_type}"
+                 )
+                 if field == "carrier":
+                     self.logger.debug(
+                         f"Setting carrier field to '{component_data[field]}' from component_data"
+                     )
+
+         # Add bus connection columns based on component type - EXACT DatabaseTable logic
+         component_type_lower = component_type.lower()
+         needs_bus_connection = component_type_lower in [
+             "generator",
+             "load",
+             "storage_unit",
+             "store",
+             "unmet_load",
+         ]
+         needs_two_bus_connections = component_type_lower in ["line", "link"]
+
+         if needs_bus_connection:
+             if "bus" in component_data:
+                 filtered_data["bus"] = component_data["bus"]
+         elif needs_two_bus_connections:
+             if "bus0" in component_data:
+                 filtered_data["bus0"] = component_data["bus0"]
+             if "bus1" in component_data:
+                 filtered_data["bus1"] = component_data["bus1"]
+         else:
+             # Buses and other components don't get bus connection columns
+             self.logger.info(f"No bus connection columns for {component_type_lower}")
+
+         # Get validation rules to determine which attributes are input vs output
+         try:
+
+             # Add all other attributes that aren't filtered out
+             for key, value in component_data.items():
+                 if key in filtered_data:
+                     continue  # Already handled
+
+                 # Filter out unused attributes following DatabaseTable logic
+                 should_exclude = False
+                 exclude_reason = ""
+
+                 # Note: Carrier attribute exclusion is now handled in _process_component_for_excel
+                 # to prevent overwriting relationship carriers
+
+                 # Remove location and carrier attributes for CONSTRAINT components (they don't have physical location or carriers)
+                 if component_type.upper() == "CONSTRAINT" and key in [
+                     "carrier",
+                     "latitude",
+                     "longitude",
+                 ]:
+                     should_exclude = True
+                     exclude_reason = (
+                         f"constraint exclusion - constraints don't have {key}"
+                     )
+
+                 # Remove 'type' and 'unit' attributes for buses (not used in this application)
+                 elif component_type.upper() == "BUS" and key in ["type", "unit"]:
+                     should_exclude = True
+                     exclude_reason = f"bus-specific exclusion ({key})"
+
+                 # Remove 'x' and 'y' coordinates for buses only - we use latitude/longitude instead
+                 elif component_type.upper() == "BUS" and key in ["x", "y"]:
+                     should_exclude = True
+                     exclude_reason = f"bus coordinate exclusion ({key})"
+
+                 # Remove sub-network and slack generator attributes for buses
+                 elif component_type.upper() == "BUS" and key in [
+                     "sub_network",
+                     "slack_generator",
+                 ]:
+                     should_exclude = True
+                     exclude_reason = f"bus network exclusion ({key})"
+
+                 # CRITICAL: Remove bus connection columns for components that shouldn't have them
+                 elif key in ["bus", "bus0", "bus1"]:
+                     if key == "bus" and not needs_bus_connection:
+                         should_exclude = True
+                         exclude_reason = (
+                             f"bus connection not needed for {component_type_lower}"
+                         )
+                     elif key in ["bus0", "bus1"] and not needs_two_bus_connections:
+                         should_exclude = True
+                         exclude_reason = (
+                             f"two-bus connection not needed for {component_type_lower}"
+                         )
+
+                 if should_exclude:
+                     self.logger.info(f"Excluded {key}: {exclude_reason}")
+                 else:
+                     # Special handling for carrier attribute - don't overwrite relationship field
+                     if key == "carrier" and "carrier" in filtered_data:
+                         self.logger.info(
+                             f"Skipping carrier attribute '{value}' - keeping relationship carrier '{filtered_data['carrier']}'"
+                         )
+                     else:
+                         filtered_data[key] = value
+                         self.logger.info(f"Added attribute: {key} = {value}")
+
+         except Exception as e:
+             self.logger.warning(f"Could not load validation rules for filtering: {e}")
+             # Fallback: include all attributes except the basic exclusions
+             for key, value in component_data.items():
+                 if key in filtered_data:
+                     continue
+                 if key == "carrier":  # Skip carrier attribute
+                     continue
+                 filtered_data[key] = value
+
+         return filtered_data
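
The bus-column rules above reduce to a small mapping; a condensed restatement of the method's behaviour:

    SINGLE_BUS = {"generator", "load", "storage_unit", "store", "unmet_load"}
    TWO_BUS = {"line", "link"}

    def bus_columns(component_type: str) -> list:
        t = component_type.lower()
        if t in SINGLE_BUS:
            return ["bus"]
        if t in TWO_BUS:
            return ["bus0", "bus1"]
        return []  # buses, constraints, etc. get no bus columns

    print(bus_columns("LINK"))  # -> ['bus0', 'bus1']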
+
+     def _create_overview_sheet(
+         self,
+         writer,
+         network_info: Dict,
+         processed_components: Dict,
+         scenario_info: Optional[Dict] = None,
+     ):
+         """Create overview sheet with network metadata"""
+         # Create key-value pairs as separate lists for two columns
+         keys = []
+         values = []
+
+         # Network information
+         keys.extend(["Name", "Description", "Time Start", "Time End", "Time Interval"])
+         values.extend(
+             [
+                 network_info["name"],
+                 network_info.get("description", ""),
+                 network_info["time_start"],
+                 network_info["time_end"],
+                 network_info["time_interval"],
+             ]
+         )
+
+         # Scenario information
+         if scenario_info:
+             keys.append("")
+             values.append("")
+             keys.extend(
+                 [
+                     "Scenario Information",
+                     "Scenario Name",
+                     "Scenario Description",
+                     "Is Master Scenario",
+                     "Scenario Created",
+                 ]
+             )
+             values.extend(
+                 [
+                     "",
+                     scenario_info.get("name", "Unknown"),
+                     scenario_info.get("description", "") or "No description",
+                     "Yes" if scenario_info.get("is_master", False) else "No",
+                     scenario_info.get("created_at", ""),
+                 ]
+             )
+
+         # Empty row
+         keys.append("")
+         values.append("")
+
+         # Export information
+         keys.extend(["Export Information", "Export Date", "Export Version"])
+         values.extend(
+             ["", datetime.now().strftime("%Y-%m-%d %H:%M:%S"), self._get_app_version()]
+         )
+
+         # Create two-column DataFrame
+         df = pd.DataFrame({"Property": keys, "Value": values})
+         df.to_excel(writer, sheet_name="Overview", index=False)
+
+     def _get_scenario_info(self, conn, scenario_id: int) -> Dict[str, Any]:
+         """Get scenario information from database"""
+         try:
+             cursor = conn.execute(
+                 """
+                 SELECT id, name, description, created_at
+                 FROM scenarios
+                 WHERE id = ?
+                 """,
+                 (scenario_id,),
+             )
+
+             row = cursor.fetchone()
+             if not row:
+                 self.logger.warning(f"No scenario found with ID {scenario_id}")
+                 return {}
+
+             return {
+                 "id": row[0],
+                 "name": row[1],
+                 "description": row[2],
+                 "created_at": row[3],
+             }
+
+         except Exception as e:
+             self.logger.warning(f"Failed to retrieve scenario info: {e}")
+             return {}
+
+     def _create_component_sheet(
+         self, writer, conn, component_type: str, components: List[Dict]
+     ):
+         """Create a sheet for a specific component type"""
+         if not components:
+             return
+
+         # Apply column filtering to each component
+         filtered_components = []
+         for component in components:
+             filtered_component = self._filter_component_columns(
+                 conn, component, component_type
+             )
+             filtered_components.append(filtered_component)
+
+         # Convert to DataFrame
+         df = pd.DataFrame(filtered_components)
+
+         # Reorder columns to put core fields first
+         core_columns = [
+             "name",
+             "carrier",
+             "bus",
+             "bus0",
+             "bus1",
+             "latitude",
+             "longitude",
+         ]
+         other_columns = []
+         for col in df.columns:
+             if col not in core_columns:
+                 other_columns.append(col)
+         ordered_columns = []
+         for col in core_columns:
+             if col in df.columns:
+                 ordered_columns.append(col)
+         ordered_columns.extend(other_columns)
+
+         df = df[ordered_columns]
+
+         # Write to Excel with proper pluralization
+         sheet_name_mapping = {
+             "BUS": "Buses",
+             "GENERATOR": "Generators",
+             "LOAD": "Loads",
+             "LINE": "Lines",
+             "LINK": "Links",
+             "STORAGE_UNIT": "Storage Units",
+             "STORE": "Stores",
+             "CONSTRAINT": "Constraints",
+         }
+         sheet_name = sheet_name_mapping.get(
+             component_type, f"{component_type.title()}s"
+         )
+         df.to_excel(writer, sheet_name=sheet_name, index=False)
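
The four reordering loops can be collapsed into two comprehensions with the same result; a sketch with an illustrative DataFrame:

    import pandas as pd

    df = pd.DataFrame([{"p_nom": 10, "name": "g1", "carrier": "wind"}])
    core_columns = ["name", "carrier", "bus", "bus0", "bus1", "latitude", "longitude"]
    ordered = [c for c in core_columns if c in df.columns] + [
        c for c in df.columns if c not in core_columns
    ]
    print(df[ordered].columns.tolist())  # -> ['name', 'carrier', 'p_nom']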
+
+     def _create_timeseries_sheet(
+         self, writer, component_type: str, timeseries_data: Dict, conn
+     ):
+         """Create a timeseries sheet for a component type"""
+         # Get network time periods
+         time_periods = get_network_time_periods(conn)
+         if not time_periods:
+             self.logger.warning(
+                 f"No time periods found, skipping timeseries sheet for {component_type}"
+             )
+             return
+
+         self.logger.info(
+             f"Creating timeseries sheet for {component_type} with {len(time_periods)} time periods"
+         )
+         self.logger.info(
+             f"First few time periods: {[(p.formatted_time, p.timestamp, p.period_index) for p in time_periods[:3]]}"
+         )
+
+         # Create DataFrame with human-readable timestamps
+         timestamps = [
+             period.formatted_time for period in time_periods
+         ]  # Use formatted_time instead of timestamp
+         df_data = {"timestamp": timestamps}
+
+         # Add component columns for each attribute
+         for attr_name, component_data in timeseries_data.items():
+             for component_name, timeseries_data_item in component_data.items():
+                 if isinstance(timeseries_data_item, list):
+                     # Handle efficient format (list of values)
+                     values = timeseries_data_item
+
+                     # Pad or truncate to match time periods
+                     while len(values) < len(timestamps):
+                         values.append(0)
+                     values = values[: len(timestamps)]
+                     df_data[f"{component_name}_{attr_name}"] = values
+
+         df = pd.DataFrame(df_data)
+         sheet_name = f"{component_type.title()} Timeseries"
+         df.to_excel(writer, sheet_name=sheet_name, index=False)
+         self.logger.info(
+             f"Created timeseries sheet '{sheet_name}' with {len(df)} rows and {len(df.columns)} columns"
+         )
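
Note that the pad loop appends to the stored list in place; a side-effect-free sketch of the same pad-or-truncate step:

    def fit_length(values, n, fill=0):
        # Pad with `fill` up to n entries, then truncate to exactly n.
        return (list(values) + [fill] * n)[:n]

    print(fit_length([1, 2], 4))     # -> [1, 2, 0, 0]
    print(fit_length([1, 2, 3], 2))  # -> [1, 2]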
+
+     def _create_carriers_sheet(self, writer, carriers: List[Dict]):
+         """Create carriers sheet"""
+         if not carriers:
+             return
+
+         df = pd.DataFrame(carriers)
+         df.to_excel(writer, sheet_name="Carriers", index=False)
+
+     def _create_network_config_sheet(self, writer, conn):
+         """Create network configuration sheet"""
+         try:
+             config = get_network_config(conn, None)  # Master scenario
+             if config:
+                 config_data = []
+                 for param_name, param_value in config.items():
+                     config_data.append(
+                         {
+                             "Parameter": param_name,
+                             "Value": str(param_value),
+                             "Type": type(param_value).__name__,
+                             "Description": "",
+                         }
+                     )
+
+                 if config_data:
+                     df = pd.DataFrame(config_data)
+                     df.to_excel(writer, sheet_name="Network Config", index=False)
+         except Exception as e:
+             self.logger.warning(f"Could not create network config sheet: {e}")
+
+     def _calculate_export_stats(
+         self, processed_components: Dict, timeseries_data: Dict
+     ) -> Dict[str, Any]:
+         """Calculate export statistics"""
+         total_components = sum(
+             len(components) for components in processed_components.values()
+         )
+         total_timeseries = sum(
+             len(attr_data)
+             for comp_data in timeseries_data.values()
+             for attr_data in comp_data.values()
+         )
+
+         return {
+             "total_components": total_components,
+             "total_timeseries": total_timeseries,
+             "component_types": len(processed_components),
+             "components_by_type": {
+                 comp_type: len(components)
+                 for comp_type, components in processed_components.items()
+             },
+         }
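
A worked example of the counting logic with hypothetical data: one GENERATOR attribute carrying two component columns counts as two timeseries.

    timeseries_data = {"GENERATOR": {"p_max_pu": {"g1": [0.9], "g2": [0.8]}}}
    total_timeseries = sum(
        len(attr_data)
        for comp_data in timeseries_data.values()
        for attr_data in comp_data.values()
    )
    print(total_timeseries)  # -> 2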
+
+     def _get_solve_results(
+         self, conn, scenario_id: Optional[int]
+     ) -> Optional[Dict[str, Any]]:
+         """Get solve results from the database"""
+         try:
+             cursor = conn.execute(
+                 """
+                 SELECT results_json, metadata_json, solver_name, solve_status,
+                        objective_value, solve_time_seconds, solved_at
+                 FROM network_solve_results
+                 WHERE scenario_id = ? OR (scenario_id IS NULL AND ? IS NULL)
+                 """,
+                 (scenario_id, scenario_id),
+             )
+
+             row = cursor.fetchone()
+             if not row:
+                 self.logger.info(f"No solve results found for scenario {scenario_id}")
+                 return None
+
+             (
+                 results_json_str,
+                 metadata_json_str,
+                 solver_name,
+                 solve_status,
+                 objective_value,
+                 solve_time,
+                 solved_at,
+             ) = row
+
+             # Parse the JSON results
+             if results_json_str:
+                 results = json.loads(results_json_str)
+                 # Add metadata from the table columns
+                 results["solver_name"] = solver_name
+                 results["solve_status"] = solve_status
+                 results["objective_value"] = objective_value
+                 results["solve_time_seconds"] = solve_time
+                 results["solved_at"] = solved_at
+
+                 if metadata_json_str:
+                     metadata = json.loads(metadata_json_str)
+                     results["metadata"] = metadata
+
+                 return results
+
+             return None
+
+         except Exception as e:
+             self.logger.warning(f"Failed to retrieve solve results: {e}")
+             return None
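
The WHERE clause matches either a concrete scenario_id or the NULL base-network row when scenario_id is None, since `scenario_id = NULL` is never true in SQL. A throwaway in-memory demonstration of the pattern:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE t (scenario_id INTEGER)")
    conn.executemany("INSERT INTO t VALUES (?)", [(None,), (1,)])

    for sid in (None, 1):
        (count,) = conn.execute(
            "SELECT COUNT(*) FROM t WHERE scenario_id = ? OR (scenario_id IS NULL AND ? IS NULL)",
            (sid, sid),
        ).fetchone()
        print(sid, count)  # None -> 1 (the NULL row), 1 -> 1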
+
+     def _get_solve_results_by_year(
+         self, conn, scenario_id: Optional[int]
+     ) -> Optional[Dict[int, Dict[str, Any]]]:
+         """Get per-year solve results from the database"""
+         try:
+             cursor = conn.execute(
+                 """
+                 SELECT year, results_json, metadata_json
+                 FROM network_solve_results_by_year
+                 WHERE scenario_id = ? OR (scenario_id IS NULL AND ? IS NULL)
+                 ORDER BY year
+                 """,
+                 (scenario_id, scenario_id),
+             )
+
+             rows = cursor.fetchall()
+             if not rows:
+                 self.logger.info(
+                     f"No per-year solve results found for scenario {scenario_id}"
+                 )
+                 return None
+
+             year_results = {}
+             for row in rows:
+                 year, results_json_str, metadata_json_str = row
+
+                 if results_json_str:
+                     year_data = json.loads(results_json_str)
+
+                     # Add metadata if available
+                     if metadata_json_str:
+                         metadata = json.loads(metadata_json_str)
+                         year_data["metadata"] = metadata
+
+                     year_results[year] = year_data
+
+             return year_results if year_results else None
+
+         except Exception as e:
+             self.logger.warning(f"Failed to retrieve per-year solve results: {e}")
+             return None
+
+     def _create_statistics_sheet(self, writer, scenario_id: Optional[int], conn):
+         """Create statistics sheet with full-run solve results (no per-year data)"""
+         try:
+             # Get solve results
+             solve_results = self._get_solve_results(conn, scenario_id)
+             if not solve_results:
+                 self.logger.info(
+                     "No solve results available, skipping statistics sheet"
+                 )
+                 return
+
+             # Prepare data for the statistics sheet
+             stats_data = []
+
+             # Section 1: Solve Summary
+             stats_data.extend(
+                 [
+                     ["SOLVE SUMMARY", ""],
+                     ["Solver Name", solve_results.get("solver_name", "Unknown")],
+                     ["Solve Status", solve_results.get("solve_status", "Unknown")],
+                     [
+                         "Solve Time (seconds)",
+                         solve_results.get("solve_time_seconds", 0),
+                     ],
+                     ["Objective Value", solve_results.get("objective_value", 0)],
+                     ["Solved At", solve_results.get("solved_at", "")],
+                     ["", ""],  # Empty row separator
+                 ]
+             )
+
+             # Extract network statistics if available
+             network_stats = solve_results.get("network_statistics", {})
+
+             # Section 2: Core Network Statistics
+             core_summary = network_stats.get("core_summary", {})
+             if core_summary:
+                 stats_data.extend(
+                     [
+                         ["CORE NETWORK STATISTICS", ""],
+                         [
+                             "Total Generation (MWh)",
+                             core_summary.get("total_generation_mwh", 0),
+                         ],
+                         ["Total Demand (MWh)", core_summary.get("total_demand_mwh", 0)],
+                         ["Total Cost", core_summary.get("total_cost", 0)],
+                         ["Load Factor", core_summary.get("load_factor", 0)],
+                         [
+                             "Unserved Energy (MWh)",
+                             core_summary.get("unserved_energy_mwh", 0),
+                         ],
+                         ["", ""],
+                     ]
+                 )
+
+             # Section 3: Custom Statistics
+             custom_stats = network_stats.get("custom_statistics", {})
+             if custom_stats:
+                 # Emissions by Carrier
+                 emissions = custom_stats.get("emissions_by_carrier", {})
+                 if emissions:
+                     stats_data.extend([["EMISSIONS BY CARRIER (tons CO2)", ""]])
+                     for carrier, value in emissions.items():
+                         if value > 0:  # Only show carriers with emissions
+                             stats_data.append([carrier, value])
+                     stats_data.extend(
+                         [
+                             [
+                                 "Total Emissions (tons CO2)",
+                                 custom_stats.get("total_emissions_tons_co2", 0),
+                             ],
+                             ["", ""],
+                         ]
+                     )
+
+                 # Generation Dispatch by Carrier
+                 dispatch = custom_stats.get("dispatch_by_carrier", {})
+                 if dispatch:
+                     stats_data.extend([["GENERATION DISPATCH BY CARRIER (MWh)", ""]])
+                     for carrier, value in dispatch.items():
+                         if value > 0:  # Only show carriers with generation
+                             stats_data.append([carrier, value])
+                     stats_data.append(["", ""])
+
+                 # Power Capacity by Carrier (MW)
+                 power_capacity = custom_stats.get("power_capacity_by_carrier", {})
+                 if power_capacity:
+                     stats_data.extend([["POWER CAPACITY BY CARRIER (MW)", ""]])
+                     for carrier, value in power_capacity.items():
+                         if value > 0:  # Only show carriers with capacity
+                             stats_data.append([carrier, value])
+                     stats_data.append(["", ""])
+
+                 # Energy Capacity by Carrier (MWh)
+                 energy_capacity = custom_stats.get("energy_capacity_by_carrier", {})
+                 if energy_capacity:
+                     stats_data.extend([["ENERGY CAPACITY BY CARRIER (MWh)", ""]])
+                     for carrier, value in energy_capacity.items():
+                         if value > 0:  # Only show carriers with capacity
+                             stats_data.append([carrier, value])
+                     stats_data.append(["", ""])
+
+                 # Capital Costs by Carrier
+                 capital_costs = custom_stats.get("capital_cost_by_carrier", {})
+                 if capital_costs:
+                     stats_data.extend([["CAPITAL COSTS BY CARRIER", ""]])
+                     for carrier, value in capital_costs.items():
+                         if value > 0:  # Only show carriers with costs
+                             stats_data.append([carrier, value])
+                     stats_data.extend(
+                         [
+                             [
+                                 "Total Capital Cost",
+                                 custom_stats.get("total_capital_cost", 0),
+                             ],
+                             ["", ""],
+                         ]
+                     )
+
+                 # Operational Costs by Carrier
+                 op_costs = custom_stats.get("operational_cost_by_carrier", {})
+                 if op_costs:
+                     stats_data.extend([["OPERATIONAL COSTS BY CARRIER", ""]])
+                     for carrier, value in op_costs.items():
+                         if value > 0:  # Only show carriers with costs
+                             stats_data.append([carrier, value])
+                     stats_data.extend(
+                         [
+                             [
+                                 "Total Operational Cost",
+                                 custom_stats.get("total_operational_cost", 0),
+                             ],
+                             ["", ""],
+                         ]
+                     )
+
+                 # Total System Costs by Carrier
+                 total_costs = custom_stats.get("total_system_cost_by_carrier", {})
+                 if total_costs:
+                     stats_data.extend([["TOTAL SYSTEM COSTS BY CARRIER", ""]])
+                     for carrier, value in total_costs.items():
+                         if value > 0:  # Only show carriers with costs
+                             stats_data.append([carrier, value])
+                     stats_data.extend(
+                         [
+                             [
+                                 "Total Currency Cost",
+                                 custom_stats.get("total_currency_cost", 0),
+                             ],
+                             [
+                                 "Average Price per MWh",
+                                 custom_stats.get("average_price_per_mwh", 0),
+                             ],
+                             ["", ""],
+                         ]
+                     )
+
+                 # Unmet Load Statistics
+                 unmet_stats = custom_stats.get("unmet_load_statistics", {})
+                 if unmet_stats:
+                     stats_data.extend(
+                         [
+                             ["UNMET LOAD STATISTICS", ""],
+                             ["Unmet Load (MWh)", unmet_stats.get("unmet_load_mwh", 0)],
+                             [
+                                 "Unmet Load Percentage",
+                                 custom_stats.get("unmet_load_percentage", 0),
+                             ],
+                             [
+                                 "Max Unmet Load Hour (MW)",
+                                 custom_stats.get("max_unmet_load_hour_mw", 0),
+                             ],
+                             ["", ""],
+                         ]
+                     )
+
+             # Section 4: Component Storage Statistics
+             storage_stats = solve_results.get("component_storage_stats", {})
+             if storage_stats:
+                 stats_data.extend([["COMPONENT STORAGE STATISTICS", ""]])
+                 for key, value in storage_stats.items():
+                     # Convert snake_case to readable format
+                     readable_key = key.replace("_", " ").title()
+                     stats_data.append([readable_key, value])
+                 stats_data.append(["", ""])
+
+             # Section 5: Runtime Information
+             runtime_info = network_stats.get("runtime_info", {})
+             if runtime_info:
+                 stats_data.extend([["RUNTIME INFORMATION", ""]])
+                 for key, value in runtime_info.items():
+                     # Convert snake_case to readable format
+                     readable_key = key.replace("_", " ").title()
+                     stats_data.append([readable_key, value])
+                 stats_data.append(["", ""])
+
+             # Section 6: Solver Information
+             solver_info = network_stats.get("solver_info", {})
+             if solver_info:
+                 stats_data.extend([["SOLVER INFORMATION", ""]])
+                 for key, value in solver_info.items():
+                     # Convert snake_case to readable format
+                     readable_key = key.replace("_", " ").title()
+                     stats_data.append([readable_key, value])
+                 stats_data.append(["", ""])
+
+             # Create DataFrame and write to Excel (simple 2-column format)
+             if stats_data:
+                 df = pd.DataFrame(stats_data, columns=["Parameter", "Value"])
+                 df.to_excel(writer, sheet_name="Statistics", index=False)
+                 self.logger.info(
+                     f"Created Statistics sheet with {len(stats_data)} rows"
+                 )
+
+         except Exception as e:
+             self.logger.warning(f"Failed to create statistics sheet: {e}")
+             # Don't fail the entire export if statistics sheet fails
+
+     def _create_per_year_statistics_sheet(self, writer, scenario_id: Optional[int], conn):
+         """Create per-year statistics sheet in tidy data format"""
+         try:
+             # Get per-year solve results
+             year_results = self._get_solve_results_by_year(conn, scenario_id)
+             if not year_results:
+                 self.logger.info(
+                     "No per-year solve results available, skipping per-year statistics sheet"
+                 )
+                 return
+
+             # Prepare tidy data: Variable, Year, Carrier, Value, Units
+             tidy_data = []
+
+             # Get sorted years
+             years = sorted(year_results.keys())
+
+             # Define the statistics we want to include with their units
+             stat_definitions = [
+                 ("dispatch_by_carrier", "Generation Dispatch", "MWh"),
+                 ("power_capacity_by_carrier", "Power Capacity", "MW"),
+                 ("energy_capacity_by_carrier", "Energy Capacity", "MWh"),
+                 ("capital_cost_by_carrier", "Capital Cost", "Currency"),
+                 ("operational_cost_by_carrier", "Operational Cost", "Currency"),
+                 ("total_system_cost_by_carrier", "Total System Cost", "Currency"),
+                 ("emissions_by_carrier", "Emissions", "tons CO2"),
+             ]
+
+             # Process each statistic type
+             for stat_key, stat_name, units in stat_definitions:
+                 # Collect all carriers across all years for this statistic
+                 all_carriers = set()
+                 for year in years:
+                     year_data = year_results[year]
+                     if (
+                         "network_statistics" in year_data
+                         and "custom_statistics" in year_data["network_statistics"]
+                     ):
+                         custom_stats = year_data["network_statistics"][
+                             "custom_statistics"
+                         ]
+                         if stat_key in custom_stats:
+                             all_carriers.update(custom_stats[stat_key].keys())
+
+                 # Add data rows for each carrier and year combination
+                 for carrier in sorted(all_carriers):
+                     for year in years:
+                         year_data = year_results[year]
+                         value = 0.0
+
+                         if (
+                             "network_statistics" in year_data
+                             and "custom_statistics" in year_data["network_statistics"]
+                         ):
+                             custom_stats = year_data["network_statistics"][
+                                 "custom_statistics"
+                             ]
+                             if (
+                                 stat_key in custom_stats
+                                 and carrier in custom_stats[stat_key]
+                             ):
+                                 value = custom_stats[stat_key][carrier]
+
+                         # Only include rows with non-zero values to keep the data clean
+                         if value > 0:
+                             tidy_data.append([stat_name, year, carrier, value, units])
+
+             # Add core summary statistics (these don't have carriers)
+             core_stat_definitions = [
+                 ("total_generation_mwh", "Total Generation", "MWh"),
+                 ("total_demand_mwh", "Total Demand", "MWh"),
+                 ("total_cost", "Total Cost", "Currency"),
+                 ("load_factor", "Load Factor", "Ratio"),
+                 ("unserved_energy_mwh", "Unserved Energy", "MWh"),
+                 ("total_emissions_tons_co2", "Total Emissions", "tons CO2"),
+             ]
+
+             for stat_key, stat_name, units in core_stat_definitions:
+                 for year in years:
+                     year_data = year_results[year]
+                     value = 0.0
+
+                     # Check both core_summary and custom_statistics
+                     if "network_statistics" in year_data:
+                         network_stats = year_data["network_statistics"]
+
+                         # Try core_summary first
+                         if (
+                             "core_summary" in network_stats
+                             and stat_key in network_stats["core_summary"]
+                         ):
+                             value = network_stats["core_summary"][stat_key]
+                         # Try custom_statistics as fallback
+                         elif (
+                             "custom_statistics" in network_stats
+                             and stat_key in network_stats["custom_statistics"]
+                         ):
+                             value = network_stats["custom_statistics"][stat_key]
+
+                     # Include all core statistics (even zeros for completeness)
+                     tidy_data.append([stat_name, year, "Total", value, units])
+
+             # Create DataFrame and write to Excel
+             if tidy_data:
+                 df = pd.DataFrame(
+                     tidy_data, columns=["Variable", "Year", "Carrier", "Value", "Units"]
+                 )
+                 df.to_excel(writer, sheet_name="Per-Year Statistics", index=False)
+                 self.logger.info(
+                     f"Created Per-Year Statistics sheet with {len(tidy_data)} rows"
+                 )
+             else:
+                 self.logger.info("No per-year statistics data to export")
+
+         except Exception as e:
+             self.logger.warning(f"Failed to create per-year statistics sheet: {e}")
+             # Don't fail the entire export if per-year statistics sheet fails
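
Because the sheet is tidy (one observation per row), it pivots straight back into a year-by-carrier table; a sketch, assuming the workbook written above is at model.xlsx:

    import pandas as pd

    df = pd.read_excel("model.xlsx", sheet_name="Per-Year Statistics")
    dispatch = df[df["Variable"] == "Generation Dispatch"].pivot_table(
        index="Year", columns="Carrier", values="Value", aggfunc="sum"
    )
    print(dispatch)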
+
+     def _get_app_version(self) -> str:
+         """Get the application version."""
+         try:
+             # Try to read from package.json in the project root
+             import json
+             import os
+             from pathlib import Path
+
+             # Look for package.json in parent directories
+             current_dir = Path(__file__).parent
+             while current_dir != current_dir.parent:
+                 package_json = current_dir / "package.json"
+                 if package_json.exists():
+                     with open(package_json, "r") as f:
+                         package_data = json.load(f)
+                         return package_data.get("version", "1.0.0")
+                 current_dir = current_dir.parent
+
+             # Fallback version
+             return "1.0.0"
+         except Exception as e:
+             self.logger.warning(f"Could not get app version: {e}")
+             return "1.0.0"